1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
 9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
 26/* We have attribute handlers using C-specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28#define GCC_DIAG_STYLE __gcc_cdiag__
29
30#include "config.h"
31#include "system.h"
32#include "coretypes.h"
33#include "tm.h"
34#include "tree.h"
35#include "flags.h"
36#include "defaults.h"
37#include "toplev.h"
38#include "output.h"
39#include "ggc.h"
40#include "debug.h"
41#include "convert.h"
42#include "target.h"
43#include "function.h"
44#include "cgraph.h"
45#include "tree-inline.h"
46#include "tree-iterator.h"
47#include "gimple.h"
48#include "tree-dump.h"
49#include "pointer-set.h"
50#include "langhooks.h"
51
52#include "ada.h"
53#include "types.h"
54#include "atree.h"
55#include "elists.h"
56#include "namet.h"
57#include "nlists.h"
58#include "stringt.h"
59#include "uintp.h"
60#include "fe.h"
61#include "sinfo.h"
62#include "einfo.h"
63#include "ada-tree.h"
64#include "gigi.h"
65
66#ifndef MAX_FIXED_MODE_SIZE
67#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
68#endif
69
70#ifndef MAX_BITS_PER_WORD
71#define MAX_BITS_PER_WORD BITS_PER_WORD
72#endif
73
74/* If nonzero, pretend we are allocating at global level. */
75int force_global;
76
77/* Tree nodes for the various types and decls we create. */
78tree gnat_std_decls[(int) ADT_LAST];
79
80/* Functions to call for each of the possible raise reasons. */
81tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82
83/* Forward declarations for handlers of attributes. */
84static tree handle_const_attribute (tree *, tree, tree, int, bool *);
85static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
86static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
87static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
88static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
89static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
90static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
91static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
92static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
93
94/* Fake handler for attributes we don't properly support, typically because
 95 they'd require dragging in a lot of the common C front-end circuitry. */
96static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
97
98/* Table of machine-independent internal attributes for Ada. We support
99 this minimal set of attributes to accommodate the needs of builtins. */
100const struct attribute_spec gnat_internal_attribute_table[] =
101{
102 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
103 { "const", 0, 0, true, false, false, handle_const_attribute },
104 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
105 { "pure", 0, 0, true, false, false, handle_pure_attribute },
106 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
107 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
108 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
109 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
110 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
111 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
112
113 /* ??? format and format_arg are heavy and not supported, which actually
 114 prevents support for stdio builtins, which we nevertheless declare as
 115 part of the common builtins.def contents. */
116 { "format", 3, 3, false, true, true, fake_attribute_handler },
117 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
118
119 { NULL, 0, 0, false, false, false, NULL }
120};
121
122/* Associates a GNAT tree node to a GCC tree node. It is used in
123 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
124 of `save_gnu_tree' for more info. */
125static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
126
127#define GET_GNU_TREE(GNAT_ENTITY) \
128 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
129
130#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
131 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
132
133#define PRESENT_GNU_TREE(GNAT_ENTITY) \
134 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
135
136/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
137static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
138
139#define GET_DUMMY_NODE(GNAT_ENTITY) \
140 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
141
142#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
143 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
144
145#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
146 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
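/* Worked example of the indexing scheme above, for a hypothetical entity E
   whose Node_Id is First_Node_Id + 42: both tables are plain arrays of
   max_gnat_nodes elements, so

     GET_GNU_TREE (E)          reads   associate_gnat_to_gnu[42]
     SET_GNU_TREE (E, decl)    writes  associate_gnat_to_gnu[42]
     PRESENT_GNU_TREE (E)      tests   associate_gnat_to_gnu[42] != NULL_TREE

   and likewise for the GET/SET/PRESENT_DUMMY_NODE macros with
   dummy_node_table.  */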
147
 148/* This variable keeps a table of types for each precision so that we only
149 allocate each of them once. Signed and unsigned types are kept separate.
150
151 Note that these types are only used when fold-const requests something
152 special. Perhaps we should NOT share these types; we'll see how it
153 goes later. */
154static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
155
156/* Likewise for float types, but record these by mode. */
157static GTY(()) tree float_types[NUM_MACHINE_MODES];
158
159/* For each binding contour we allocate a binding_level structure to indicate
160 the binding depth. */
161
162struct gnat_binding_level GTY((chain_next ("%h.chain")))
163{
164 /* The binding level containing this one (the enclosing binding level). */
165 struct gnat_binding_level *chain;
166 /* The BLOCK node for this level. */
167 tree block;
168 /* If nonzero, the setjmp buffer that needs to be updated for any
169 variable-sized definition within this context. */
170 tree jmpbuf_decl;
171};
172
173/* The binding level currently in effect. */
174static GTY(()) struct gnat_binding_level *current_binding_level;
175
176/* A chain of gnat_binding_level structures awaiting reuse. */
177static GTY((deletable)) struct gnat_binding_level *free_binding_level;
178
179/* An array of global declarations. */
180static GTY(()) VEC(tree,gc) *global_decls;
181
182/* An array of builtin function declarations. */
183static GTY(()) VEC(tree,gc) *builtin_decls;
184
185/* An array of global renaming pointers. */
186static GTY(()) VEC(tree,gc) *global_renaming_pointers;
187
188/* A chain of unused BLOCK nodes. */
189static GTY((deletable)) tree free_block_chain;
190
191static tree merge_sizes (tree, tree, tree, bool, bool);
192static tree compute_related_constant (tree, tree);
193static tree split_plus (tree, tree *);
194static void gnat_gimplify_function (tree);
195static tree float_type_for_precision (int, enum machine_mode);
196static tree convert_to_fat_pointer (tree, tree);
197static tree convert_to_thin_pointer (tree, tree);
 198static tree make_descriptor_field (const char *, tree, tree, tree);
199static bool potential_alignment_gap (tree, tree, tree);
200\f
201/* Initialize the association of GNAT nodes to GCC trees. */
202
203void
204init_gnat_to_gnu (void)
205{
206 associate_gnat_to_gnu
207 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
208}
209
210/* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
 211 which is to be associated with GNAT_ENTITY. Such a GCC tree node is always
 212 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
213
214 If GNU_DECL is zero, a previous association is to be reset. */
215
216void
217save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
218{
219 /* Check that GNAT_ENTITY is not already defined and that it is being set
220 to something which is a decl. Raise gigi 401 if not. Usually, this
 221 means GNAT_ENTITY is defined twice, but occasionally it is due to some
222 Gigi problem. */
223 gcc_assert (!(gnu_decl
224 && (PRESENT_GNU_TREE (gnat_entity)
225 || (!no_check && !DECL_P (gnu_decl)))));
226
227 SET_GNU_TREE (gnat_entity, gnu_decl);
228}
229
230/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
231 Return the ..._DECL node that was associated with it. If there is no tree
232 node associated with GNAT_ENTITY, abort.
233
234 In some cases, such as delayed elaboration or expressions that need to
235 be elaborated only once, GNAT_ENTITY is really not an entity. */
236
237tree
238get_gnu_tree (Entity_Id gnat_entity)
239{
240 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
241 return GET_GNU_TREE (gnat_entity);
242}
243
244/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
245
246bool
247present_gnu_tree (Entity_Id gnat_entity)
248{
249 return PRESENT_GNU_TREE (gnat_entity);
250}
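/* A minimal usage sketch for the three routines above, assuming a
   hypothetical caller that already has a defining identifier gnat_entity
   and a type gnu_type for it:

     tree gnu_decl;
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       {
         gnu_decl = build_decl (VAR_DECL, get_identifier ("x"), gnu_type);
         save_gnu_tree (gnat_entity, gnu_decl, false);
       }

   Passing false for NO_CHECK keeps the assertion that GNU_DECL is a
   ..._DECL node; calling save_gnu_tree with a null GNU_DECL resets a
   previous association.  */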
251\f
252/* Initialize the association of GNAT nodes to GCC trees as dummies. */
253
254void
255init_dummy_type (void)
256{
257 dummy_node_table
258 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
259}
260
261/* Make a dummy type corresponding to GNAT_TYPE. */
262
263tree
264make_dummy_type (Entity_Id gnat_type)
265{
266 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
267 tree gnu_type;
268
269 /* If there is an equivalent type, get its underlying type. */
270 if (Present (gnat_underlying))
271 gnat_underlying = Underlying_Type (gnat_underlying);
272
273 /* If there was no equivalent type (can only happen when just annotating
274 types) or underlying type, go back to the original type. */
275 if (No (gnat_underlying))
276 gnat_underlying = gnat_type;
277
 278 /* If there is already a dummy type, use that one. Else make one. */
279 if (PRESENT_DUMMY_NODE (gnat_underlying))
280 return GET_DUMMY_NODE (gnat_underlying);
281
282 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
283 an ENUMERAL_TYPE. */
284 gnu_type = make_node (Is_Record_Type (gnat_underlying)
285 ? tree_code_for_record_type (gnat_underlying)
286 : ENUMERAL_TYPE);
287 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
288 TYPE_DUMMY_P (gnu_type) = 1;
289 TYPE_STUB_DECL (gnu_type)
290 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
 291 if (AGGREGATE_TYPE_P (gnu_type))
 292 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
293
294 SET_DUMMY_NODE (gnat_underlying, gnu_type);
295
296 return gnu_type;
297}
298\f
299/* Return nonzero if we are currently in the global binding level. */
300
301int
302global_bindings_p (void)
303{
304 return ((force_global || !current_function_decl) ? -1 : 0);
305}
306
307/* Enter a new binding level. */
308
309void
310gnat_pushlevel ()
311{
312 struct gnat_binding_level *newlevel = NULL;
313
314 /* Reuse a struct for this binding level, if there is one. */
315 if (free_binding_level)
316 {
317 newlevel = free_binding_level;
318 free_binding_level = free_binding_level->chain;
319 }
320 else
321 newlevel
322 = (struct gnat_binding_level *)
323 ggc_alloc (sizeof (struct gnat_binding_level));
324
325 /* Use a free BLOCK, if any; otherwise, allocate one. */
326 if (free_block_chain)
327 {
328 newlevel->block = free_block_chain;
329 free_block_chain = BLOCK_CHAIN (free_block_chain);
330 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
331 }
332 else
333 newlevel->block = make_node (BLOCK);
334
335 /* Point the BLOCK we just made to its parent. */
336 if (current_binding_level)
337 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
338
339 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
340 TREE_USED (newlevel->block) = 1;
341
342 /* Add this level to the front of the chain (stack) of levels that are
343 active. */
344 newlevel->chain = current_binding_level;
345 newlevel->jmpbuf_decl = NULL_TREE;
346 current_binding_level = newlevel;
347}
348
349/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
350 and point FNDECL to this BLOCK. */
351
352void
353set_current_block_context (tree fndecl)
354{
355 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
356 DECL_INITIAL (fndecl) = current_binding_level->block;
357}
358
359/* Set the jmpbuf_decl for the current binding level to DECL. */
360
361void
362set_block_jmpbuf_decl (tree decl)
363{
364 current_binding_level->jmpbuf_decl = decl;
365}
366
367/* Get the jmpbuf_decl, if any, for the current binding level. */
368
369tree
370get_block_jmpbuf_decl ()
371{
372 return current_binding_level->jmpbuf_decl;
373}
374
375/* Exit a binding level. Set any BLOCK into the current code group. */
376
377void
378gnat_poplevel ()
379{
380 struct gnat_binding_level *level = current_binding_level;
381 tree block = level->block;
382
383 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
384 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
385
 386 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
 387 are no variables, free the block and merge its subblocks into those of its
 388 parent block. Otherwise, add it to the list of its parent. */
389 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
390 ;
391 else if (BLOCK_VARS (block) == NULL_TREE)
392 {
393 BLOCK_SUBBLOCKS (level->chain->block)
394 = chainon (BLOCK_SUBBLOCKS (block),
395 BLOCK_SUBBLOCKS (level->chain->block));
396 BLOCK_CHAIN (block) = free_block_chain;
397 free_block_chain = block;
398 }
399 else
400 {
401 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
402 BLOCK_SUBBLOCKS (level->chain->block) = block;
403 TREE_USED (block) = 1;
404 set_block_for_group (block);
405 }
406
407 /* Free this binding structure. */
408 current_binding_level = level->chain;
409 level->chain = free_binding_level;
410 free_binding_level = level;
411}
412
413\f
414/* Records a ..._DECL node DECL as belonging to the current lexical scope
415 and uses GNAT_NODE for location information and propagating flags. */
416
417void
418gnat_pushdecl (tree decl, Node_Id gnat_node)
419{
420 /* If this decl is public external or at toplevel, there is no context.
 421 But PARM_DECLs always go in the level of their function. */
422 if (TREE_CODE (decl) != PARM_DECL
423 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
424 || global_bindings_p ()))
425 DECL_CONTEXT (decl) = 0;
426 else
427 {
428 DECL_CONTEXT (decl) = current_function_decl;
429
430 /* Functions imported in another function are not really nested. */
431 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
432 DECL_NO_STATIC_CHAIN (decl) = 1;
433 }
434
435 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
436
437 /* Set the location of DECL and emit a declaration for it. */
438 if (Present (gnat_node))
439 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
440 add_decl_expr (decl, gnat_node);
441
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later. Put global variables in the
444 globals list and builtin functions in a dedicated list to speed up
445 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
446 the list, as they will cause trouble with the debugger and aren't needed
447 anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
450 {
451 if (global_bindings_p ())
452 {
453 VEC_safe_push (tree, gc, global_decls, decl);
454
455 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
456 VEC_safe_push (tree, gc, builtin_decls, decl);
457 }
458 else
459 {
460 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
461 BLOCK_VARS (current_binding_level->block) = decl;
462 }
463 }
464
465 /* For the declaration of a type, set its name if it either is not already
 466 set or if the previous type name was not derived from a source name.
467 We'd rather have the type named with a real name and all the pointer
468 types to the same object have the same POINTER_TYPE node. Code in the
469 equivalent function of c-decl.c makes a copy of the type node here, but
470 that may cause us trouble with incomplete types. We make an exception
471 for fat pointer types because the compiler automatically builds them
472 for unconstrained array types and the debugger uses them to represent
473 both these and pointers to these. */
474 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
475 {
476 tree t = TREE_TYPE (decl);
477
 478 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
479 ;
480 else if (TYPE_FAT_POINTER_P (t))
481 {
482 tree tt = build_variant_type_copy (t);
483 TYPE_NAME (tt) = decl;
484 TREE_USED (tt) = TREE_USED (t);
485 TREE_TYPE (decl) = tt;
486 DECL_ORIGINAL_TYPE (decl) = t;
487 t = NULL_TREE;
488 }
489 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
490 ;
491 else
492 t = NULL_TREE;
493
494 /* Propagate the name to all the variants. This is needed for
495 the type qualifiers machinery to work properly. */
496 if (t)
497 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
498 TYPE_NAME (t) = decl;
499 }
500}
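/* A minimal sketch of how the entry points above are typically combined by
   a caller (hypothetical names; gnu_decl and gnat_node stand for values the
   caller already has):

     gnat_pushlevel ();
     gnat_pushdecl (gnu_decl, gnat_node);
     ...
     gnat_poplevel ();

   gnat_pushdecl chains local declarations onto BLOCK_VARS of the current
   binding level, and gnat_poplevel then either recycles the BLOCK, if it
   ended up with no variables, or chains it into its parent's subblocks;
   a function-level BLOCK is left in place.  */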
501\f
502/* Do little here. Set up the standard declarations later after the
503 front end has been run. */
504
505void
506gnat_init_decl_processing (void)
507{
508 /* Make the binding_level structure for global names. */
509 current_function_decl = 0;
510 current_binding_level = 0;
511 free_binding_level = 0;
512 gnat_pushlevel ();
513
514 build_common_tree_nodes (true, true);
515
516 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
517 corresponding to the size of Pmode. In most cases when ptr_mode and
518 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
 519 far better code using the width of Pmode. Do this here since we need
520 this before we can expand the GNAT types. */
521 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
522 set_sizetype (size_type_node);
523
524 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
525 boolean_type_node = make_node (BOOLEAN_TYPE);
526 TYPE_PRECISION (boolean_type_node) = 1;
527 fixup_unsigned_type (boolean_type_node);
528 TYPE_RM_SIZE_NUM (boolean_type_node) = bitsize_int (1);
529
530 build_common_tree_nodes_2 (0);
531
532 ptr_void_type_node = build_pointer_type (void_type_node);
533}
534\f
535/* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
536
537void
 538record_builtin_type (const char *name, tree type)
 539{
 540 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
 541
 542 gnat_pushdecl (type_decl, Empty);
 543
544 if (debug_hooks->type_decl)
545 debug_hooks->type_decl (type_decl, false);
546}
547\f
548/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
549 finish constructing the record or union type. If REP_LEVEL is zero, this
550 record has no representation clause and so will be entirely laid out here.
551 If REP_LEVEL is one, this record has a representation clause and has been
552 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
553 this record is derived from a parent record and thus inherits its layout;
554 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
555 true, the record type is expected to be modified afterwards so it will
556 not be sent to the back-end for finalization. */
557
558void
559finish_record_type (tree record_type, tree fieldlist, int rep_level,
560 bool do_not_finalize)
561{
562 enum tree_code code = TREE_CODE (record_type);
563 tree name = TYPE_NAME (record_type);
564 tree ada_size = bitsize_zero_node;
565 tree size = bitsize_zero_node;
566 bool had_size = TYPE_SIZE (record_type) != 0;
567 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
568 bool had_align = TYPE_ALIGN (record_type) != 0;
569 tree field;
570
 571 TYPE_FIELDS (record_type) = fieldlist;
 572
573 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
574 generate debug info and have a parallel type. */
575 if (name && TREE_CODE (name) == TYPE_DECL)
576 name = DECL_NAME (name);
577 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
578
579 /* Globally initialize the record first. If this is a rep'ed record,
580 that just means some initializations; otherwise, layout the record. */
581 if (rep_level > 0)
582 {
583 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
 584 SET_TYPE_MODE (record_type, BLKmode);
585
586 if (!had_size_unit)
587 TYPE_SIZE_UNIT (record_type) = size_zero_node;
588 if (!had_size)
589 TYPE_SIZE (record_type) = bitsize_zero_node;
590
591 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
592 out just like a UNION_TYPE, since the size will be fixed. */
593 else if (code == QUAL_UNION_TYPE)
594 code = UNION_TYPE;
595 }
596 else
597 {
 598 /* Ensure there isn't a size already set. There can be one in an error
599 case where there is a rep clause but all fields have errors and
600 no longer have a position. */
601 TYPE_SIZE (record_type) = 0;
602 layout_type (record_type);
603 }
604
605 /* At this point, the position and size of each field is known. It was
606 either set before entry by a rep clause, or by laying out the type above.
607
608 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
609 to compute the Ada size; the GCC size and alignment (for rep'ed records
610 that are not padding types); and the mode (for rep'ed records). We also
611 clear the DECL_BIT_FIELD indication for the cases we know have not been
612 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
613
614 if (code == QUAL_UNION_TYPE)
615 fieldlist = nreverse (fieldlist);
616
617 for (field = fieldlist; field; field = TREE_CHAIN (field))
618 {
619 tree type = TREE_TYPE (field);
620 tree pos = bit_position (field);
621 tree this_size = DECL_SIZE (field);
622 tree this_ada_size;
623
624 if ((TREE_CODE (type) == RECORD_TYPE
625 || TREE_CODE (type) == UNION_TYPE
626 || TREE_CODE (type) == QUAL_UNION_TYPE)
627 && !TYPE_IS_FAT_POINTER_P (type)
628 && !TYPE_CONTAINS_TEMPLATE_P (type)
629 && TYPE_ADA_SIZE (type))
630 this_ada_size = TYPE_ADA_SIZE (type);
631 else
632 this_ada_size = this_size;
633
634 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
635 if (DECL_BIT_FIELD (field)
636 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
637 {
638 unsigned int align = TYPE_ALIGN (type);
639
640 /* In the general case, type alignment is required. */
641 if (value_factor_p (pos, align))
642 {
643 /* The enclosing record type must be sufficiently aligned.
644 Otherwise, if no alignment was specified for it and it
645 has been laid out already, bump its alignment to the
646 desired one if this is compatible with its size. */
647 if (TYPE_ALIGN (record_type) >= align)
648 {
649 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
650 DECL_BIT_FIELD (field) = 0;
651 }
652 else if (!had_align
653 && rep_level == 0
654 && value_factor_p (TYPE_SIZE (record_type), align))
655 {
656 TYPE_ALIGN (record_type) = align;
657 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
658 DECL_BIT_FIELD (field) = 0;
659 }
660 }
661
 662 /* In the non-strict alignment case, only byte alignment is required. */
663 if (!STRICT_ALIGNMENT
664 && DECL_BIT_FIELD (field)
665 && value_factor_p (pos, BITS_PER_UNIT))
666 DECL_BIT_FIELD (field) = 0;
667 }
668
669 /* If we still have DECL_BIT_FIELD set at this point, we know the field
670 is technically not addressable. Except that it can actually be
671 addressed if the field is BLKmode and happens to be properly
672 aligned. */
673 DECL_NONADDRESSABLE_P (field)
674 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
675
676 /* A type must be as aligned as its most aligned field that is not
677 a bit-field. But this is already enforced by layout_type. */
678 if (rep_level > 0 && !DECL_BIT_FIELD (field))
679 TYPE_ALIGN (record_type)
680 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
681
682 switch (code)
683 {
684 case UNION_TYPE:
685 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
686 size = size_binop (MAX_EXPR, size, this_size);
687 break;
688
689 case QUAL_UNION_TYPE:
690 ada_size
691 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
692 this_ada_size, ada_size);
693 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
694 this_size, size);
695 break;
696
697 case RECORD_TYPE:
698 /* Since we know here that all fields are sorted in order of
699 increasing bit position, the size of the record is one
700 higher than the ending bit of the last field processed
701 unless we have a rep clause, since in that case we might
702 have a field outside a QUAL_UNION_TYPE that has a higher ending
703 position. So use a MAX in that case. Also, if this field is a
704 QUAL_UNION_TYPE, we need to take into account the previous size in
705 the case of empty variants. */
706 ada_size
707 = merge_sizes (ada_size, pos, this_ada_size,
708 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
709 size
710 = merge_sizes (size, pos, this_size,
711 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
712 break;
713
714 default:
715 gcc_unreachable ();
716 }
717 }
718
719 if (code == QUAL_UNION_TYPE)
720 nreverse (fieldlist);
721
722 /* If the type is discriminated, it can be used to access all its
723 constrained subtypes, so force structural equality checks. */
724 if (CONTAINS_PLACEHOLDER_P (size))
725 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
726
727 if (rep_level < 2)
728 {
729 /* If this is a padding record, we never want to make the size smaller
730 than what was specified in it, if any. */
731 if (TREE_CODE (record_type) == RECORD_TYPE
732 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
733 size = TYPE_SIZE (record_type);
734
735 /* Now set any of the values we've just computed that apply. */
736 if (!TYPE_IS_FAT_POINTER_P (record_type)
737 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
738 SET_TYPE_ADA_SIZE (record_type, ada_size);
739
740 if (rep_level > 0)
741 {
742 tree size_unit = had_size_unit
743 ? TYPE_SIZE_UNIT (record_type)
744 : convert (sizetype,
745 size_binop (CEIL_DIV_EXPR, size,
746 bitsize_unit_node));
747 unsigned int align = TYPE_ALIGN (record_type);
748
749 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
750 TYPE_SIZE_UNIT (record_type)
751 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
752
753 compute_record_mode (record_type);
754 }
755 }
756
757 if (!do_not_finalize)
758 rest_of_record_type_compilation (record_type);
759}
760
761/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
762 the debug information associated with it. It need not be invoked
763 directly in most cases since finish_record_type takes care of doing
764 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
765
766void
767rest_of_record_type_compilation (tree record_type)
768{
769 tree fieldlist = TYPE_FIELDS (record_type);
770 tree field;
771 enum tree_code code = TREE_CODE (record_type);
772 bool var_size = false;
773
774 for (field = fieldlist; field; field = TREE_CHAIN (field))
775 {
776 /* We need to make an XVE/XVU record if any field has variable size,
777 whether or not the record does. For example, if we have a union,
778 it may be that all fields, rounded up to the alignment, have the
779 same size, in which case we'll use that size. But the debug
780 output routines (except Dwarf2) won't be able to output the fields,
781 so we need to make the special record. */
782 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
783 /* If a field has a non-constant qualifier, the record will have
784 variable size too. */
785 || (code == QUAL_UNION_TYPE
786 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
787 {
788 var_size = true;
789 break;
790 }
791 }
792
793 /* If this record is of variable size, rename it so that the
794 debugger knows it is and make a new, parallel, record
795 that tells the debugger how the record is laid out. See
796 exp_dbug.ads. But don't do this for records that are padding
797 since they confuse GDB. */
798 if (var_size
799 && !(TREE_CODE (record_type) == RECORD_TYPE
800 && TYPE_IS_PADDING_P (record_type)))
801 {
802 tree new_record_type
803 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
804 ? UNION_TYPE : TREE_CODE (record_type));
805 tree orig_name = TYPE_NAME (record_type);
806 tree orig_id
807 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
808 : orig_name);
809 tree new_id
810 = concat_id_with_name (orig_id,
811 TREE_CODE (record_type) == QUAL_UNION_TYPE
812 ? "XVU" : "XVE");
813 tree last_pos = bitsize_zero_node;
814 tree old_field;
815 tree prev_old_field = 0;
816
817 TYPE_NAME (new_record_type) = new_id;
818 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
819 TYPE_STUB_DECL (new_record_type)
 820 = create_type_stub_decl (new_id, new_record_type);
821 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
822 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
823 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
824 TYPE_SIZE_UNIT (new_record_type)
825 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
826
827 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
828
829 /* Now scan all the fields, replacing each field with a new
830 field corresponding to the new encoding. */
831 for (old_field = TYPE_FIELDS (record_type); old_field;
832 old_field = TREE_CHAIN (old_field))
833 {
834 tree field_type = TREE_TYPE (old_field);
835 tree field_name = DECL_NAME (old_field);
836 tree new_field;
837 tree curpos = bit_position (old_field);
838 bool var = false;
839 unsigned int align = 0;
840 tree pos;
841
842 /* See how the position was modified from the last position.
843
844 There are two basic cases we support: a value was added
845 to the last position or the last position was rounded to
 846 a boundary and then something was added. Check for the
847 first case first. If not, see if there is any evidence
848 of rounding. If so, round the last position and try
849 again.
850
851 If this is a union, the position can be taken as zero. */
852
853 /* Some computations depend on the shape of the position expression,
854 so strip conversions to make sure it's exposed. */
855 curpos = remove_conversions (curpos, true);
856
857 if (TREE_CODE (new_record_type) == UNION_TYPE)
858 pos = bitsize_zero_node, align = 0;
859 else
860 pos = compute_related_constant (curpos, last_pos);
861
862 if (!pos && TREE_CODE (curpos) == MULT_EXPR
863 && host_integerp (TREE_OPERAND (curpos, 1), 1))
864 {
865 tree offset = TREE_OPERAND (curpos, 0);
866 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
867
868 /* An offset which is a bitwise AND with a negative power of 2
869 means an alignment corresponding to this power of 2. */
870 offset = remove_conversions (offset, true);
871 if (TREE_CODE (offset) == BIT_AND_EXPR
872 && host_integerp (TREE_OPERAND (offset, 1), 0)
873 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
874 {
875 unsigned int pow
876 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
877 if (exact_log2 (pow) > 0)
878 align *= pow;
879 }
880
881 pos = compute_related_constant (curpos,
882 round_up (last_pos, align));
883 }
884 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
885 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
886 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
887 && host_integerp (TREE_OPERAND
888 (TREE_OPERAND (curpos, 0), 1),
889 1))
890 {
891 align
892 = tree_low_cst
893 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
894 pos = compute_related_constant (curpos,
895 round_up (last_pos, align));
896 }
897 else if (potential_alignment_gap (prev_old_field, old_field,
898 pos))
899 {
900 align = TYPE_ALIGN (field_type);
901 pos = compute_related_constant (curpos,
902 round_up (last_pos, align));
903 }
904
905 /* If we can't compute a position, set it to zero.
906
907 ??? We really should abort here, but it's too much work
908 to get this correct for all cases. */
909
910 if (!pos)
911 pos = bitsize_zero_node;
912
913 /* See if this type is variable-sized and make a pointer type
914 and indicate the indirection if so. Beware that the debug
915 back-end may adjust the position computed above according
916 to the alignment of the field type, i.e. the pointer type
917 in this case, if we don't preventively counter that. */
918 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
919 {
920 field_type = build_pointer_type (field_type);
921 if (align != 0 && TYPE_ALIGN (field_type) > align)
922 {
923 field_type = copy_node (field_type);
924 TYPE_ALIGN (field_type) = align;
925 }
926 var = true;
927 }
928
929 /* Make a new field name, if necessary. */
930 if (var || align != 0)
931 {
932 char suffix[16];
933
934 if (align != 0)
935 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
936 align / BITS_PER_UNIT);
937 else
938 strcpy (suffix, "XVL");
939
940 field_name = concat_id_with_name (field_name, suffix);
941 }
942
943 new_field = create_field_decl (field_name, field_type,
944 new_record_type, 0,
945 DECL_SIZE (old_field), pos, 0);
946 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
947 TYPE_FIELDS (new_record_type) = new_field;
948
949 /* If old_field is a QUAL_UNION_TYPE, take its size as being
950 zero. The only time it's not the last field of the record
951 is when there are other components at fixed positions after
952 it (meaning there was a rep clause for every field) and we
953 want to be able to encode them. */
954 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
955 (TREE_CODE (TREE_TYPE (old_field))
956 == QUAL_UNION_TYPE)
957 ? bitsize_zero_node
958 : DECL_SIZE (old_field));
959 prev_old_field = old_field;
960 }
961
962 TYPE_FIELDS (new_record_type)
963 = nreverse (TYPE_FIELDS (new_record_type));
964
965 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
966 }
967
968 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
969}
970
 971/* Append PARALLEL_TYPE on the chain of parallel types for DECL. */
972
973void
974add_parallel_type (tree decl, tree parallel_type)
975{
976 tree d = decl;
977
978 while (DECL_PARALLEL_TYPE (d))
979 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
980
981 SET_DECL_PARALLEL_TYPE (d, parallel_type);
982}
983
984/* Return the parallel type associated to a type, if any. */
985
986tree
987get_parallel_type (tree type)
988{
989 if (TYPE_STUB_DECL (type))
990 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
991 else
992 return NULL_TREE;
993}
994
995/* Utility function of above to merge LAST_SIZE, the previous size of a record
996 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
997 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
998 replace a value of zero with the old size. If HAS_REP is true, we take the
999 MAX of the end position of this field with LAST_SIZE. In all other cases,
1000 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1001
1002static tree
1003merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1004 bool has_rep)
1005{
1006 tree type = TREE_TYPE (last_size);
1007 tree new;
1008
1009 if (!special || TREE_CODE (size) != COND_EXPR)
1010 {
1011 new = size_binop (PLUS_EXPR, first_bit, size);
1012 if (has_rep)
1013 new = size_binop (MAX_EXPR, last_size, new);
1014 }
1015
1016 else
1017 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1018 integer_zerop (TREE_OPERAND (size, 1))
1019 ? last_size : merge_sizes (last_size, first_bit,
1020 TREE_OPERAND (size, 1),
1021 1, has_rep),
1022 integer_zerop (TREE_OPERAND (size, 2))
1023 ? last_size : merge_sizes (last_size, first_bit,
1024 TREE_OPERAND (size, 2),
1025 1, has_rep));
1026
 1027 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1028 when fed through substitute_in_expr) into thinking that a constant
1029 size is not constant. */
1030 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1031 new = TREE_OPERAND (new, 0);
1032
1033 return new;
1034}
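/* Worked example for merge_sizes: with HAS_REP false and SIZE not a
   COND_EXPR, LAST_SIZE = 32, FIRST_BIT = 32 and SIZE = 16 give
   32 + 16 = 48 bits; with HAS_REP true the result is MAX (32, 48) = 48,
   which only matters when a previous field already ends past this one.
   For a QUAL_UNION_TYPE variant (SPECIAL true) whose SIZE is a COND_EXPR,
   the recursion builds a matching COND_EXPR, substituting LAST_SIZE for
   the zero-sized arms.  */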
1035
1036/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1037 related by the addition of a constant. Return that constant if so. */
1038
1039static tree
1040compute_related_constant (tree op0, tree op1)
1041{
1042 tree op0_var, op1_var;
1043 tree op0_con = split_plus (op0, &op0_var);
1044 tree op1_con = split_plus (op1, &op1_var);
1045 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1046
1047 if (operand_equal_p (op0_var, op1_var, 0))
1048 return result;
1049 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1050 return result;
1051 else
1052 return 0;
1053}
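/* Worked example with hypothetical bitsizetype expressions: if OP0 is
   N * 8 + 24 and OP1 is N * 8 + 8, split_plus yields the same variable
   part N * 8 for both, so the function returns the constant 16.  If the
   variable parts cannot be matched up, it returns 0 instead.  */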
1054
1055/* Utility function of above to split a tree OP which may be a sum, into a
1056 constant part, which is returned, and a variable part, which is stored
1057 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1058 bitsizetype. */
1059
1060static tree
1061split_plus (tree in, tree *pvar)
1062{
1063 /* Strip NOPS in order to ease the tree traversal and maximize the
1064 potential for constant or plus/minus discovery. We need to be careful
1065 to always return and set *pvar to bitsizetype trees, but it's worth
1066 the effort. */
1067 STRIP_NOPS (in);
1068
1069 *pvar = convert (bitsizetype, in);
1070
1071 if (TREE_CODE (in) == INTEGER_CST)
1072 {
1073 *pvar = bitsize_zero_node;
1074 return convert (bitsizetype, in);
1075 }
1076 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1077 {
1078 tree lhs_var, rhs_var;
1079 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1080 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1081
1082 if (lhs_var == TREE_OPERAND (in, 0)
1083 && rhs_var == TREE_OPERAND (in, 1))
1084 return bitsize_zero_node;
1085
1086 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1087 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1088 }
1089 else
1090 return bitsize_zero_node;
1091}
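/* Worked example: for the bitsizetype expression N * 8 + 16, split_plus
   returns 16 and stores N * 8 in *PVAR; for a plain constant such as 32,
   it returns 32 and stores bitsize_zero_node; for anything that is neither
   a constant nor a sum, the whole expression goes to *PVAR and the
   returned constant part is bitsize_zero_node.  */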
1092\f
1093/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1094 subprogram. If it is void_type_node, then we are dealing with a procedure,
1095 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1096 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1097 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1098 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1099 object. RETURNS_BY_REF is true if the function returns by reference.
1100 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1101 first parameter) the address of the place to copy its result. */
1102
1103tree
1104create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1105 bool returns_unconstrained, bool returns_by_ref,
1106 bool returns_by_target_ptr)
1107{
1108 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1109 the subprogram formal parameters. This list is generated by traversing the
1110 input list of PARM_DECL nodes. */
1111 tree param_type_list = NULL;
1112 tree param_decl;
1113 tree type;
1114
1115 for (param_decl = param_decl_list; param_decl;
1116 param_decl = TREE_CHAIN (param_decl))
1117 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1118 param_type_list);
1119
1120 /* The list of the function parameter types has to be terminated by the void
1121 type to signal to the back-end that we are not dealing with a variable
1122 parameter subprogram, but that the subprogram has a fixed number of
1123 parameters. */
1124 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1125
1126 /* The list of argument types has been created in reverse
1127 so nreverse it. */
1128 param_type_list = nreverse (param_type_list);
1129
1130 type = build_function_type (return_type, param_type_list);
1131
1132 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
 1133 or the new type should have one, make a copy of TYPE. Likewise for
 1134 the RETURNS_UNCONSTRAINED, RETURNS_BY_REF and RETURNS_BY_TARGET_PTR flags. */
1135 if (TYPE_CI_CO_LIST (type) || cico_list
1136 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1137 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1138 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1139 type = copy_type (type);
1140
1141 TYPE_CI_CO_LIST (type) = cico_list;
1142 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1143 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1144 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1145 return type;
1146}
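/* A minimal sketch of a call, assuming a hypothetical chain of PARM_DECLs
   gnu_param_list built by create_param_decl and linked through TREE_CHAIN:

     tree gnu_type
       = create_subprog_type (void_type_node, gnu_param_list, NULL_TREE,
                              false, false, false);

   Using void_type_node as RETURN_TYPE makes this a procedure; the
   resulting FUNCTION_TYPE has its parameter type list terminated by
   void_type_node, so the back-end knows the parameter count is fixed.  */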
1147\f
1148/* Return a copy of TYPE but safe to modify in any way. */
1149
1150tree
1151copy_type (tree type)
1152{
1153 tree new = copy_node (type);
1154
1155 /* copy_node clears this field instead of copying it, because it is
1156 aliased with TREE_CHAIN. */
1157 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1158
1159 TYPE_POINTER_TO (new) = 0;
1160 TYPE_REFERENCE_TO (new) = 0;
1161 TYPE_MAIN_VARIANT (new) = new;
1162 TYPE_NEXT_VARIANT (new) = 0;
1163
1164 return new;
1165}
1166\f
1167/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1168 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1169 the decl. */
1170
1171tree
1172create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1173{
1174 /* First build a type for the desired range. */
1175 tree type = build_index_2_type (min, max);
1176
1177 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1178 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1179 is set, but not to INDEX, make a copy of this type with the requested
1180 index type. Note that we have no way of sharing these types, but that's
1181 only a small hole. */
1182 if (TYPE_INDEX_TYPE (type) == index)
1183 return type;
1184 else if (TYPE_INDEX_TYPE (type))
1185 type = copy_type (type);
1186
1187 SET_TYPE_INDEX_TYPE (type, index);
1188 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1189 return type;
1190}
1191\f
1192/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1193 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1194 its data type. */
1195
1196tree
1197create_type_stub_decl (tree type_name, tree type)
1198{
1199 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1200 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1201 emitted in DWARF. */
1202 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1203 DECL_ARTIFICIAL (type_decl) = 1;
1204 return type_decl;
1205}
1206
1207/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1208 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1209 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1210 true if we need to write debug information about this type. GNAT_NODE
1211 is used for the position of the decl. */
1212
1213tree
1214create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1215 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1216{
 1217 enum tree_code code = TREE_CODE (type);
1218 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1219 tree type_decl;
 1220
1221 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1222 gcc_assert (!TYPE_IS_DUMMY_P (type));
 1223
1224 /* If the type hasn't been named yet, we're naming it; preserve an existing
1225 TYPE_STUB_DECL that has been attached to it for some purpose. */
1226 if (!named && TYPE_STUB_DECL (type))
1227 {
1228 type_decl = TYPE_STUB_DECL (type);
1229 DECL_NAME (type_decl) = type_name;
1230 }
1231 else
1232 type_decl = build_decl (TYPE_DECL, type_name, type);
 1233
1234 DECL_ARTIFICIAL (type_decl) = artificial_p;
1235 gnat_pushdecl (type_decl, gnat_node);
1236 process_attributes (type_decl, attr_list);
1237
1238 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1239 This causes the name to be also viewed as a "tag" by the debug
1240 back-end, with the advantage that no DW_TAG_typedef is emitted
1241 for artificial "tagged" types in DWARF. */
1242 if (!named)
1243 TYPE_STUB_DECL (type) = type_decl;
1244
1245 /* Pass the type declaration to the debug back-end unless this is an
1246 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, an
1247 ENUMERAL_TYPE or RECORD_TYPE which are handled separately, or a
1248 type for which debugging information was not requested. */
1249 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1250 DECL_IGNORED_P (type_decl) = 1;
1251 else if (code != ENUMERAL_TYPE
1252 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1253 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1254 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1255 rest_of_type_decl_compilation (type_decl);
1256
1257 return type_decl;
1258}
 1259\f
1260/* Return a VAR_DECL or CONST_DECL node.
1261
1262 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1263 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1264 the GCC tree for an optional initial expression; NULL_TREE if none.
1265
1266 CONST_FLAG is true if this variable is constant, in which case we might
1267 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1268
1269 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1270 definition to be made visible outside of the current compilation unit, for
1271 instance variable definitions in a package specification.
1272
 1273 EXTERN_FLAG is true when processing an external variable declaration (as
1274 opposed to a definition: no storage is to be allocated for the variable).
1275
1276 STATIC_FLAG is only relevant when not at top level. In that case
1277 it indicates whether to always allocate storage to the variable.
1278
1279 GNAT_NODE is used for the position of the decl. */
1280
1281tree
1282create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1283 bool const_flag, bool public_flag, bool extern_flag,
1284 bool static_flag, bool const_decl_allowed_p,
1285 struct attrib *attr_list, Node_Id gnat_node)
1286{
1287 bool init_const
1288 = (var_init != 0
1289 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1290 && (global_bindings_p () || static_flag
1291 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1292 : TREE_CONSTANT (var_init)));
1293
1294 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
 1295 case the initializer may be used in lieu of the DECL node (as done in
1296 Identifier_to_gnu). This is useful to prevent the need of elaboration
1297 code when an identifier for which such a decl is made is in turn used as
1298 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1299 but extra constraints apply to this choice (see below) and are not
1300 relevant to the distinction we wish to make. */
1301 bool constant_p = const_flag && init_const;
1302
1303 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1304 and may be used for scalars in general but not for aggregates. */
1305 tree var_decl
1306 = build_decl ((constant_p && const_decl_allowed_p
1307 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1308 var_name, type);
1309
1310 /* If this is external, throw away any initializations (they will be done
1311 elsewhere) unless this is a constant for which we would like to remain
1312 able to get the initializer. If we are defining a global here, leave a
1313 constant initialization and save any variable elaborations for the
1314 elaboration routine. If we are just annotating types, throw away the
1315 initialization if it isn't a constant. */
1316 if ((extern_flag && !constant_p)
1317 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1318 var_init = NULL_TREE;
1319
1320 /* At the global level, an initializer requiring code to be generated
1321 produces elaboration statements. Check that such statements are allowed,
1322 that is, not violating a No_Elaboration_Code restriction. */
1323 if (global_bindings_p () && var_init != 0 && ! init_const)
1324 Check_Elaboration_Code_Allowed (gnat_node);
1325
1326 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1327 try to fiddle with DECL_COMMON. However, on platforms that don't
1328 support global BSS sections, uninitialized global variables would
1329 go in DATA instead, thus increasing the size of the executable. */
1330 if (!flag_no_common
1331 && TREE_CODE (var_decl) == VAR_DECL
1332 && !have_global_bss_p ())
1333 DECL_COMMON (var_decl) = 1;
1334 DECL_INITIAL (var_decl) = var_init;
1335 TREE_READONLY (var_decl) = const_flag;
1336 DECL_EXTERNAL (var_decl) = extern_flag;
1337 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1338 TREE_CONSTANT (var_decl) = constant_p;
1339 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1340 = TYPE_VOLATILE (type);
1341
1342 /* If it's public and not external, always allocate storage for it.
1343 At the global binding level we need to allocate static storage for the
1344 variable if and only if it's not external. If we are not at the top level
1345 we allocate automatic storage unless requested not to. */
1346 TREE_STATIC (var_decl)
1347 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1348
1349 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1350 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1351
1352 process_attributes (var_decl, attr_list);
1353
1354 /* Add this decl to the current binding level. */
1355 gnat_pushdecl (var_decl, gnat_node);
1356
1357 if (TREE_SIDE_EFFECTS (var_decl))
1358 TREE_ADDRESSABLE (var_decl) = 1;
1359
1360 if (TREE_CODE (var_decl) != CONST_DECL)
1361 {
1362 if (global_bindings_p ())
1363 rest_of_decl_compilation (var_decl, true, 0);
1364 }
1365 else
1366 expand_decl (var_decl);
1367
1368 return var_decl;
1369}
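/* Worked example of the CONST_DECL/VAR_DECL choice above: a constant
   scalar whose initializer is itself constant (so CONST_FLAG and the
   computed init_const are both true) yields a CONST_DECL when
   CONST_DECL_ALLOWED_P; a constant of an aggregate type, or one whose
   initializer needs elaboration code, gets a TREE_READONLY VAR_DECL
   instead.  */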
1370\f
1371/* Return true if TYPE, an aggregate type, contains (or is) an array. */
1372
1373static bool
1374aggregate_type_contains_array_p (tree type)
1375{
1376 switch (TREE_CODE (type))
1377 {
1378 case RECORD_TYPE:
1379 case UNION_TYPE:
1380 case QUAL_UNION_TYPE:
1381 {
1382 tree field;
1383 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1384 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1385 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1386 return true;
1387 return false;
1388 }
1389
1390 case ARRAY_TYPE:
1391 return true;
1392
1393 default:
1394 gcc_unreachable ();
1395 }
1396}
1397
 1398/* Returns a FIELD_DECL node. FIELD_NAME is the field name, FIELD_TYPE is its
1399 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1400 this field is in a record type with a "pragma pack". If SIZE is nonzero
1401 it is the specified size for this field. If POS is nonzero, it is the bit
1402 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1403 the address of this field for aliasing purposes. If it is negative, we
1404 should not make a bitfield, which is used by make_aligning_type. */
1405
1406tree
1407create_field_decl (tree field_name, tree field_type, tree record_type,
1408 int packed, tree size, tree pos, int addressable)
1409{
1410 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1411
1412 DECL_CONTEXT (field_decl) = record_type;
1413 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1414
1415 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1416 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1417 Likewise for an aggregate without specified position that contains an
1418 array, because in this case slices of variable length of this array
1419 must be handled by GCC and variable-sized objects need to be aligned
1420 to at least a byte boundary. */
1421 if (packed && (TYPE_MODE (field_type) == BLKmode
1422 || (!pos
1423 && AGGREGATE_TYPE_P (field_type)
1424 && aggregate_type_contains_array_p (field_type))))
1425 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1426
1427 /* If a size is specified, use it. Otherwise, if the record type is packed
1428 compute a size to use, which may differ from the object's natural size.
1429 We always set a size in this case to trigger the checks for bitfield
1430 creation below, which is typically required when no position has been
1431 specified. */
1432 if (size)
1433 size = convert (bitsizetype, size);
1434 else if (packed == 1)
1435 {
1436 size = rm_size (field_type);
1437
1438 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1439 byte. */
1440 if (TREE_CODE (size) == INTEGER_CST
1441 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1442 size = round_up (size, BITS_PER_UNIT);
1443 }
1444
 1445 /* If ADDRESSABLE allows us to make a bitfield, we do so when a size is
 1446 specified, for two reasons: first, if the size differs from the natural
 1447 size; second, if the alignment is insufficient. There are a number of
 1448 ways the latter can be true.
1449
1450 We never make a bitfield if the type of the field has a nonconstant size,
1451 because no such entity requiring bitfield operations should reach here.
1452
1453 We do *preventively* make a bitfield when there might be the need for it
1454 but we don't have all the necessary information to decide, as is the case
1455 of a field with no specified position in a packed record.
1456
1457 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1458 in layout_decl or finish_record_type to clear the bit_field indication if
1459 it is in fact not needed. */
1460 if (addressable >= 0
1461 && size
1462 && TREE_CODE (size) == INTEGER_CST
1463 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1464 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1465 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1466 || packed
1467 || (TYPE_ALIGN (record_type) != 0
1468 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1469 {
1470 DECL_BIT_FIELD (field_decl) = 1;
1471 DECL_SIZE (field_decl) = size;
1472 if (!packed && !pos)
1473 DECL_ALIGN (field_decl)
1474 = (TYPE_ALIGN (record_type) != 0
1475 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1476 : TYPE_ALIGN (field_type));
1477 }
1478
1479 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1480
1481 /* Bump the alignment if need be, either for bitfield/packing purposes or
1482 to satisfy the type requirements if no such consideration applies. When
1483 we get the alignment from the type, indicate if this is from an explicit
1484 user request, which prevents stor-layout from lowering it later on. */
1485 {
 1486 unsigned int bit_align
1487 = (DECL_BIT_FIELD (field_decl) ? 1
1488 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1489
1490 if (bit_align > DECL_ALIGN (field_decl))
1491 DECL_ALIGN (field_decl) = bit_align;
1492 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1493 {
1494 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1495 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1496 }
1497 }
1498
1499 if (pos)
1500 {
1501 /* We need to pass in the alignment the DECL is known to have.
1502 This is the lowest-order bit set in POS, but no more than
1503 the alignment of the record, if one is specified. Note
1504 that an alignment of 0 is taken as infinite. */
1505 unsigned int known_align;
1506
1507 if (host_integerp (pos, 1))
1508 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1509 else
1510 known_align = BITS_PER_UNIT;
1511
1512 if (TYPE_ALIGN (record_type)
1513 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1514 known_align = TYPE_ALIGN (record_type);
1515
1516 layout_decl (field_decl, known_align);
1517 SET_DECL_OFFSET_ALIGN (field_decl,
1518 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1519 : BITS_PER_UNIT);
1520 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1521 &DECL_FIELD_BIT_OFFSET (field_decl),
1522 DECL_OFFSET_ALIGN (field_decl), pos);
1523
1524 DECL_HAS_REP_P (field_decl) = 1;
1525 }
1526
1527 /* In addition to what our caller says, claim the field is addressable if we
1528 know that its type is not suitable.
1529
1530 The field may also be "technically" nonaddressable, meaning that even if
1531 we attempt to take the field's address we will actually get the address
1532 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1533 value we have at this point is not accurate enough, so we don't account
1534 for this here and let finish_record_type decide. */
4c5a0615 1535 if (!addressable && !type_for_nonaliased_component_p (field_type))
a1ab4c31
AC
1536 addressable = 1;
1537
1538 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1539
1540 return field_decl;
1541}
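/* Illustrative sketch, not part of the original sources: for a component of
   an 8-bit integer type given a 3-bit size in a packed record, the checks
   above see a constant SIZE that differs from the type's natural size, so
   the resulting FIELD_DECL roughly ends up with

     DECL_BIT_FIELD (field_decl) = 1;
     DECL_SIZE (field_decl) = size;
     DECL_PACKED (field_decl) = 1;

   where SIZE is the 3-bit constant converted to bitsizetype.  The particular
   sizes are made up; they only show how the flags set above relate to a
   packed, undersized component.  */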
1542\f
1543/* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1544 PARAM_TYPE is its type. READONLY is true if the parameter is
1545 readonly (either an In parameter or an address of a pass-by-ref
1546 parameter). */
1547
1548tree
1549create_param_decl (tree param_name, tree param_type, bool readonly)
1550{
1551 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1552
1553 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1554 lead to various ABI violations. */
1555 if (targetm.calls.promote_prototypes (param_type)
1556 && (TREE_CODE (param_type) == INTEGER_TYPE
01ddebf2
EB
1557 || TREE_CODE (param_type) == ENUMERAL_TYPE
1558 || TREE_CODE (param_type) == BOOLEAN_TYPE)
a1ab4c31
AC
1559 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1560 {
1561 /* We have to be careful about biased types here. Make a subtype
1562 of integer_type_node with the proper biasing. */
1563 if (TREE_CODE (param_type) == INTEGER_TYPE
1564 && TYPE_BIASED_REPRESENTATION_P (param_type))
1565 {
1566 param_type
1567 = copy_type (build_range_type (integer_type_node,
1568 TYPE_MIN_VALUE (param_type),
1569 TYPE_MAX_VALUE (param_type)));
1570
1571 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1572 }
1573 else
1574 param_type = integer_type_node;
1575 }
1576
1577 DECL_ARG_TYPE (param_decl) = param_type;
1578 TREE_READONLY (param_decl) = readonly;
1579 return param_decl;
1580}
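/* Illustrative sketch, not part of the original sources: creating the
   PARM_DECL for a hypothetical 8-bit "in" parameter named X might look like

     tree param_type = gnat_type_for_size (8, 0);
     tree param_decl
       = create_param_decl (get_identifier ("x"), param_type, true);

   On a target where promote_prototypes holds, DECL_ARG_TYPE of the result is
   integer_type_node rather than the 8-bit type, as per the code above.  */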
1581\f
1582/* Given a DECL and ATTR_LIST, process the listed attributes. */
1583
1584void
1585process_attributes (tree decl, struct attrib *attr_list)
1586{
1587 for (; attr_list; attr_list = attr_list->next)
1588 switch (attr_list->type)
1589 {
1590 case ATTR_MACHINE_ATTRIBUTE:
1591 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1592 NULL_TREE),
1593 ATTR_FLAG_TYPE_IN_PLACE);
1594 break;
1595
1596 case ATTR_LINK_ALIAS:
1597 if (! DECL_EXTERNAL (decl))
1598 {
1599 TREE_STATIC (decl) = 1;
1600 assemble_alias (decl, attr_list->name);
1601 }
1602 break;
1603
1604 case ATTR_WEAK_EXTERNAL:
1605 if (SUPPORTS_WEAK)
1606 declare_weak (decl);
1607 else
1608 post_error ("?weak declarations not supported on this target",
1609 attr_list->error_point);
1610 break;
1611
1612 case ATTR_LINK_SECTION:
1613 if (targetm.have_named_sections)
1614 {
1615 DECL_SECTION_NAME (decl)
1616 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1617 IDENTIFIER_POINTER (attr_list->name));
1618 DECL_COMMON (decl) = 0;
1619 }
1620 else
1621 post_error ("?section attributes are not supported for this target",
1622 attr_list->error_point);
1623 break;
1624
1625 case ATTR_LINK_CONSTRUCTOR:
1626 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1627 TREE_USED (decl) = 1;
1628 break;
1629
1630 case ATTR_LINK_DESTRUCTOR:
1631 DECL_STATIC_DESTRUCTOR (decl) = 1;
1632 TREE_USED (decl) = 1;
1633 break;
40a14772
TG
1634
1635 case ATTR_THREAD_LOCAL_STORAGE:
1636 if (targetm.have_tls)
1637 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1638 else
1639 post_error ("thread-local storage not supported for this target",
1640 attr_list->error_point);
1641 break;
a1ab4c31
AC
1642 }
1643}
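/* Illustrative sketch, not part of the original sources: a caller wanting a
   link-section attribute applied would chain an element onto the list before
   invoking the routine above, along these lines

     struct attrib attr;
     attr.next        = NULL;
     attr.type        = ATTR_LINK_SECTION;
     attr.name        = get_identifier (".mysection");
     attr.args        = NULL_TREE;
     attr.error_point = gnat_error_point;
     process_attributes (decl, &attr);

   The section name and the GNAT_ERROR_POINT variable are made up for the
   example; the fields shown are those read by the switch above.  */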
1644\f
1645/* Record a global renaming pointer. */
1646
1647void
1648record_global_renaming_pointer (tree decl)
1649{
1650 gcc_assert (DECL_RENAMED_OBJECT (decl));
1651 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1652}
1653
1654/* Invalidate the global renaming pointers. */
1655
1656void
1657invalidate_global_renaming_pointers (void)
1658{
1659 unsigned int i;
1660 tree iter;
1661
1662 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1663 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1664
1665 VEC_free (tree, gc, global_renaming_pointers);
1666}
1667
 1668	/* Return true if VALUE is known to be a multiple of FACTOR, which must be
1669 a power of 2. */
1670
1671bool
1672value_factor_p (tree value, HOST_WIDE_INT factor)
1673{
1674 if (host_integerp (value, 1))
1675 return tree_low_cst (value, 1) % factor == 0;
1676
1677 if (TREE_CODE (value) == MULT_EXPR)
1678 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1679 || value_factor_p (TREE_OPERAND (value, 1), factor));
1680
1681 return false;
1682}
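/* Illustrative sketch, not part of the original sources: with FACTOR required
   to be a power of 2,

     value_factor_p (size_int (24), 8)    returns true, since 24 % 8 == 0
     value_factor_p (size_int (20), 8)    returns false

   and for a non-constant MULT_EXPR such as "n * 16", the recursion succeeds
   because one of the two operands is itself a multiple of the factor.  */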
1683
1684/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1685 unless we can prove these 2 fields are laid out in such a way that no gap
 1686	   exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1687 is the distance in bits between the end of PREV_FIELD and the starting
1688 position of CURR_FIELD. It is ignored if null. */
1689
1690static bool
1691potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1692{
 1693	  /* If this is the first field of the record, there cannot be any gap. */
1694 if (!prev_field)
1695 return false;
1696
1697 /* If the previous field is a union type, then return False: The only
1698 time when such a field is not the last field of the record is when
1699 there are other components at fixed positions after it (meaning there
1700 was a rep clause for every field), in which case we don't want the
1701 alignment constraint to override them. */
1702 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1703 return false;
1704
1705 /* If the distance between the end of prev_field and the beginning of
1706 curr_field is constant, then there is a gap if the value of this
 1707	     constant is nonzero. */
1708 if (offset && host_integerp (offset, 1))
1709 return !integer_zerop (offset);
1710
1711 /* If the size and position of the previous field are constant,
1712 then check the sum of this size and position. There will be a gap
 1713	     iff it is not a multiple of the current field alignment. */
1714 if (host_integerp (DECL_SIZE (prev_field), 1)
1715 && host_integerp (bit_position (prev_field), 1))
1716 return ((tree_low_cst (bit_position (prev_field), 1)
1717 + tree_low_cst (DECL_SIZE (prev_field), 1))
1718 % DECL_ALIGN (curr_field) != 0);
1719
1720 /* If both the position and size of the previous field are multiples
1721 of the current field alignment, there cannot be any gap. */
1722 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1723 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1724 return false;
1725
 1726	  /* Fall back to returning that there may be a potential gap. */
1727 return true;
1728}
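/* Illustrative sketch, not part of the original sources: if PREV_FIELD is a
   24-bit component placed at bit 0 and CURR_FIELD requires 16-bit alignment,
   the constant check above computes (0 + 24) % 16 != 0 and reports a
   potential gap; had PREV_FIELD been 32 bits wide instead, the sum would be
   a multiple of 16 and no gap would be reported.  */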
1729
1730/* Returns a LABEL_DECL node for LABEL_NAME. */
1731
1732tree
1733create_label_decl (tree label_name)
1734{
1735 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1736
1737 DECL_CONTEXT (label_decl) = current_function_decl;
1738 DECL_MODE (label_decl) = VOIDmode;
1739 DECL_SOURCE_LOCATION (label_decl) = input_location;
1740
1741 return label_decl;
1742}
1743\f
1744/* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1745 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1746 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1747 PARM_DECL nodes chained through the TREE_CHAIN field).
1748
1749 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1750 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1751
1752tree
1753create_subprog_decl (tree subprog_name, tree asm_name,
1754 tree subprog_type, tree param_decl_list, bool inline_flag,
1755 bool public_flag, bool extern_flag,
1756 struct attrib *attr_list, Node_Id gnat_node)
1757{
1758 tree return_type = TREE_TYPE (subprog_type);
1759 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1760
d84b344a
EB
1761 /* If this is a non-inline function nested inside an inlined external
1762 function, we cannot honor both requests without cloning the nested
1763 function in the current unit since it is private to the other unit.
1764 We could inline the nested function as well but it's probably better
1765 to err on the side of too little inlining. */
1766 if (!inline_flag
1767 && current_function_decl
1768 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 1769 && DECL_EXTERNAL (current_function_decl))
d84b344a 1770 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
a1ab4c31
AC
1771
1772 DECL_EXTERNAL (subprog_decl) = extern_flag;
1773 TREE_PUBLIC (subprog_decl) = public_flag;
1774 TREE_STATIC (subprog_decl) = 1;
1775 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1776 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1777 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
d84b344a 1778 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
a1ab4c31
AC
1779 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1780 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1781 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1782 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1783
1784 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1785 target by-reference return mechanism. This is not supported all the
1786 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1787 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1788 the RESULT_DECL instead - see gnat_genericize for more details. */
1789 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1790 {
1791 tree result_decl = DECL_RESULT (subprog_decl);
1792
1793 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1794 DECL_BY_REFERENCE (result_decl) = 1;
1795 }
1796
a1ab4c31
AC
1797 if (asm_name)
1798 {
1799 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1800
1801 /* The expand_main_function circuitry expects "main_identifier_node" to
1802 designate the DECL_NAME of the 'main' entry point, in turn expected
1803 to be declared as the "main" function literally by default. Ada
1804 program entry points are typically declared with a different name
1805 within the binder generated file, exported as 'main' to satisfy the
1806 system expectations. Redirect main_identifier_node in this case. */
1807 if (asm_name == main_identifier_node)
1808 main_identifier_node = DECL_NAME (subprog_decl);
1809 }
1810
1811 process_attributes (subprog_decl, attr_list);
1812
1813 /* Add this decl to the current binding level. */
1814 gnat_pushdecl (subprog_decl, gnat_node);
1815
1816 /* Output the assembler code and/or RTL for the declaration. */
1817 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1818
1819 return subprog_decl;
1820}
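/* Illustrative sketch, not part of the original sources: given PROC_TYPE, a
   FUNCTION_TYPE node returning void, and GNAT_NODE, the associated GNAT
   node, a minimal use of the routine above for a library-level procedure
   could be

     tree proc_decl
       = create_subprog_decl (get_identifier ("my_proc"), NULL_TREE,
                              proc_type, NULL_TREE,
                              false, true, false, NULL, gnat_node);

   The name and flag values are made up; the call only shows how the
   parameters map onto the FUNCTION_DECL fields set above.  */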
1821\f
1822/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1823 body. This routine needs to be invoked before processing the declarations
1824 appearing in the subprogram. */
1825
1826void
1827begin_subprog_body (tree subprog_decl)
1828{
1829 tree param_decl;
1830
1831 current_function_decl = subprog_decl;
1832 announce_function (subprog_decl);
1833
1834 /* Enter a new binding level and show that all the parameters belong to
1835 this function. */
1836 gnat_pushlevel ();
1837 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1838 param_decl = TREE_CHAIN (param_decl))
1839 DECL_CONTEXT (param_decl) = subprog_decl;
1840
1841 make_decl_rtl (subprog_decl);
1842
1843 /* We handle pending sizes via the elaboration of types, so we don't need to
1844 save them. This causes them to be marked as part of the outer function
1845 and then discarded. */
1846 get_pending_sizes ();
1847}
1848
1849
1850/* Helper for the genericization callback. Return a dereference of VAL
1851 if it is of a reference type. */
1852
1853static tree
1854convert_from_reference (tree val)
1855{
1856 tree value_type, ref;
1857
1858 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1859 return val;
1860
1861 value_type = TREE_TYPE (TREE_TYPE (val));
1862 ref = build1 (INDIRECT_REF, value_type, val);
1863
1864 /* See if what we reference is CONST or VOLATILE, which requires
1865 looking into array types to get to the component type. */
1866
1867 while (TREE_CODE (value_type) == ARRAY_TYPE)
1868 value_type = TREE_TYPE (value_type);
1869
1870 TREE_READONLY (ref)
1871 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1872 TREE_THIS_VOLATILE (ref)
1873 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1874
1875 TREE_SIDE_EFFECTS (ref)
1876 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1877
1878 return ref;
1879}
1880
1881/* Helper for the genericization callback. Returns true if T denotes
1882 a RESULT_DECL with DECL_BY_REFERENCE set. */
1883
1884static inline bool
1885is_byref_result (tree t)
1886{
1887 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1888}
1889
1890
1891/* Tree walking callback for gnat_genericize. Currently ...
1892
1893 o Adjust references to the function's DECL_RESULT if it is marked
1894 DECL_BY_REFERENCE and so has had its type turned into a reference
1895 type at the end of the function compilation. */
1896
1897static tree
1898gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1899{
1900 /* This implementation is modeled after what the C++ front-end is
 1901	     doing, which is the basis of the downstream passes' behavior. */
1902
1903 tree stmt = *stmt_p;
1904 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1905
1906 /* If we have a direct mention of the result decl, dereference. */
1907 if (is_byref_result (stmt))
1908 {
1909 *stmt_p = convert_from_reference (stmt);
1910 *walk_subtrees = 0;
1911 return NULL;
1912 }
1913
1914 /* Otherwise, no need to walk the same tree twice. */
1915 if (pointer_set_contains (p_set, stmt))
1916 {
1917 *walk_subtrees = 0;
1918 return NULL_TREE;
1919 }
1920
1921 /* If we are taking the address of what now is a reference, just get the
1922 reference value. */
1923 if (TREE_CODE (stmt) == ADDR_EXPR
1924 && is_byref_result (TREE_OPERAND (stmt, 0)))
1925 {
1926 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1927 *walk_subtrees = 0;
1928 }
1929
 1930	  /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
1931 else if (TREE_CODE (stmt) == RETURN_EXPR
1932 && TREE_OPERAND (stmt, 0)
1933 && is_byref_result (TREE_OPERAND (stmt, 0)))
1934 *walk_subtrees = 0;
1935
1936 /* Don't look inside trees that cannot embed references of interest. */
1937 else if (IS_TYPE_OR_DECL_P (stmt))
1938 *walk_subtrees = 0;
1939
1940 pointer_set_insert (p_set, *stmt_p);
1941
1942 return NULL;
1943}
1944
1945/* Perform lowering of Ada trees to GENERIC. In particular:
1946
1947 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
1948 and adjust all the references to this decl accordingly. */
1949
1950static void
1951gnat_genericize (tree fndecl)
1952{
1953 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
1954 was handled by simply setting TREE_ADDRESSABLE on the result type.
1955 Everything required to actually pass by invisible ref using the target
1956 mechanism (e.g. extra parameter) was handled at RTL expansion time.
1957
1958 This doesn't work with GCC 4 any more for several reasons. First, the
1959 gimplification process might need the creation of temporaries of this
1960 type, and the gimplifier ICEs on such attempts. Second, the middle-end
1961 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
1962 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
1963 be explicitly accounted for by the front-end in the function body.
1964
1965 We achieve the complete transformation in two steps:
1966
1967 1/ create_subprog_decl performs early attribute tweaks: it clears
1968 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
1969 the result decl. The former ensures that the bit isn't set in the GCC
1970 tree saved for the function, so prevents ICEs on temporary creation.
1971 The latter we use here to trigger the rest of the processing.
1972
1973 2/ This function performs the type transformation on the result decl
1974 and adjusts all the references to this decl from the function body
1975 accordingly.
1976
1977 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
1978 strategy, which escapes the gimplifier temporary creation issues by
 1979	     creating its own temporaries using TARGET_EXPR nodes. Our way relies
1980 on simple specific support code in aggregate_value_p to look at the
1981 target function result decl explicitly. */
1982
1983 struct pointer_set_t *p_set;
1984 tree decl_result = DECL_RESULT (fndecl);
1985
1986 if (!DECL_BY_REFERENCE (decl_result))
1987 return;
1988
1989 /* Make the DECL_RESULT explicitly by-reference and adjust all the
1990 occurrences in the function body using the common tree-walking facility.
1991 We want to see every occurrence of the result decl to adjust the
 1992	     referencing tree, so we need to use our own pointer set to control which
1993 trees should be visited again or not. */
1994
1995 p_set = pointer_set_create ();
1996
1997 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
1998 TREE_ADDRESSABLE (decl_result) = 0;
1999 relayout_decl (decl_result);
2000
2001 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2002
2003 pointer_set_destroy (p_set);
2004}
2005
2006/* Finish the definition of the current subprogram BODY and compile it all the
 2007	   way to assembler language output. ELAB_P tells whether this is called for an
 2008	   elaboration routine, which is to be entirely discarded if empty. */
2009
2010void
2011end_subprog_body (tree body, bool elab_p)
2012{
2013 tree fndecl = current_function_decl;
2014
2015 /* Mark the BLOCK for this level as being for this function and pop the
2016 level. Since the vars in it are the parameters, clear them. */
2017 BLOCK_VARS (current_binding_level->block) = 0;
2018 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2019 DECL_INITIAL (fndecl) = current_binding_level->block;
2020 gnat_poplevel ();
2021
a1ab4c31
AC
2022 /* We handle pending sizes via the elaboration of types, so we don't
2023 need to save them. */
2024 get_pending_sizes ();
2025
2026 /* Mark the RESULT_DECL as being in this subprogram. */
2027 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2028
2029 DECL_SAVED_TREE (fndecl) = body;
2030
2031 current_function_decl = DECL_CONTEXT (fndecl);
2032 set_cfun (NULL);
2033
2034 /* We cannot track the location of errors past this point. */
2035 error_gnat_node = Empty;
2036
2037 /* If we're only annotating types, don't actually compile this function. */
2038 if (type_annotate_only)
2039 return;
2040
2041 /* Perform the required pre-gimplification transformations on the tree. */
2042 gnat_genericize (fndecl);
2043
2044 /* We do different things for nested and non-nested functions.
2045 ??? This should be in cgraph. */
2046 if (!DECL_CONTEXT (fndecl))
2047 {
2048 gnat_gimplify_function (fndecl);
2049
2050 /* If this is an empty elaboration proc, just discard the node.
2051 Otherwise, compile further. */
2052 if (elab_p && empty_body_p (gimple_body (fndecl)))
2053 cgraph_remove_node (cgraph_node (fndecl));
2054 else
2055 cgraph_finalize_function (fndecl, false);
2056 }
2057 else
2058 /* Register this function with cgraph just far enough to get it
2059 added to our parent's nested function list. */
2060 (void) cgraph_node (fndecl);
2061}
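/* Illustrative sketch, not part of the original sources: the expected calling
   protocol around a subprogram body is roughly

     tree fndecl = create_subprog_decl (...);
     begin_subprog_body (fndecl);
     ... build the GENERIC tree for the statements into BODY ...
     end_subprog_body (body, false);

   i.e. begin_subprog_body opens the binding level and sets
   current_function_decl, while end_subprog_body closes the level and hands
   the function over to cgraph, or drops it if it is an empty elaboration
   procedure called with ELAB_P true.  */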
2062
2063/* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2064
2065static void
2066gnat_gimplify_function (tree fndecl)
2067{
2068 struct cgraph_node *cgn;
2069
2070 dump_function (TDI_original, fndecl);
2071 gimplify_function_tree (fndecl);
2072 dump_function (TDI_generic, fndecl);
2073
2074 /* Convert all nested functions to GIMPLE now. We do things in this order
2075 so that items like VLA sizes are expanded properly in the context of the
2076 correct function. */
2077 cgn = cgraph_node (fndecl);
2078 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2079 gnat_gimplify_function (cgn->decl);
2080}
a1ab4c31
AC
2081
2082tree
2083gnat_builtin_function (tree decl)
2084{
2085 gnat_pushdecl (decl, Empty);
2086 return decl;
2087}
2088
2089/* Return an integer type with the number of bits of precision given by
2090 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2091 it is a signed type. */
2092
2093tree
2094gnat_type_for_size (unsigned precision, int unsignedp)
2095{
2096 tree t;
2097 char type_name[20];
2098
2099 if (precision <= 2 * MAX_BITS_PER_WORD
2100 && signed_and_unsigned_types[precision][unsignedp])
2101 return signed_and_unsigned_types[precision][unsignedp];
2102
2103 if (unsignedp)
2104 t = make_unsigned_type (precision);
2105 else
2106 t = make_signed_type (precision);
2107
2108 if (precision <= 2 * MAX_BITS_PER_WORD)
2109 signed_and_unsigned_types[precision][unsignedp] = t;
2110
2111 if (!TYPE_NAME (t))
2112 {
2113 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2114 TYPE_NAME (t) = get_identifier (type_name);
2115 }
2116
2117 return t;
2118}
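/* Illustrative sketch, not part of the original sources:

     tree u8  = gnat_type_for_size (8, 1);
     tree s32 = gnat_type_for_size (32, 0);

   yields an unsigned type named "UNSIGNED_8" and a signed type named
   "SIGNED_32"; a second call with the same arguments returns the node cached
   in signed_and_unsigned_types rather than building a new type.  */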
2119
2120/* Likewise for floating-point types. */
2121
2122static tree
2123float_type_for_precision (int precision, enum machine_mode mode)
2124{
2125 tree t;
2126 char type_name[20];
2127
2128 if (float_types[(int) mode])
2129 return float_types[(int) mode];
2130
2131 float_types[(int) mode] = t = make_node (REAL_TYPE);
2132 TYPE_PRECISION (t) = precision;
2133 layout_type (t);
2134
2135 gcc_assert (TYPE_MODE (t) == mode);
2136 if (!TYPE_NAME (t))
2137 {
2138 sprintf (type_name, "FLOAT_%d", precision);
2139 TYPE_NAME (t) = get_identifier (type_name);
2140 }
2141
2142 return t;
2143}
2144
2145/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2146 an unsigned type; otherwise a signed type is returned. */
2147
2148tree
2149gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2150{
2151 if (mode == BLKmode)
2152 return NULL_TREE;
2153 else if (mode == VOIDmode)
2154 return void_type_node;
2155 else if (COMPLEX_MODE_P (mode))
2156 return NULL_TREE;
2157 else if (SCALAR_FLOAT_MODE_P (mode))
2158 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2159 else if (SCALAR_INT_MODE_P (mode))
2160 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2161 else
2162 return NULL_TREE;
2163}
2164
2165/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2166
2167tree
2168gnat_unsigned_type (tree type_node)
2169{
2170 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2171
2172 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2173 {
2174 type = copy_node (type);
2175 TREE_TYPE (type) = type_node;
2176 }
2177 else if (TREE_TYPE (type_node)
2178 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2179 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2180 {
2181 type = copy_node (type);
2182 TREE_TYPE (type) = TREE_TYPE (type_node);
2183 }
2184
2185 return type;
2186}
2187
2188/* Return the signed version of a TYPE_NODE, a scalar type. */
2189
2190tree
2191gnat_signed_type (tree type_node)
2192{
2193 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2194
2195 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2196 {
2197 type = copy_node (type);
2198 TREE_TYPE (type) = type_node;
2199 }
2200 else if (TREE_TYPE (type_node)
2201 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2202 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2203 {
2204 type = copy_node (type);
2205 TREE_TYPE (type) = TREE_TYPE (type_node);
2206 }
2207
2208 return type;
2209}
2210
2211/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2212 transparently converted to each other. */
2213
2214int
2215gnat_types_compatible_p (tree t1, tree t2)
2216{
2217 enum tree_code code;
2218
2219 /* This is the default criterion. */
2220 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2221 return 1;
2222
2223 /* We only check structural equivalence here. */
2224 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2225 return 0;
2226
2227 /* Array types are also compatible if they are constrained and have
2228 the same component type and the same domain. */
2229 if (code == ARRAY_TYPE
2230 && TREE_TYPE (t1) == TREE_TYPE (t2)
0adef32b
JJ
2231 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2232 || (TYPE_DOMAIN (t1)
2233 && TYPE_DOMAIN (t2)
2234 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2235 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2236 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2237 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
a1ab4c31
AC
2238 return 1;
2239
2240 /* Padding record types are also compatible if they pad the same
2241 type and have the same constant size. */
2242 if (code == RECORD_TYPE
2243 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2244 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2245 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2246 return 1;
2247
2248 return 0;
2249}
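/* Illustrative sketch, not part of the original sources: two distinct
   constrained array types coming from different subtype declarations, say
   both equivalent to "String (1 .. 10)", are not main variants of each
   other, yet the ARRAY_TYPE case above accepts them:

     gnat_types_compatible_p (t1, t2) == 1

   whenever TREE_TYPE (t1) == TREE_TYPE (t2) and both domains have equal
   constant bounds.  */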
2250\f
2251/* EXP is an expression for the size of an object. If this size contains
2252 discriminant references, replace them with the maximum (if MAX_P) or
2253 minimum (if !MAX_P) possible value of the discriminant. */
2254
2255tree
2256max_size (tree exp, bool max_p)
2257{
2258 enum tree_code code = TREE_CODE (exp);
2259 tree type = TREE_TYPE (exp);
2260
2261 switch (TREE_CODE_CLASS (code))
2262 {
2263 case tcc_declaration:
2264 case tcc_constant:
2265 return exp;
2266
2267 case tcc_vl_exp:
2268 if (code == CALL_EXPR)
2269 {
2270 tree *argarray;
2271 int i, n = call_expr_nargs (exp);
2272 gcc_assert (n > 0);
2273
2274 argarray = (tree *) alloca (n * sizeof (tree));
2275 for (i = 0; i < n; i++)
2276 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2277 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2278 }
2279 break;
2280
2281 case tcc_reference:
2282 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2283 modify. Otherwise, we treat it like a variable. */
2284 if (!CONTAINS_PLACEHOLDER_P (exp))
2285 return exp;
2286
2287 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2288 return
2289 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2290
2291 case tcc_comparison:
2292 return max_p ? size_one_node : size_zero_node;
2293
2294 case tcc_unary:
2295 case tcc_binary:
2296 case tcc_expression:
2297 switch (TREE_CODE_LENGTH (code))
2298 {
2299 case 1:
2300 if (code == NON_LVALUE_EXPR)
2301 return max_size (TREE_OPERAND (exp, 0), max_p);
2302 else
2303 return
2304 fold_build1 (code, type,
2305 max_size (TREE_OPERAND (exp, 0),
2306 code == NEGATE_EXPR ? !max_p : max_p));
2307
2308 case 2:
2309 if (code == COMPOUND_EXPR)
2310 return max_size (TREE_OPERAND (exp, 1), max_p);
2311
2312 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2313 may provide a tighter bound on max_size. */
2314 if (code == MINUS_EXPR
2315 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2316 {
2317 tree lhs = fold_build2 (MINUS_EXPR, type,
2318 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2319 TREE_OPERAND (exp, 1));
2320 tree rhs = fold_build2 (MINUS_EXPR, type,
2321 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2322 TREE_OPERAND (exp, 1));
2323 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2324 max_size (lhs, max_p),
2325 max_size (rhs, max_p));
2326 }
2327
2328 {
2329 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2330 tree rhs = max_size (TREE_OPERAND (exp, 1),
2331 code == MINUS_EXPR ? !max_p : max_p);
2332
2333 /* Special-case wanting the maximum value of a MIN_EXPR.
2334 In that case, if one side overflows, return the other.
2335 sizetype is signed, but we know sizes are non-negative.
2336 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2337 overflowing or the maximum possible value and the RHS
2338 a variable. */
2339 if (max_p
2340 && code == MIN_EXPR
2341 && TREE_CODE (rhs) == INTEGER_CST
2342 && TREE_OVERFLOW (rhs))
2343 return lhs;
2344 else if (max_p
2345 && code == MIN_EXPR
2346 && TREE_CODE (lhs) == INTEGER_CST
2347 && TREE_OVERFLOW (lhs))
2348 return rhs;
2349 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2350 && ((TREE_CODE (lhs) == INTEGER_CST
2351 && TREE_OVERFLOW (lhs))
2352 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2353 && !TREE_CONSTANT (rhs))
2354 return lhs;
2355 else
2356 return fold_build2 (code, type, lhs, rhs);
2357 }
2358
2359 case 3:
2360 if (code == SAVE_EXPR)
2361 return exp;
2362 else if (code == COND_EXPR)
2363 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2364 max_size (TREE_OPERAND (exp, 1), max_p),
2365 max_size (TREE_OPERAND (exp, 2), max_p));
2366 }
2367
2368 /* Other tree classes cannot happen. */
2369 default:
2370 break;
2371 }
2372
2373 gcc_unreachable ();
2374}
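/* Illustrative sketch, not part of the original sources: for a size
   expression of the form "D * 8" where D is a discriminant reference whose
   subtype ranges over 1 .. 10, the tcc_reference case above substitutes
   TYPE_MAX_VALUE (resp. TYPE_MIN_VALUE) of that subtype, so that

     max_size (exp, true)    folds to 80
     max_size (exp, false)   folds to 8

   with the overflow guards of the binary case applied along the way.  */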
2375\f
2376/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2377 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2378 Return a constructor for the template. */
2379
2380tree
2381build_template (tree template_type, tree array_type, tree expr)
2382{
2383 tree template_elts = NULL_TREE;
2384 tree bound_list = NULL_TREE;
2385 tree field;
2386
2387 while (TREE_CODE (array_type) == RECORD_TYPE
2388 && (TYPE_IS_PADDING_P (array_type)
2389 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2390 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2391
2392 if (TREE_CODE (array_type) == ARRAY_TYPE
2393 || (TREE_CODE (array_type) == INTEGER_TYPE
2394 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2395 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2396
2397 /* First make the list for a CONSTRUCTOR for the template. Go down the
2398 field list of the template instead of the type chain because this
2399 array might be an Ada array of arrays and we can't tell where the
2400 nested arrays stop being the underlying object. */
2401
2402 for (field = TYPE_FIELDS (template_type); field;
2403 (bound_list
2404 ? (bound_list = TREE_CHAIN (bound_list))
2405 : (array_type = TREE_TYPE (array_type))),
2406 field = TREE_CHAIN (TREE_CHAIN (field)))
2407 {
2408 tree bounds, min, max;
2409
2410 /* If we have a bound list, get the bounds from there. Likewise
2411 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2412 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2413 This will give us a maximum range. */
2414 if (bound_list)
2415 bounds = TREE_VALUE (bound_list);
2416 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2417 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2418 else if (expr && TREE_CODE (expr) == PARM_DECL
2419 && DECL_BY_COMPONENT_PTR_P (expr))
2420 bounds = TREE_TYPE (field);
2421 else
2422 gcc_unreachable ();
2423
2424 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2425 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2426
2427 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2428 substitute it from OBJECT. */
2429 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2430 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2431
2432 template_elts = tree_cons (TREE_CHAIN (field), max,
2433 tree_cons (field, min, template_elts));
2434 }
2435
2436 return gnat_build_constructor (template_type, nreverse (template_elts));
2437}
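/* Illustrative sketch, not part of the original sources: for a constrained
   two-dimensional array, the template built above is a record with two
   fields per dimension, and the constructor pairs each (field, bound) in
   turn, conceptually

     { low bound of dim 1, high bound of dim 1,
       low bound of dim 2, high bound of dim 2 }

   with any PLACEHOLDER_EXPR in the bounds substituted from EXPR.  */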
2438\f
6ca2b0a0 2439/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify
a1ab4c31
AC
2440 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2441 in the type contains in its DECL_INITIAL the expression to use when
2442 a constructor is made for the type. GNAT_ENTITY is an entity used
2443 to print out an error message if the mechanism cannot be applied to
2444 an object of that type and also for the name. */
2445
2446tree
d628c015 2447build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
a1ab4c31
AC
2448{
2449 tree record_type = make_node (RECORD_TYPE);
2450 tree pointer32_type;
2451 tree field_list = 0;
2452 int class;
2453 int dtype = 0;
2454 tree inner_type;
2455 int ndim;
2456 int i;
2457 tree *idx_arr;
2458 tree tem;
2459
2460 /* If TYPE is an unconstrained array, use the underlying array type. */
2461 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2462 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2463
2464 /* If this is an array, compute the number of dimensions in the array,
2465 get the index types, and point to the inner type. */
2466 if (TREE_CODE (type) != ARRAY_TYPE)
2467 ndim = 0;
2468 else
2469 for (ndim = 1, inner_type = type;
2470 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2471 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2472 ndim++, inner_type = TREE_TYPE (inner_type))
2473 ;
2474
2475 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2476
d628c015 2477 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
a1ab4c31
AC
2478 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2479 for (i = ndim - 1, inner_type = type;
2480 i >= 0;
2481 i--, inner_type = TREE_TYPE (inner_type))
2482 idx_arr[i] = TYPE_DOMAIN (inner_type);
2483 else
2484 for (i = 0, inner_type = type;
2485 i < ndim;
2486 i++, inner_type = TREE_TYPE (inner_type))
2487 idx_arr[i] = TYPE_DOMAIN (inner_type);
2488
2489 /* Now get the DTYPE value. */
2490 switch (TREE_CODE (type))
2491 {
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
01ddebf2 2494 case BOOLEAN_TYPE:
a1ab4c31
AC
2495 if (TYPE_VAX_FLOATING_POINT_P (type))
2496 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2497 {
2498 case 6:
2499 dtype = 10;
2500 break;
2501 case 9:
2502 dtype = 11;
2503 break;
2504 case 15:
2505 dtype = 27;
2506 break;
2507 }
2508 else
2509 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2510 {
2511 case 8:
2512 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2513 break;
2514 case 16:
2515 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2516 break;
2517 case 32:
2518 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2519 break;
2520 case 64:
2521 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2522 break;
2523 case 128:
2524 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2525 break;
2526 }
2527 break;
2528
2529 case REAL_TYPE:
2530 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2531 break;
2532
2533 case COMPLEX_TYPE:
2534 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2535 && TYPE_VAX_FLOATING_POINT_P (type))
2536 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2537 {
2538 case 6:
2539 dtype = 12;
2540 break;
2541 case 9:
2542 dtype = 13;
2543 break;
2544 case 15:
2545 dtype = 29;
2546 }
2547 else
2548 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2549 break;
2550
2551 case ARRAY_TYPE:
2552 dtype = 14;
2553 break;
2554
2555 default:
2556 break;
2557 }
2558
2559 /* Get the CLASS value. */
2560 switch (mech)
2561 {
2562 case By_Descriptor_A:
d628c015 2563 case By_Short_Descriptor_A:
a1ab4c31
AC
2564 class = 4;
2565 break;
2566 case By_Descriptor_NCA:
d628c015 2567 case By_Short_Descriptor_NCA:
a1ab4c31
AC
2568 class = 10;
2569 break;
2570 case By_Descriptor_SB:
d628c015 2571 case By_Short_Descriptor_SB:
a1ab4c31
AC
2572 class = 15;
2573 break;
2574 case By_Descriptor:
d628c015 2575 case By_Short_Descriptor:
a1ab4c31 2576 case By_Descriptor_S:
d628c015 2577 case By_Short_Descriptor_S:
a1ab4c31
AC
2578 default:
2579 class = 1;
2580 break;
2581 }
2582
2583 /* Make the type for a descriptor for VMS. The first four fields
2584 are the same for all types. */
2585
2586 field_list
2587 = chainon (field_list,
2588 make_descriptor_field
2589 ("LENGTH", gnat_type_for_size (16, 1), record_type,
d628c015
DR
2590 size_in_bytes ((mech == By_Descriptor_A ||
2591 mech == By_Short_Descriptor_A)
2592 ? inner_type : type)));
a1ab4c31
AC
2593
2594 field_list = chainon (field_list,
2595 make_descriptor_field ("DTYPE",
2596 gnat_type_for_size (8, 1),
2597 record_type, size_int (dtype)));
2598 field_list = chainon (field_list,
2599 make_descriptor_field ("CLASS",
2600 gnat_type_for_size (8, 1),
2601 record_type, size_int (class)));
2602
2603 /* Of course this will crash at run-time if the address space is not
2604 within the low 32 bits, but there is nothing else we can do. */
2605 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2606
2607 field_list
2608 = chainon (field_list,
2609 make_descriptor_field
2610 ("POINTER", pointer32_type, record_type,
2611 build_unary_op (ADDR_EXPR,
2612 pointer32_type,
2613 build0 (PLACEHOLDER_EXPR, type))));
2614
2615 switch (mech)
2616 {
2617 case By_Descriptor:
d628c015 2618 case By_Short_Descriptor:
a1ab4c31 2619 case By_Descriptor_S:
d628c015 2620 case By_Short_Descriptor_S:
a1ab4c31
AC
2621 break;
2622
2623 case By_Descriptor_SB:
d628c015 2624 case By_Short_Descriptor_SB:
a1ab4c31
AC
2625 field_list
2626 = chainon (field_list,
2627 make_descriptor_field
2628 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2629 TREE_CODE (type) == ARRAY_TYPE
2630 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2631 field_list
2632 = chainon (field_list,
2633 make_descriptor_field
2634 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2635 TREE_CODE (type) == ARRAY_TYPE
2636 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2637 break;
2638
2639 case By_Descriptor_A:
d628c015 2640 case By_Short_Descriptor_A:
a1ab4c31 2641 case By_Descriptor_NCA:
d628c015 2642 case By_Short_Descriptor_NCA:
a1ab4c31
AC
2643 field_list = chainon (field_list,
2644 make_descriptor_field ("SCALE",
2645 gnat_type_for_size (8, 1),
2646 record_type,
2647 size_zero_node));
2648
2649 field_list = chainon (field_list,
2650 make_descriptor_field ("DIGITS",
2651 gnat_type_for_size (8, 1),
2652 record_type,
2653 size_zero_node));
2654
2655 field_list
2656 = chainon (field_list,
2657 make_descriptor_field
2658 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
d628c015
DR
2659 size_int ((mech == By_Descriptor_NCA ||
2660 mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
2661 ? 0
2662 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2663 : (TREE_CODE (type) == ARRAY_TYPE
2664 && TYPE_CONVENTION_FORTRAN_P (type)
2665 ? 224 : 192))));
2666
2667 field_list = chainon (field_list,
2668 make_descriptor_field ("DIMCT",
2669 gnat_type_for_size (8, 1),
2670 record_type,
2671 size_int (ndim)));
2672
2673 field_list = chainon (field_list,
2674 make_descriptor_field ("ARSIZE",
2675 gnat_type_for_size (32, 1),
2676 record_type,
2677 size_in_bytes (type)));
2678
2679 /* Now build a pointer to the 0,0,0... element. */
2680 tem = build0 (PLACEHOLDER_EXPR, type);
2681 for (i = 0, inner_type = type; i < ndim;
2682 i++, inner_type = TREE_TYPE (inner_type))
2683 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2684 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2685 NULL_TREE, NULL_TREE);
2686
2687 field_list
2688 = chainon (field_list,
2689 make_descriptor_field
2690 ("A0",
2691 build_pointer_type_for_mode (inner_type, SImode, false),
2692 record_type,
2693 build1 (ADDR_EXPR,
2694 build_pointer_type_for_mode (inner_type, SImode,
2695 false),
2696 tem)));
2697
2698 /* Next come the addressing coefficients. */
2699 tem = size_one_node;
2700 for (i = 0; i < ndim; i++)
2701 {
2702 char fname[3];
2703 tree idx_length
2704 = size_binop (MULT_EXPR, tem,
2705 size_binop (PLUS_EXPR,
2706 size_binop (MINUS_EXPR,
2707 TYPE_MAX_VALUE (idx_arr[i]),
2708 TYPE_MIN_VALUE (idx_arr[i])),
2709 size_int (1)));
2710
d628c015
DR
2711 fname[0] = ((mech == By_Descriptor_NCA ||
2712 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
a1ab4c31
AC
2713 fname[1] = '0' + i, fname[2] = 0;
2714 field_list
2715 = chainon (field_list,
2716 make_descriptor_field (fname,
2717 gnat_type_for_size (32, 1),
2718 record_type, idx_length));
2719
d628c015 2720 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
2721 tem = idx_length;
2722 }
2723
2724 /* Finally here are the bounds. */
2725 for (i = 0; i < ndim; i++)
2726 {
2727 char fname[3];
2728
2729 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2730 field_list
2731 = chainon (field_list,
2732 make_descriptor_field
2733 (fname, gnat_type_for_size (32, 1), record_type,
2734 TYPE_MIN_VALUE (idx_arr[i])));
2735
2736 fname[0] = 'U';
2737 field_list
2738 = chainon (field_list,
2739 make_descriptor_field
2740 (fname, gnat_type_for_size (32, 1), record_type,
2741 TYPE_MAX_VALUE (idx_arr[i])));
2742 }
2743 break;
2744
2745 default:
2746 post_error ("unsupported descriptor type for &", gnat_entity);
2747 }
2748
10069d53 2749 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
a1ab4c31 2750 finish_record_type (record_type, field_list, 0, true);
a1ab4c31
AC
2751 return record_type;
2752}
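/* Illustrative sketch, not part of the original sources: for a
   one-dimensional constrained array passed By_Descriptor_A, the record type
   returned above contains, in order, the fields

     LENGTH, DTYPE, CLASS, POINTER, SCALE, DIGITS, AFLAGS, DIMCT,
     ARSIZE, A0, M0, L0, U0

   each FIELD_DECL carrying in its DECL_INITIAL the expression to be used
   when the actual descriptor is built at a point of call.  */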
2753
6ca2b0a0
DR
 2754/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify
2755 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2756 in the type contains in its DECL_INITIAL the expression to use when
2757 a constructor is made for the type. GNAT_ENTITY is an entity used
2758 to print out an error message if the mechanism cannot be applied to
2759 an object of that type and also for the name. */
2760
2761tree
d628c015 2762build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
6ca2b0a0
DR
2763{
2764 tree record64_type = make_node (RECORD_TYPE);
2765 tree pointer64_type;
2766 tree field_list64 = 0;
2767 int class;
2768 int dtype = 0;
2769 tree inner_type;
2770 int ndim;
2771 int i;
2772 tree *idx_arr;
2773 tree tem;
2774
2775 /* If TYPE is an unconstrained array, use the underlying array type. */
2776 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2777 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2778
2779 /* If this is an array, compute the number of dimensions in the array,
2780 get the index types, and point to the inner type. */
2781 if (TREE_CODE (type) != ARRAY_TYPE)
2782 ndim = 0;
2783 else
2784 for (ndim = 1, inner_type = type;
2785 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2786 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2787 ndim++, inner_type = TREE_TYPE (inner_type))
2788 ;
2789
2790 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2791
2792 if (mech != By_Descriptor_NCA
2793 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2794 for (i = ndim - 1, inner_type = type;
2795 i >= 0;
2796 i--, inner_type = TREE_TYPE (inner_type))
2797 idx_arr[i] = TYPE_DOMAIN (inner_type);
2798 else
2799 for (i = 0, inner_type = type;
2800 i < ndim;
2801 i++, inner_type = TREE_TYPE (inner_type))
2802 idx_arr[i] = TYPE_DOMAIN (inner_type);
2803
2804 /* Now get the DTYPE value. */
2805 switch (TREE_CODE (type))
2806 {
2807 case INTEGER_TYPE:
2808 case ENUMERAL_TYPE:
01ddebf2 2809 case BOOLEAN_TYPE:
6ca2b0a0
DR
2810 if (TYPE_VAX_FLOATING_POINT_P (type))
2811 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2812 {
2813 case 6:
2814 dtype = 10;
2815 break;
2816 case 9:
2817 dtype = 11;
2818 break;
2819 case 15:
2820 dtype = 27;
2821 break;
2822 }
2823 else
2824 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2825 {
2826 case 8:
2827 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2828 break;
2829 case 16:
2830 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2831 break;
2832 case 32:
2833 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2834 break;
2835 case 64:
2836 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2837 break;
2838 case 128:
2839 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2840 break;
2841 }
2842 break;
2843
2844 case REAL_TYPE:
2845 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2846 break;
2847
2848 case COMPLEX_TYPE:
2849 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2850 && TYPE_VAX_FLOATING_POINT_P (type))
2851 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2852 {
2853 case 6:
2854 dtype = 12;
2855 break;
2856 case 9:
2857 dtype = 13;
2858 break;
2859 case 15:
2860 dtype = 29;
2861 }
2862 else
2863 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2864 break;
2865
2866 case ARRAY_TYPE:
2867 dtype = 14;
2868 break;
2869
2870 default:
2871 break;
2872 }
2873
2874 /* Get the CLASS value. */
2875 switch (mech)
2876 {
2877 case By_Descriptor_A:
2878 class = 4;
2879 break;
2880 case By_Descriptor_NCA:
2881 class = 10;
2882 break;
2883 case By_Descriptor_SB:
2884 class = 15;
2885 break;
2886 case By_Descriptor:
2887 case By_Descriptor_S:
2888 default:
2889 class = 1;
2890 break;
2891 }
2892
2893 /* Make the type for a 64bit descriptor for VMS. The first six fields
2894 are the same for all types. */
2895
2896 field_list64 = chainon (field_list64,
2897 make_descriptor_field ("MBO",
2898 gnat_type_for_size (16, 1),
2899 record64_type, size_int (1)));
2900
2901 field_list64 = chainon (field_list64,
2902 make_descriptor_field ("DTYPE",
2903 gnat_type_for_size (8, 1),
2904 record64_type, size_int (dtype)));
2905 field_list64 = chainon (field_list64,
2906 make_descriptor_field ("CLASS",
2907 gnat_type_for_size (8, 1),
2908 record64_type, size_int (class)));
2909
2910 field_list64 = chainon (field_list64,
2911 make_descriptor_field ("MBMO",
2912 gnat_type_for_size (32, 1),
2913 record64_type, ssize_int (-1)));
2914
2915 field_list64
2916 = chainon (field_list64,
2917 make_descriptor_field
2918 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2919 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2920
2921 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2922
2923 field_list64
2924 = chainon (field_list64,
2925 make_descriptor_field
2926 ("POINTER", pointer64_type, record64_type,
2927 build_unary_op (ADDR_EXPR,
2928 pointer64_type,
2929 build0 (PLACEHOLDER_EXPR, type))));
2930
2931 switch (mech)
2932 {
2933 case By_Descriptor:
2934 case By_Descriptor_S:
2935 break;
2936
2937 case By_Descriptor_SB:
2938 field_list64
2939 = chainon (field_list64,
2940 make_descriptor_field
2941 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2942 TREE_CODE (type) == ARRAY_TYPE
2943 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2944 field_list64
2945 = chainon (field_list64,
2946 make_descriptor_field
2947 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2948 TREE_CODE (type) == ARRAY_TYPE
2949 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2950 break;
2951
2952 case By_Descriptor_A:
2953 case By_Descriptor_NCA:
2954 field_list64 = chainon (field_list64,
2955 make_descriptor_field ("SCALE",
2956 gnat_type_for_size (8, 1),
2957 record64_type,
2958 size_zero_node));
2959
2960 field_list64 = chainon (field_list64,
2961 make_descriptor_field ("DIGITS",
2962 gnat_type_for_size (8, 1),
2963 record64_type,
2964 size_zero_node));
2965
2966 field_list64
2967 = chainon (field_list64,
2968 make_descriptor_field
2969 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2970 size_int (mech == By_Descriptor_NCA
2971 ? 0
2972 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2973 : (TREE_CODE (type) == ARRAY_TYPE
2974 && TYPE_CONVENTION_FORTRAN_P (type)
2975 ? 224 : 192))));
2976
2977 field_list64 = chainon (field_list64,
2978 make_descriptor_field ("DIMCT",
2979 gnat_type_for_size (8, 1),
2980 record64_type,
2981 size_int (ndim)));
2982
2983 field_list64 = chainon (field_list64,
2984 make_descriptor_field ("MBZ",
2985 gnat_type_for_size (32, 1),
2986 record64_type,
2987 size_int (0)));
2988 field_list64 = chainon (field_list64,
2989 make_descriptor_field ("ARSIZE",
2990 gnat_type_for_size (64, 1),
2991 record64_type,
2992 size_in_bytes (type)));
2993
2994 /* Now build a pointer to the 0,0,0... element. */
2995 tem = build0 (PLACEHOLDER_EXPR, type);
2996 for (i = 0, inner_type = type; i < ndim;
2997 i++, inner_type = TREE_TYPE (inner_type))
2998 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2999 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3000 NULL_TREE, NULL_TREE);
3001
3002 field_list64
3003 = chainon (field_list64,
3004 make_descriptor_field
3005 ("A0",
3006 build_pointer_type_for_mode (inner_type, DImode, false),
3007 record64_type,
3008 build1 (ADDR_EXPR,
3009 build_pointer_type_for_mode (inner_type, DImode,
3010 false),
3011 tem)));
3012
3013 /* Next come the addressing coefficients. */
3014 tem = size_one_node;
3015 for (i = 0; i < ndim; i++)
3016 {
3017 char fname[3];
3018 tree idx_length
3019 = size_binop (MULT_EXPR, tem,
3020 size_binop (PLUS_EXPR,
3021 size_binop (MINUS_EXPR,
3022 TYPE_MAX_VALUE (idx_arr[i]),
3023 TYPE_MIN_VALUE (idx_arr[i])),
3024 size_int (1)));
3025
3026 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3027 fname[1] = '0' + i, fname[2] = 0;
3028 field_list64
3029 = chainon (field_list64,
3030 make_descriptor_field (fname,
3031 gnat_type_for_size (64, 1),
3032 record64_type, idx_length));
3033
3034 if (mech == By_Descriptor_NCA)
3035 tem = idx_length;
3036 }
3037
3038 /* Finally here are the bounds. */
3039 for (i = 0; i < ndim; i++)
3040 {
3041 char fname[3];
3042
3043 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3044 field_list64
3045 = chainon (field_list64,
3046 make_descriptor_field
3047 (fname, gnat_type_for_size (64, 1), record64_type,
3048 TYPE_MIN_VALUE (idx_arr[i])));
3049
3050 fname[0] = 'U';
3051 field_list64
3052 = chainon (field_list64,
3053 make_descriptor_field
3054 (fname, gnat_type_for_size (64, 1), record64_type,
3055 TYPE_MAX_VALUE (idx_arr[i])));
3056 }
3057 break;
3058
3059 default:
3060 post_error ("unsupported descriptor type for &", gnat_entity);
3061 }
3062
10069d53 3063 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
6ca2b0a0 3064 finish_record_type (record64_type, field_list64, 0, true);
6ca2b0a0
DR
3065 return record64_type;
3066}
3067
a1ab4c31
AC
3068/* Utility routine for above code to make a field. */
3069
3070static tree
3071make_descriptor_field (const char *name, tree type,
3072 tree rec_type, tree initial)
3073{
3074 tree field
3075 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3076
3077 DECL_INITIAL (field) = initial;
3078 return field;
3079}
3080
d628c015
DR
 3081/* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
3082 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3083 which the VMS descriptor is passed. */
a1ab4c31
AC
3084
3085static tree
d628c015
DR
3086convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3087{
3088 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3089 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3090 /* The CLASS field is the 3rd field in the descriptor. */
3091 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3092 /* The POINTER field is the 6th field in the descriptor. */
3093 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3094
3095 /* Retrieve the value of the POINTER field. */
3096 tree gnu_expr64
3097 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3098
3099 if (POINTER_TYPE_P (gnu_type))
3100 return convert (gnu_type, gnu_expr64);
3101
3102 else if (TYPE_FAT_POINTER_P (gnu_type))
3103 {
3104 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3105 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3106 tree template_type = TREE_TYPE (p_bounds_type);
3107 tree min_field = TYPE_FIELDS (template_type);
3108 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3109 tree template, template_addr, aflags, dimct, t, u;
3110 /* See the head comment of build_vms_descriptor. */
3111 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3112 tree lfield, ufield;
3113
3114 /* Convert POINTER to the type of the P_ARRAY field. */
3115 gnu_expr64 = convert (p_array_type, gnu_expr64);
3116
3117 switch (iclass)
3118 {
3119 case 1: /* Class S */
3120 case 15: /* Class SB */
3121 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3122 t = TREE_CHAIN (TREE_CHAIN (class));
3123 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3124 t = tree_cons (min_field,
3125 convert (TREE_TYPE (min_field), integer_one_node),
3126 tree_cons (max_field,
3127 convert (TREE_TYPE (max_field), t),
3128 NULL_TREE));
3129 template = gnat_build_constructor (template_type, t);
3130 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3131
3132 /* For class S, we are done. */
3133 if (iclass == 1)
3134 break;
3135
3136 /* Test that we really have a SB descriptor, like DEC Ada. */
3137 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3138 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3139 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3140 /* If so, there is already a template in the descriptor and
3141 it is located right after the POINTER field. The fields are
3142 64bits so they must be repacked. */
3143 t = TREE_CHAIN (pointer64);
3144 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3145 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3146
3147 t = TREE_CHAIN (t);
3148 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3149 ufield = convert
3150 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3151
3152 /* Build the template in the form of a constructor. */
3153 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3154 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3155 ufield, NULL_TREE));
3156 template = gnat_build_constructor (template_type, t);
3157
 3158	    /* Otherwise use the {1, LENGTH} template we built above. */
3159 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3160 build_unary_op (ADDR_EXPR, p_bounds_type,
3161 template),
3162 template_addr);
3163 break;
3164
3165 case 4: /* Class A */
3166 /* The AFLAGS field is the 3rd field after the pointer in the
3167 descriptor. */
3168 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3169 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3170 /* The DIMCT field is the next field in the descriptor after
3171 aflags. */
3172 t = TREE_CHAIN (t);
3173 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
 3174	    /* Raise CONSTRAINT_ERROR if there is more than one dimension,
 3175	       or if FL_COEFF or FL_BOUNDS is not set. */
3176 u = build_int_cst (TREE_TYPE (aflags), 192);
3177 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3178 build_binary_op (NE_EXPR, integer_type_node,
3179 dimct,
3180 convert (TREE_TYPE (dimct),
3181 size_one_node)),
3182 build_binary_op (NE_EXPR, integer_type_node,
3183 build2 (BIT_AND_EXPR,
3184 TREE_TYPE (aflags),
3185 aflags, u),
3186 u));
3187 /* There is already a template in the descriptor and it is located
3188 in block 3. The fields are 64bits so they must be repacked. */
3189 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3190 (t)))));
3191 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3192 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3193
3194 t = TREE_CHAIN (t);
3195 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3196 ufield = convert
3197 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3198
3199 /* Build the template in the form of a constructor. */
3200 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3201 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3202 ufield, NULL_TREE));
3203 template = gnat_build_constructor (template_type, t);
3204 template = build3 (COND_EXPR, p_bounds_type, u,
3205 build_call_raise (CE_Length_Check_Failed, Empty,
3206 N_Raise_Constraint_Error),
3207 template);
3208 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3209 break;
3210
3211 case 10: /* Class NCA */
3212 default:
3213 post_error ("unsupported descriptor type for &", gnat_subprog);
3214 template_addr = integer_zero_node;
3215 break;
3216 }
3217
3218 /* Build the fat pointer in the form of a constructor. */
3219 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3220 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3221 template_addr, NULL_TREE));
3222 return gnat_build_constructor (gnu_type, t);
3223 }
3224
3225 else
3226 gcc_unreachable ();
3227}
3228
 3229/* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3230 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3231 which the VMS descriptor is passed. */
3232
3233static tree
3234convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3235{
3236 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3237 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3238 /* The CLASS field is the 3rd field in the descriptor. */
3239 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3240 /* The POINTER field is the 4th field in the descriptor. */
3241 tree pointer = TREE_CHAIN (class);
3242
3243 /* Retrieve the value of the POINTER field. */
d628c015 3244 tree gnu_expr32
3245 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3246
3247 if (POINTER_TYPE_P (gnu_type))
d628c015 3248 return convert (gnu_type, gnu_expr32);
3249
3250 else if (TYPE_FAT_POINTER_P (gnu_type))
3251 {
3252 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3253 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3254 tree template_type = TREE_TYPE (p_bounds_type);
3255 tree min_field = TYPE_FIELDS (template_type);
3256 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3257 tree template, template_addr, aflags, dimct, t, u;
3258 /* See the head comment of build_vms_descriptor. */
3259 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3260
3261 /* Convert POINTER to the type of the P_ARRAY field. */
d628c015 3262 gnu_expr32 = convert (p_array_type, gnu_expr32);
3263
3264 switch (iclass)
3265 {
3266 case 1: /* Class S */
3267 case 15: /* Class SB */
3268 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3269 t = TYPE_FIELDS (desc_type);
3270 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3271 t = tree_cons (min_field,
3272 convert (TREE_TYPE (min_field), integer_one_node),
3273 tree_cons (max_field,
3274 convert (TREE_TYPE (max_field), t),
3275 NULL_TREE));
3276 template = gnat_build_constructor (template_type, t);
3277 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3278
3279 /* For class S, we are done. */
3280 if (iclass == 1)
3281 break;
3282
3283 /* Test that we really have a SB descriptor, like DEC Ada. */
3284 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3285 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3286 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3287 /* If so, there is already a template in the descriptor and
3288 it is located right after the POINTER field. */
3289 t = TREE_CHAIN (pointer);
3290 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3291	  /* Otherwise use the {1, LENGTH} template we built above. */
3292 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3293 build_unary_op (ADDR_EXPR, p_bounds_type,
3294 template),
3295 template_addr);
3296 break;
3297
3298 case 4: /* Class A */
3299 /* The AFLAGS field is the 7th field in the descriptor. */
3300 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3301 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3302 /* The DIMCT field is the 8th field in the descriptor. */
3303 t = TREE_CHAIN (t);
3304 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3305	  /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3306	     or if FL_COEFF or FL_BOUNDS is not set. */
3307 u = build_int_cst (TREE_TYPE (aflags), 192);
3308 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3309 build_binary_op (NE_EXPR, integer_type_node,
3310 dimct,
3311 convert (TREE_TYPE (dimct),
3312 size_one_node)),
3313 build_binary_op (NE_EXPR, integer_type_node,
3314 build2 (BIT_AND_EXPR,
3315 TREE_TYPE (aflags),
3316 aflags, u),
3317 u));
3318 /* There is already a template in the descriptor and it is
3319 located at the start of block 3 (12th field). */
3320 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3321 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3322 template = build3 (COND_EXPR, p_bounds_type, u,
3323 build_call_raise (CE_Length_Check_Failed, Empty,
3324 N_Raise_Constraint_Error),
3325 template);
3326 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3327 break;
3328
3329 case 10: /* Class NCA */
3330 default:
3331 post_error ("unsupported descriptor type for &", gnat_subprog);
3332 template_addr = integer_zero_node;
3333 break;
3334 }
3335
3336 /* Build the fat pointer in the form of a constructor. */
d628c015 3337 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3338 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3339 template_addr, NULL_TREE));
d628c015 3340
3341 return gnat_build_constructor (gnu_type, t);
3342 }
3343
3344 else
3345 gcc_unreachable ();
3346}
3347
3348/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3349 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3350 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3351 VMS descriptor is passed. */
3352
3353static tree
3354convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3355 Entity_Id gnat_subprog)
3356{
3357 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3358 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3359 tree mbo = TYPE_FIELDS (desc_type);
3360 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3361 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
a981c964 3362 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 3363
3364	  /* If the field name is not MBO, the descriptor is 32-bit and there is no
3365	     alternate. Otherwise the primary is 64-bit and the alternate is 32-bit. */
d628c015 3366 if (strcmp (mbostr, "MBO") != 0)
3367 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3368
a981c964 3369 /* Build the test for 64-bit descriptor. */
3370 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3371 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3372 is64bit
3373 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3374 build_binary_op (EQ_EXPR, integer_type_node,
3375 convert (integer_type_node, mbo),
3376 integer_one_node),
3377 build_binary_op (EQ_EXPR, integer_type_node,
3378 convert (integer_type_node, mbmo),
3379 integer_minus_one_node));
3380
3381 /* Build the 2 possible end results. */
3382 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3383 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3384 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3385
3386 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3387}
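
/* For illustration, the tree built just above is conceptually

     (DESC.MBO == 1 && DESC.MBMO == -1)
       ? <pointer made from the 64-bit descriptor layout>
       : <pointer made from the 32-bit descriptor layout>

   so the choice between the two layouts is made at run time from the
   MBO/MBMO header fields of the descriptor being passed.  */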
3388
3389/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3390 and the GNAT node GNAT_SUBPROG. */
3391
3392void
3393build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3394{
3395 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3396 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3397 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3398 tree gnu_body;
3399
3400 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3401 gnu_param_list = NULL_TREE;
3402
3403 begin_subprog_body (gnu_stub_decl);
3404 gnat_pushlevel ();
3405
3406 start_stmt_group ();
3407
3408 /* Loop over the parameters of the stub and translate any of them
3409 passed by descriptor into a by reference one. */
3410 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3411 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3412 gnu_stub_param;
3413 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3414 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3415 {
3416 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3417 gnu_param
3418 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3419 gnu_stub_param,
3420 DECL_PARM_ALT_TYPE (gnu_stub_param),
3421 gnat_subprog);
3422 else
3423 gnu_param = gnu_stub_param;
3424
3425 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3426 }
3427
3428 gnu_body = end_stmt_group ();
3429
3430 /* Invoke the internal subprogram. */
3431 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3432 gnu_subprog);
3433 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3434 gnu_subprog_addr,
3435 nreverse (gnu_param_list));
3436
3437 /* Propagate the return value, if any. */
3438 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3439 append_to_statement_list (gnu_subprog_call, &gnu_body);
3440 else
3441 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3442 gnu_subprog_call),
3443 &gnu_body);
3444
3445 gnat_poplevel ();
3446
3447 allocate_struct_function (gnu_stub_decl, false);
3448 end_subprog_body (gnu_body, false);
3449}
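
/* Conceptually, for an internal subprogram F with a formal passed by VMS
   descriptor, the stub built above amounts to

     stub (desc, x)
     {
       return (*&F) (convert_vms_descriptor (..., desc, ...), x);
     }

   i.e. every by-descriptor formal is turned into the by-reference view
   expected by F before the actual call is made.  */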
3450\f
3451/* Build a type to be used to represent an aliased object whose nominal
3452 type is an unconstrained array. This consists of a RECORD_TYPE containing
3453 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3454 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3455 is used to represent an arbitrary unconstrained object. Use NAME
3456 as the name of the record. */
3457
3458tree
3459build_unc_object_type (tree template_type, tree object_type, tree name)
3460{
3461 tree type = make_node (RECORD_TYPE);
3462 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3463 template_type, type, 0, 0, 0, 1);
3464 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3465 type, 0, 0, 0, 1);
3466
3467 TYPE_NAME (type) = name;
3468 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3469 finish_record_type (type,
3470 chainon (chainon (NULL_TREE, template_field),
3471 array_field),
3472 0, false);
3473
3474 return type;
3475}
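
/* In C terms, the record built above is roughly

     struct NAME { template_type BOUNDS; object_type ARRAY; };

   i.e. the bounds template immediately followed by the array data, which is
   the memory layout used for an aliased unconstrained object.  */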
3476
3477/* Same, taking a thin or fat pointer type instead of a template type. */
3478
3479tree
3480build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3481 tree name)
3482{
3483 tree template_type;
3484
3485 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3486
3487 template_type
3488 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3489 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3490 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3491 return build_unc_object_type (template_type, object_type, name);
3492}
3493
3494/* Shift the component offsets within an unconstrained object TYPE to make it
3495 suitable for use as a designated type for thin pointers. */
3496
3497void
3498shift_unc_components_for_thin_pointers (tree type)
3499{
3500 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3501 allocated past the BOUNDS template. The designated type is adjusted to
3502 have ARRAY at position zero and the template at a negative offset, so
3503 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3504
3505 tree bounds_field = TYPE_FIELDS (type);
3506 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3507
3508 DECL_FIELD_OFFSET (bounds_field)
3509 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3510
3511 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3512 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3513}
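
/* Concretely, if ARRAY was laid out at byte offset N (right after BOUNDS),
   the code above moves ARRAY to offset 0 and BOUNDS to offset -N, so that a
   thin pointer aimed at the array data still reaches the bounds template
   located just before it.  */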
3514\f
3515/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3516 In the normal case this is just two adjustments, but we have more to
3517 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3518
3519void
3520update_pointer_to (tree old_type, tree new_type)
3521{
3522 tree ptr = TYPE_POINTER_TO (old_type);
3523 tree ref = TYPE_REFERENCE_TO (old_type);
3524 tree ptr1, ref1;
3525 tree type;
3526
3527 /* If this is the main variant, process all the other variants first. */
3528 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3529 for (type = TYPE_NEXT_VARIANT (old_type); type;
3530 type = TYPE_NEXT_VARIANT (type))
3531 update_pointer_to (type, new_type);
3532
229077b0 3533 /* If no pointers and no references, we are done. */
3534 if (!ptr && !ref)
3535 return;
3536
3537 /* Merge the old type qualifiers in the new type.
3538
3539 Each old variant has qualifiers for specific reasons, and the new
229077b0 3540 designated type as well. Each set of qualifiers represents useful
3541 information grabbed at some point, and merging the two simply unifies
3542 these inputs into the final type description.
3543
3544 Consider for instance a volatile type frozen after an access to constant
3545 type designating it; after the designated type's freeze, we get here with
3546 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3547 when the access type was processed. We will make a volatile and readonly
3548 designated type, because that's what it really is.
3549
3550 We might also get here for a non-dummy OLD_TYPE variant with different
3551 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 3552 to private record type elaboration (see the comments around the call to
3553 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3554 the qualifiers in those cases too, to avoid accidentally discarding the
3555 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3556 new_type
3557 = build_qualified_type (new_type,
3558 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3559
3560 /* If old type and new type are identical, there is nothing to do. */
3561 if (old_type == new_type)
3562 return;
3563
3564 /* Otherwise, first handle the simple case. */
3565 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3566 {
3567 TYPE_POINTER_TO (new_type) = ptr;
3568 TYPE_REFERENCE_TO (new_type) = ref;
3569
3570 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3571 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3572 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3573 TREE_TYPE (ptr1) = new_type;
3574
3575 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3576 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3577 ref1 = TYPE_NEXT_VARIANT (ref1))
3578 TREE_TYPE (ref1) = new_type;
3579 }
3580
229077b0 3581 /* Now deal with the unconstrained array case. In this case the "pointer"
3582 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3583 Turn them into pointers to the correct types using update_pointer_to. */
229077b0 3584 else if (!TYPE_FAT_POINTER_P (ptr))
3585 gcc_unreachable ();
3586
3587 else
3588 {
3589 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3590 tree array_field = TYPE_FIELDS (ptr);
3591 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3592 tree new_ptr = TYPE_POINTER_TO (new_type);
3593 tree new_ref;
3594 tree var;
3595
3596 /* Make pointers to the dummy template point to the real template. */
3597 update_pointer_to
3598 (TREE_TYPE (TREE_TYPE (bounds_field)),
3599 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3600
3601 /* The references to the template bounds present in the array type
3602 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3603 are updating PTR to make it a full replacement for NEW_PTR as
3604 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3605 to make it of type PTR. */
3606 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3607 build0 (PLACEHOLDER_EXPR, ptr),
3608 bounds_field, NULL_TREE);
3609
3610 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3611 to the dummy array point to it.
a1ab4c31 3612
3613 ??? This is now the only use of substitute_in_type, which is a very
3614 "heavy" routine to do this, it should be replaced at some point. */
3615 update_pointer_to
3616 (TREE_TYPE (TREE_TYPE (array_field)),
3617 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3618 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3619
229077b0 3620 /* Make PTR the pointer to NEW_TYPE. */
3621 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3622 = TREE_TYPE (new_type) = ptr;
3623
3624 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3625 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3626
3627 /* Now handle updating the allocation record, what the thin pointer
3628 points to. Update all pointers from the old record into the new
3629 one, update the type of the array field, and recompute the size. */
3630 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3631
3632 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3633 = TREE_TYPE (TREE_TYPE (array_field));
3634
3635 /* The size recomputation needs to account for alignment constraints, so
3636 we let layout_type work it out. This will reset the field offsets to
3637 what they would be in a regular record, so we shift them back to what
3638 we want them to be for a thin pointer designated type afterwards. */
3639 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3640 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3641 TYPE_SIZE (new_obj_rec) = 0;
3642 layout_type (new_obj_rec);
3643
3644 shift_unc_components_for_thin_pointers (new_obj_rec);
3645
3646 /* We are done, at last. */
3647 rest_of_record_type_compilation (ptr);
3648 }
3649}
3650\f
3651/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3652 unconstrained one. This involves making or finding a template. */
3653
3654static tree
3655convert_to_fat_pointer (tree type, tree expr)
3656{
3657 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
8df2e902 3658 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 3659 tree etype = TREE_TYPE (expr);
8df2e902 3660 tree template;
a1ab4c31 3661
3662 /* If EXPR is null, make a fat pointer that contains null pointers to the
3663 template and array. */
3664 if (integer_zerop (expr))
3665 return
3666 gnat_build_constructor
3667 (type,
3668 tree_cons (TYPE_FIELDS (type),
8df2e902 3669 convert (p_array_type, expr),
3670 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3671 convert (build_pointer_type (template_type),
3672 expr),
3673 NULL_TREE)));
3674
8df2e902 3675	  /* If EXPR is a thin pointer, make template and data from the record. */
3676 else if (TYPE_THIN_POINTER_P (etype))
3677 {
3678 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3679
3680 expr = save_expr (expr);
3681 if (TREE_CODE (expr) == ADDR_EXPR)
3682 expr = TREE_OPERAND (expr, 0);
3683 else
3684 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3685
3686 template = build_component_ref (expr, NULL_TREE, fields, false);
3687 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3688 build_component_ref (expr, NULL_TREE,
3689 TREE_CHAIN (fields), false));
3690 }
3691
3692 /* Otherwise, build the constructor for the template. */
a1ab4c31 3693 else
3694 template = build_template (template_type, TREE_TYPE (etype), expr);
3695
8df2e902 3696 /* The final result is a constructor for the fat pointer.
a1ab4c31 3697
3698 If EXPR is an argument of a foreign convention subprogram, the type it
3699 points to is directly the component type. In this case, the expression
a1ab4c31 3700 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 3701 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 3702 required, for instance because it ensures that possible later folding of
8df2e902 3703 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
3704 same type as the initial reference.
3705
3706 Note that the call to "build_template" above is still fine because it
3707 will only refer to the provided TEMPLATE_TYPE in this case. */
3708 return
3709 gnat_build_constructor
3710 (type,
3711 tree_cons (TYPE_FIELDS (type),
3712 convert (p_array_type, expr),
3713 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3714 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3715 NULL_TREE)));
3716}
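
/* In every branch above, the value returned is a constructor filling the two
   fields of the fat pointer, roughly

     { <pointer to the array data>, <pointer to the bounds template> }

   with both pointers null when EXPR itself is null.  */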
3717\f
3718/* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3719   is something that is a fat pointer, so convert to it first if EXPR
3720 is not already a fat pointer. */
3721
3722static tree
3723convert_to_thin_pointer (tree type, tree expr)
3724{
3725 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3726 expr
3727 = convert_to_fat_pointer
3728 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3729
3730 /* We get the pointer to the data and use a NOP_EXPR to make it the
3731 proper GCC type. */
3732 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3733 false);
3734 expr = build1 (NOP_EXPR, type, expr);
3735
3736 return expr;
3737}
3738\f
3739/* Create an expression whose value is that of EXPR,
3740 converted to type TYPE. The TREE_TYPE of the value
3741 is always TYPE. This function implements all reasonable
3742 conversions; callers should filter out those that are
3743 not permitted by the language being compiled. */
3744
3745tree
3746convert (tree type, tree expr)
3747{
3748 enum tree_code code = TREE_CODE (type);
3749 tree etype = TREE_TYPE (expr);
3750 enum tree_code ecode = TREE_CODE (etype);
3751
3752 /* If EXPR is already the right type, we are done. */
3753 if (type == etype)
3754 return expr;
3755
3756 /* If both input and output have padding and are of variable size, do this
3757 as an unchecked conversion. Likewise if one is a mere variant of the
3758 other, so we avoid a pointless unpad/repad sequence. */
3759 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3760 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3761 && (!TREE_CONSTANT (TYPE_SIZE (type))
3762 || !TREE_CONSTANT (TYPE_SIZE (etype))
3763 || gnat_types_compatible_p (type, etype)
3764 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3765 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3766 ;
3767
3768 /* If the output type has padding, convert to the inner type and
3769 make a constructor to build the record. */
3770 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3771 {
3772 /* If we previously converted from another type and our type is
3773 of variable size, remove the conversion to avoid the need for
3774 variable-size temporaries. Likewise for a conversion between
3775 original and packable version. */
3776 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3777 && (!TREE_CONSTANT (TYPE_SIZE (type))
3778 || (ecode == RECORD_TYPE
3779 && TYPE_NAME (etype)
3780 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3781 expr = TREE_OPERAND (expr, 0);
3782
3783 /* If we are just removing the padding from expr, convert the original
3784 object if we have variable size in order to avoid the need for some
3785 variable-size temporaries. Likewise if the padding is a mere variant
3786 of the other, so we avoid a pointless unpad/repad sequence. */
3787 if (TREE_CODE (expr) == COMPONENT_REF
3788 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3789 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3790 && (!TREE_CONSTANT (TYPE_SIZE (type))
3791 || gnat_types_compatible_p (type,
3792 TREE_TYPE (TREE_OPERAND (expr, 0)))
3793 || (ecode == RECORD_TYPE
3794 && TYPE_NAME (etype)
3795 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3796 return convert (type, TREE_OPERAND (expr, 0));
3797
3798 /* If the result type is a padded type with a self-referentially-sized
3799 field and the expression type is a record, do this as an
3800 unchecked conversion. */
3801 else if (TREE_CODE (etype) == RECORD_TYPE
3802 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3803 return unchecked_convert (type, expr, false);
3804
3805 else
3806 return
3807 gnat_build_constructor (type,
3808 tree_cons (TYPE_FIELDS (type),
3809 convert (TREE_TYPE
3810 (TYPE_FIELDS (type)),
3811 expr),
3812 NULL_TREE));
3813 }
3814
3815 /* If the input type has padding, remove it and convert to the output type.
3816 The conditions ordering is arranged to ensure that the output type is not
3817 a padding type here, as it is not clear whether the conversion would
3818 always be correct if this was to happen. */
3819 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3820 {
3821 tree unpadded;
3822
3823 /* If we have just converted to this padded type, just get the
3824 inner expression. */
3825 if (TREE_CODE (expr) == CONSTRUCTOR
3826 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3827 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3828 == TYPE_FIELDS (etype))
3829 unpadded
3830 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3831
3832 /* Otherwise, build an explicit component reference. */
3833 else
3834 unpadded
3835 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3836
3837 return convert (type, unpadded);
3838 }
3839
3840 /* If the input is a biased type, adjust first. */
3841 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3842 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3843 fold_convert (TREE_TYPE (etype),
3844 expr),
3845 TYPE_MIN_VALUE (etype)));
3846
3847 /* If the input is a justified modular type, we need to extract the actual
3848 object before converting it to any other type with the exceptions of an
3849 unconstrained array or of a mere type variant. It is useful to avoid the
3850 extraction and conversion in the type variant case because it could end
3851     up replacing a VAR_DECL expr by a constructor and we might be about to
3852 take the address of the result. */
3853 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3854 && code != UNCONSTRAINED_ARRAY_TYPE
3855 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3856 return convert (type, build_component_ref (expr, NULL_TREE,
3857 TYPE_FIELDS (etype), false));
3858
3859 /* If converting to a type that contains a template, convert to the data
3860 type and then build the template. */
3861 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3862 {
3863 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3864
3865 /* If the source already has a template, get a reference to the
3866 associated array only, as we are going to rebuild a template
3867 for the target type anyway. */
3868 expr = maybe_unconstrained_array (expr);
3869
3870 return
3871 gnat_build_constructor
3872 (type,
3873 tree_cons (TYPE_FIELDS (type),
3874 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3875 obj_type, NULL_TREE),
3876 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3877 convert (obj_type, expr), NULL_TREE)));
3878 }
3879
3880 /* There are some special cases of expressions that we process
3881 specially. */
3882 switch (TREE_CODE (expr))
3883 {
3884 case ERROR_MARK:
3885 return expr;
3886
3887 case NULL_EXPR:
3888 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3889 conversion in gnat_expand_expr. NULL_EXPR does not represent
3890	 an actual value, so no conversion is needed. */
3891 expr = copy_node (expr);
3892 TREE_TYPE (expr) = type;
3893 return expr;
3894
3895 case STRING_CST:
3896 /* If we are converting a STRING_CST to another constrained array type,
3897 just make a new one in the proper type. */
3898 if (code == ecode && AGGREGATE_TYPE_P (etype)
3899 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3900 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3901 {
3902 expr = copy_node (expr);
3903 TREE_TYPE (expr) = type;
3904 return expr;
3905 }
3906 break;
3907
3908 case CONSTRUCTOR:
3909 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3910 a new one in the proper type. */
3911 if (code == ecode && gnat_types_compatible_p (type, etype))
3912 {
3913 expr = copy_node (expr);
3914 TREE_TYPE (expr) = type;
3915 return expr;
3916 }
3917
3918 /* Likewise for a conversion between original and packable version, but
3919 we have to work harder in order to preserve type consistency. */
3920 if (code == ecode
3921 && code == RECORD_TYPE
3922 && TYPE_NAME (type) == TYPE_NAME (etype))
3923 {
3924 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3925 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3926 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3927 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3928 unsigned HOST_WIDE_INT idx;
3929 tree index, value;
3930
3931 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3932 {
3933 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3934 /* We expect only simple constructors. Otherwise, punt. */
3935 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3936 break;
3937 elt->index = field;
3938 elt->value = convert (TREE_TYPE (field), value);
3939 efield = TREE_CHAIN (efield);
3940 field = TREE_CHAIN (field);
3941 }
3942
3943 if (idx == len)
3944 {
3945 expr = copy_node (expr);
3946 TREE_TYPE (expr) = type;
3947 CONSTRUCTOR_ELTS (expr) = v;
3948 return expr;
3949 }
3950 }
3951 break;
3952
3953 case UNCONSTRAINED_ARRAY_REF:
3954 /* Convert this to the type of the inner array by getting the address of
3955 the array from the template. */
3956 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3957 build_component_ref (TREE_OPERAND (expr, 0),
3958 get_identifier ("P_ARRAY"),
3959 NULL_TREE, false));
3960 etype = TREE_TYPE (expr);
3961 ecode = TREE_CODE (etype);
3962 break;
3963
3964 case VIEW_CONVERT_EXPR:
3965 {
3966 /* GCC 4.x is very sensitive to type consistency overall, and view
3967 conversions thus are very frequent. Even though just "convert"ing
3968 the inner operand to the output type is fine in most cases, it
3969 might expose unexpected input/output type mismatches in special
3970 circumstances so we avoid such recursive calls when we can. */
3971 tree op0 = TREE_OPERAND (expr, 0);
3972
3973 /* If we are converting back to the original type, we can just
3974 lift the input conversion. This is a common occurrence with
3975 switches back-and-forth amongst type variants. */
3976 if (type == TREE_TYPE (op0))
3977 return op0;
3978
3979 /* Otherwise, if we're converting between two aggregate types, we
3980 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3981 in place or to just convert the inner expression. */
3982 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3983 {
3984 /* If we are converting between mere variants, we can just
3985 substitute the VIEW_CONVERT_EXPR in place. */
3986 if (gnat_types_compatible_p (type, etype))
3987 return build1 (VIEW_CONVERT_EXPR, type, op0);
3988
3989 /* Otherwise, we may just bypass the input view conversion unless
3990 one of the types is a fat pointer, which is handled by
3991 specialized code below which relies on exact type matching. */
3992 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3993 return convert (type, op0);
3994 }
3995 }
3996 break;
3997
3998 case INDIRECT_REF:
3999 /* If both types are record types, just convert the pointer and
4000 make a new INDIRECT_REF.
4001
4002 ??? Disable this for now since it causes problems with the
4003 code in build_binary_op for MODIFY_EXPR which wants to
4004 strip off conversions. But that code really is a mess and
4005 we need to do this a much better way some time. */
4006 if (0
4007 && (TREE_CODE (type) == RECORD_TYPE
4008 || TREE_CODE (type) == UNION_TYPE)
4009 && (TREE_CODE (etype) == RECORD_TYPE
4010 || TREE_CODE (etype) == UNION_TYPE)
4011 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4012 return build_unary_op (INDIRECT_REF, NULL_TREE,
4013 convert (build_pointer_type (type),
4014 TREE_OPERAND (expr, 0)));
4015 break;
4016
4017 default:
4018 break;
4019 }
4020
4021 /* Check for converting to a pointer to an unconstrained array. */
4022 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4023 return convert_to_fat_pointer (type, expr);
4024
4025 /* If we are converting between two aggregate types that are mere
4026 variants, just make a VIEW_CONVERT_EXPR. */
4027 else if (code == ecode
4028 && AGGREGATE_TYPE_P (type)
4029 && gnat_types_compatible_p (type, etype))
4030 return build1 (VIEW_CONVERT_EXPR, type, expr);
4031
4032 /* In all other cases of related types, make a NOP_EXPR. */
4033 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4034 || (code == INTEGER_CST && ecode == INTEGER_CST
4035 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4036 return fold_convert (type, expr);
4037
4038 switch (code)
4039 {
4040 case VOID_TYPE:
4041 return fold_build1 (CONVERT_EXPR, type, expr);
4042
4043 case INTEGER_TYPE:
4044 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4045 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4046 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4047 return unchecked_convert (type, expr, false);
4048 else if (TYPE_BIASED_REPRESENTATION_P (type))
4049 return fold_convert (type,
4050 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4051 convert (TREE_TYPE (type), expr),
4052 TYPE_MIN_VALUE (type)));
4053
4054 /* ... fall through ... */
4055
4056 case ENUMERAL_TYPE:
01ddebf2 4057 case BOOLEAN_TYPE:
4058 /* If we are converting an additive expression to an integer type
4059 with lower precision, be wary of the optimization that can be
4060 applied by convert_to_integer. There are 2 problematic cases:
4061 - if the first operand was originally of a biased type,
4062 because we could be recursively called to convert it
4063 to an intermediate type and thus rematerialize the
4064 additive operator endlessly,
4065 - if the expression contains a placeholder, because an
4066 intermediate conversion that changes the sign could
4067 be inserted and thus introduce an artificial overflow
4068 at compile time when the placeholder is substituted. */
4069 if (code == INTEGER_TYPE
4070 && ecode == INTEGER_TYPE
4071 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4072 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4073 {
4074 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4075
4076 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4077 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4078 || CONTAINS_PLACEHOLDER_P (expr))
4079 return build1 (NOP_EXPR, type, expr);
4080 }
4081
4082 return fold (convert_to_integer (type, expr));
4083
4084 case POINTER_TYPE:
4085 case REFERENCE_TYPE:
4086 /* If converting between two pointers to records denoting
4087 both a template and type, adjust if needed to account
4088 for any differing offsets, since one might be negative. */
4089 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4090 {
4091 tree bit_diff
4092 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4093 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4094 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4095 sbitsize_int (BITS_PER_UNIT));
4096
4097 expr = build1 (NOP_EXPR, type, expr);
4098 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4099 if (integer_zerop (byte_diff))
4100 return expr;
4101
4102 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4103 fold (convert (sizetype, byte_diff)));
4104 }
4105
4106 /* If converting to a thin pointer, handle specially. */
4107 if (TYPE_THIN_POINTER_P (type)
4108 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4109 return convert_to_thin_pointer (type, expr);
4110
4111 /* If converting fat pointer to normal pointer, get the pointer to the
4112 array and then convert it. */
4113 else if (TYPE_FAT_POINTER_P (etype))
4114 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4115 NULL_TREE, false);
4116
4117 return fold (convert_to_pointer (type, expr));
4118
4119 case REAL_TYPE:
4120 return fold (convert_to_real (type, expr));
4121
4122 case RECORD_TYPE:
4123 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4124 return
4125 gnat_build_constructor
4126 (type, tree_cons (TYPE_FIELDS (type),
4127 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4128 NULL_TREE));
4129
4130 /* ... fall through ... */
4131
4132 case ARRAY_TYPE:
4133 /* In these cases, assume the front-end has validated the conversion.
4134 If the conversion is valid, it will be a bit-wise conversion, so
4135 it can be viewed as an unchecked conversion. */
4136 return unchecked_convert (type, expr, false);
4137
4138 case UNION_TYPE:
4139      /* This is either a conversion between a tagged type and some
4140	 subtype, which we have to mark as a UNION_TYPE because of
4141	 overlapping fields, or a conversion of an Unchecked_Union. */
4142 return unchecked_convert (type, expr, false);
4143
4144 case UNCONSTRAINED_ARRAY_TYPE:
4145 /* If EXPR is a constrained array, take its address, convert it to a
4146 fat pointer, and then dereference it. Likewise if EXPR is a
4147 record containing both a template and a constrained array.
4148 Note that a record representing a justified modular type
4149 always represents a packed constrained array. */
4150 if (ecode == ARRAY_TYPE
4151 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4152 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4153 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4154 return
4155 build_unary_op
4156 (INDIRECT_REF, NULL_TREE,
4157 convert_to_fat_pointer (TREE_TYPE (type),
4158 build_unary_op (ADDR_EXPR,
4159 NULL_TREE, expr)));
4160
4161 /* Do something very similar for converting one unconstrained
4162 array to another. */
4163 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4164 return
4165 build_unary_op (INDIRECT_REF, NULL_TREE,
4166 convert (TREE_TYPE (type),
4167 build_unary_op (ADDR_EXPR,
4168 NULL_TREE, expr)));
4169 else
4170 gcc_unreachable ();
4171
4172 case COMPLEX_TYPE:
4173 return fold (convert_to_complex (type, expr));
4174
4175 default:
4176 gcc_unreachable ();
4177 }
4178}
4179\f
4180/* Remove all conversions that are done in EXP. This includes converting
4181 from a padded type or to a justified modular type. If TRUE_ADDRESS
4182 is true, always return the address of the containing object even if
4183 the address is not bit-aligned. */
4184
4185tree
4186remove_conversions (tree exp, bool true_address)
4187{
4188 switch (TREE_CODE (exp))
4189 {
4190 case CONSTRUCTOR:
4191 if (true_address
4192 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4193 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4194 return
4195 remove_conversions (VEC_index (constructor_elt,
4196 CONSTRUCTOR_ELTS (exp), 0)->value,
4197 true);
4198 break;
4199
4200 case COMPONENT_REF:
4201 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4202 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4203 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4204 break;
4205
4206 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4207 CASE_CONVERT:
4208 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4209
4210 default:
4211 break;
4212 }
4213
4214 return exp;
4215}
4216\f
4217/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4218 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4219 likewise return an expression pointing to the underlying array. */
4220
4221tree
4222maybe_unconstrained_array (tree exp)
4223{
4224 enum tree_code code = TREE_CODE (exp);
4225 tree new;
4226
4227 switch (TREE_CODE (TREE_TYPE (exp)))
4228 {
4229 case UNCONSTRAINED_ARRAY_TYPE:
4230 if (code == UNCONSTRAINED_ARRAY_REF)
4231 {
4232 new
4233 = build_unary_op (INDIRECT_REF, NULL_TREE,
4234 build_component_ref (TREE_OPERAND (exp, 0),
4235 get_identifier ("P_ARRAY"),
4236 NULL_TREE, false));
4237 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4238 return new;
4239 }
4240
4241 else if (code == NULL_EXPR)
4242 return build1 (NULL_EXPR,
4243 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4244 (TREE_TYPE (TREE_TYPE (exp))))),
4245 TREE_OPERAND (exp, 0));
4246
4247 case RECORD_TYPE:
4248 /* If this is a padded type, convert to the unpadded type and see if
4249 it contains a template. */
4250 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4251 {
4252 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4253 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4254 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4255 return
4256 build_component_ref (new, NULL_TREE,
4257 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4258 0);
4259 }
4260 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4261 return
4262 build_component_ref (exp, NULL_TREE,
4263 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4264 break;
4265
4266 default:
4267 break;
4268 }
4269
4270 return exp;
4271}
4272\f
4273/* Return true if EXPR is an expression that can be folded as an operand
4274 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4275 the rationale. */
4276
4277static bool
4278can_fold_for_view_convert_p (tree expr)
4279{
4280 tree t1, t2;
4281
4282 /* The folder will fold NOP_EXPRs between integral types with the same
4283 precision (in the middle-end's sense). We cannot allow it if the
4284 types don't have the same precision in the Ada sense as well. */
4285 if (TREE_CODE (expr) != NOP_EXPR)
4286 return true;
4287
4288 t1 = TREE_TYPE (expr);
4289 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4290
4291 /* Defer to the folder for non-integral conversions. */
4292 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4293 return true;
4294
4295 /* Only fold conversions that preserve both precisions. */
4296 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4297 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4298 return true;
4299
4300 return false;
4301}
4302
a1ab4c31 4303/* Return an expression that does an unchecked conversion of EXPR to TYPE.
4304 If NOTRUNC_P is true, truncation operations should be suppressed.
4305
4306 Special care is required with (source or target) integral types whose
4307 precision is not equal to their size, to make sure we fetch or assign
4308 the value bits whose location might depend on the endianness, e.g.
4309
4310 Rmsize : constant := 8;
4311 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4312
4313 type Bit_Array is array (1 .. Rmsize) of Boolean;
4314 pragma Pack (Bit_Array);
4315
4316 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4317
4318 Value : Int := 2#1000_0001#;
4319 Vbits : Bit_Array := To_Bit_Array (Value);
4320
4321 we expect the 8 bits at Vbits'Address to always contain Value, while
4322 their original location depends on the endianness, at Value'Address
4323 on a little-endian architecture but not on a big-endian one.
4324
4325 ??? There is a problematic discrepancy between what is called precision
4326 here (and more generally throughout gigi) for integral types and what is
4327 called precision in the middle-end. In the former case it's the RM size
4328 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4329 latter case, the hitch being that they are not equal when they matter,
4330 that is when the number of value bits is not equal to the type's size:
4331 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4332 to the size. The sole exception are BOOLEAN_TYPEs for which both are 1.
4333
4334 The consequence is that gigi must duplicate code bridging the gap between
4335 the type's size and its precision that exists for TYPE_PRECISION in the
4336 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4337 wary of transformations applied in the middle-end based on TYPE_PRECISION
4338 because this value doesn't reflect the actual precision for Ada. */
4339
4340tree
4341unchecked_convert (tree type, tree expr, bool notrunc_p)
4342{
4343 tree etype = TREE_TYPE (expr);
4344
4345 /* If the expression is already the right type, we are done. */
4346 if (etype == type)
4347 return expr;
4348
4349  /* If both types are integral, just do a normal conversion.
4350 Likewise for a conversion to an unconstrained array. */
4351 if ((((INTEGRAL_TYPE_P (type)
4352 && !(TREE_CODE (type) == INTEGER_TYPE
4353 && TYPE_VAX_FLOATING_POINT_P (type)))
4354 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4355 || (TREE_CODE (type) == RECORD_TYPE
4356 && TYPE_JUSTIFIED_MODULAR_P (type)))
4357 && ((INTEGRAL_TYPE_P (etype)
4358 && !(TREE_CODE (etype) == INTEGER_TYPE
4359 && TYPE_VAX_FLOATING_POINT_P (etype)))
4360 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4361 || (TREE_CODE (etype) == RECORD_TYPE
4362 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4363 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4364 {
4365 if (TREE_CODE (etype) == INTEGER_TYPE
4366 && TYPE_BIASED_REPRESENTATION_P (etype))
4367 {
4368 tree ntype = copy_type (etype);
4369 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4370 TYPE_MAIN_VARIANT (ntype) = ntype;
4371 expr = build1 (NOP_EXPR, ntype, expr);
4372 }
4373
4374 if (TREE_CODE (type) == INTEGER_TYPE
4375 && TYPE_BIASED_REPRESENTATION_P (type))
4376 {
afcea859 4377 tree rtype = copy_type (type);
4378 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4379 TYPE_MAIN_VARIANT (rtype) = rtype;
4380 expr = convert (rtype, expr);
4381 expr = build1 (NOP_EXPR, type, expr);
4382 }
4383
4384 /* We have another special case: if we are unchecked converting either
4385 a subtype or a type with limited range into a base type, we need to
4386 ensure that VRP doesn't propagate range information because this
4387 conversion may be done precisely to validate that the object is
4388 within the range it is supposed to have. */
4389 else if (TREE_CODE (expr) != INTEGER_CST
4390 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4391 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4392 || TREE_CODE (etype) == ENUMERAL_TYPE
4393 || TREE_CODE (etype) == BOOLEAN_TYPE))
4394 {
4395 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4396	     in order not to be deemed a useless type conversion, it must
4397 be from subtype to base type.
4398
4399 Therefore we first do the bulk of the conversion to a subtype of
4400 the final type. And this conversion must itself not be deemed
4401 useless if the source type is not a subtype because, otherwise,
4402 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4403 why we toggle the unsigned flag in this conversion, which is
4404 harmless since the final conversion is only a reinterpretation
4405 of the bit pattern.
4406
4407 ??? This may raise addressability and/or aliasing issues because
4408 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4409 address of its operand to be taken if it is deemed addressable
4410 and not already in GIMPLE form. */
4411 tree rtype
4412 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
4413 rtype = copy_type (rtype);
4414 TYPE_MAIN_VARIANT (rtype) = rtype;
4415 TREE_TYPE (rtype) = type;
4416 expr = convert (rtype, expr);
4417 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4418 }
4419
4420 else
4421 expr = convert (type, expr);
4422 }
4423
4424 /* If we are converting to an integral type whose precision is not equal
4425 to its size, first unchecked convert to a record that contains an
4426 object of the output type. Then extract the field. */
4427 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4428 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4429 GET_MODE_BITSIZE (TYPE_MODE (type))))
4430 {
4431 tree rec_type = make_node (RECORD_TYPE);
4432 tree field = create_field_decl (get_identifier ("OBJ"), type,
4433 rec_type, 1, 0, 0, 0);
4434
4435 TYPE_FIELDS (rec_type) = field;
4436 layout_type (rec_type);
4437
4438 expr = unchecked_convert (rec_type, expr, notrunc_p);
4439 expr = build_component_ref (expr, NULL_TREE, field, 0);
4440 }
4441
4442 /* Similarly if we are converting from an integral type whose precision
4443 is not equal to its size. */
4444 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4445 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4446 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4447 {
4448 tree rec_type = make_node (RECORD_TYPE);
4449 tree field
4450 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4451 1, 0, 0, 0);
4452
4453 TYPE_FIELDS (rec_type) = field;
4454 layout_type (rec_type);
4455
4456 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4457 expr = unchecked_convert (type, expr, notrunc_p);
4458 }
4459
4460 /* We have a special case when we are converting between two
4461 unconstrained array types. In that case, take the address,
4462 convert the fat pointer types, and dereference. */
4463 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4464 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4465 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4466 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4467 build_unary_op (ADDR_EXPR, NULL_TREE,
4468 expr)));
4469 else
4470 {
4471 expr = maybe_unconstrained_array (expr);
4472 etype = TREE_TYPE (expr);
4473 if (can_fold_for_view_convert_p (expr))
4474 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4475 else
4476 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4477 }
4478
4479 /* If the result is an integral type whose precision is not equal to its
4480 size, sign- or zero-extend the result. We need not do this if the input
4481 is an integral type of the same precision and signedness or if the output
4482 is a biased type or if both the input and output are unsigned. */
4483 if (!notrunc_p
4484 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4485 && !(TREE_CODE (type) == INTEGER_TYPE
4486 && TYPE_BIASED_REPRESENTATION_P (type))
4487 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4488 GET_MODE_BITSIZE (TYPE_MODE (type)))
4489 && !(INTEGRAL_TYPE_P (etype)
4490 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4491 && operand_equal_p (TYPE_RM_SIZE (type),
4492 (TYPE_RM_SIZE (etype) != 0
4493 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4494 0))
4495 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4496 {
4497 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4498 TYPE_UNSIGNED (type));
4499 tree shift_expr
4500 = convert (base_type,
4501 size_binop (MINUS_EXPR,
4502 bitsize_int
4503 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4504 TYPE_RM_SIZE (type)));
4505 expr
4506 = convert (type,
4507 build_binary_op (RSHIFT_EXPR, base_type,
4508 build_binary_op (LSHIFT_EXPR, base_type,
4509 convert (base_type, expr),
4510 shift_expr),
4511 shift_expr));
4512 }
4513
4514 /* An unchecked conversion should never raise Constraint_Error. The code
4515 below assumes that GCC's conversion routines overflow the same way that
4516 the underlying hardware does. This is probably true. In the rare case
4517 when it is false, we can rely on the fact that such conversions are
4518 erroneous anyway. */
4519 if (TREE_CODE (expr) == INTEGER_CST)
4520 TREE_OVERFLOW (expr) = 0;
4521
4522  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4523     show that the expression is no longer constant. */
4524 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4525 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4526 OEP_ONLY_CONST))
4527 TREE_CONSTANT (expr) = 0;
4528
4529 return expr;
4530}
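
/* As a worked example of the extension above: for a signed type with an RM
   size of 5 held in an 8-bit mode, the shift count is 8 - 5 = 3 and the
   result is computed as ((x << 3) >> 3) in the 8-bit base type, which
   replicates bit 4 into the top 3 bits; with an unsigned base type the same
   sequence zero-extends instead.  */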
4531\f
4532/* Return the appropriate GCC tree code for the specified GNAT type,
4533 the latter being a record type as predicated by Is_Record_Type. */
4534
4535enum tree_code
4536tree_code_for_record_type (Entity_Id gnat_type)
4537{
4538 Node_Id component_list
4539 = Component_List (Type_Definition
4540 (Declaration_Node
4541 (Implementation_Base_Type (gnat_type))));
4542 Node_Id component;
4543
4544 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4545 we have a non-discriminant field outside a variant. In either case,
4546 it's a RECORD_TYPE. */
4547
4548 if (!Is_Unchecked_Union (gnat_type))
4549 return RECORD_TYPE;
4550
4551 for (component = First_Non_Pragma (Component_Items (component_list));
4552 Present (component);
4553 component = Next_Non_Pragma (component))
4554 if (Ekind (Defining_Entity (component)) == E_Component)
4555 return RECORD_TYPE;
4556
4557 return UNION_TYPE;
4558}
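
/* For instance, with an illustrative declaration such as

     type R (B : Boolean := False) is record
        case B is
           when False => I : Integer;
           when True  => F : Float;
        end case;
     end record;
     pragma Unchecked_Union (R);

   every component lies in the variant part, so R is mapped to a UNION_TYPE;
   without the pragma, or with a component declared outside the variant part,
   a RECORD_TYPE is used instead.  */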
4559
4560/* Return true if GNU_TYPE is suitable as the type of a non-aliased
4561 component of an aggregate type. */
4562
4563bool
4564type_for_nonaliased_component_p (tree gnu_type)
4565{
4566 /* If the type is passed by reference, we may have pointers to the
4567 component so it cannot be made non-aliased. */
4568 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4569 return false;
4570
4571 /* We used to say that any component of aggregate type is aliased
4572 because the front-end may take 'Reference of it. The front-end
4573 has been enhanced in the meantime so as to use a renaming instead
4574 in most cases, but the back-end can probably take the address of
4575 such a component too so we go for the conservative stance.
4576
4577 For instance, we might need the address of any array type, even
4578 if normally passed by copy, to construct a fat pointer if the
4579 component is used as an actual for an unconstrained formal.
4580
4581 Likewise for record types: even if a specific record subtype is
4582 passed by copy, the parent type might be passed by ref (e.g. if
4583 it's of variable size) and we might take the address of a child
4584 component to pass to a parent formal. We have no way to check
4585 for such conditions here. */
4586 if (AGGREGATE_TYPE_P (gnu_type))
4587 return false;
4588
4589 return true;
4590}
4591
4592/* Perform final processing on global variables. */
4593
4594void
4595gnat_write_global_declarations (void)
4596{
4597 /* Proceed to optimize and emit assembly.
4598 FIXME: shouldn't be the front end's responsibility to call this. */
4599 cgraph_optimize ();
4600
4601 /* Emit debug info for all global declarations. */
4602 emit_debug_global_declarations (VEC_address (tree, global_decls),
4603 VEC_length (tree, global_decls));
4604}
4605
4606/* ************************************************************************
4607 * * GCC builtins support *
4608 * ************************************************************************ */
4609
4610/* The general scheme is fairly simple:
4611
4612 For each builtin function/type to be declared, gnat_install_builtins calls
4613 internal facilities which eventually get to gnat_push_decl, which in turn
4614   tracks the builtin function decls thus declared in the 'builtin_decls' global
4615   data structure. When an Intrinsic subprogram declaration is processed, we
4616   search this global data structure to retrieve the associated BUILT_IN DECL
4617 node. */
4618
4619/* Search the chain of currently available builtin declarations for a node
4620 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4621 found, if any, or NULL_TREE otherwise. */
4622tree
4623builtin_decl_for (tree name)
4624{
4625 unsigned i;
4626 tree decl;
4627
4628 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4629 if (DECL_NAME (decl) == name)
4630 return decl;
4631
4632 return NULL_TREE;
4633}
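
/* Typical use when processing an Intrinsic subprogram, e.g.

     tree decl = builtin_decl_for (get_identifier ("__builtin_sin"));

   which yields the FUNCTION_DECL previously registered by
   gnat_install_builtins, or NULL_TREE if no builtin with that name was
   declared.  */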
4634
4635/* The code below eventually exposes gnat_install_builtins, which declares
4636 the builtin types and functions we might need, either internally or as
4637 user accessible facilities.
4638
4639 ??? This is a first implementation shot, still in rough shape. It is
4640   heavily inspired by the "C" family implementation, with chunks copied
4641 verbatim from there.
4642
4643 Two obvious TODO candidates are
4644 o Use a more efficient name/decl mapping scheme
4645 o Devise a middle-end infrastructure to avoid having to copy
4646 pieces between front-ends. */
4647
4648/* ----------------------------------------------------------------------- *
4649 * BUILTIN ELEMENTARY TYPES *
4650 * ----------------------------------------------------------------------- */
4651
4652/* Standard data types to be used in builtin argument declarations. */
4653
4654enum c_tree_index
4655{
4656 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4657 CTI_STRING_TYPE,
4658 CTI_CONST_STRING_TYPE,
4659
4660 CTI_MAX
4661};
4662
4663static tree c_global_trees[CTI_MAX];
4664
4665#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4666#define string_type_node c_global_trees[CTI_STRING_TYPE]
4667#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4668
4669/* ??? In addition to some attribute handlers, we currently don't support a
4670   (small) number of builtin types, which in turn inhibits support for a
4671   number of builtin functions. */
4672#define wint_type_node void_type_node
4673#define intmax_type_node void_type_node
4674#define uintmax_type_node void_type_node
4675
4676/* Build the void_list_node (void_type_node having been created). */
4677
4678static tree
4679build_void_list_node (void)
4680{
4681 tree t = build_tree_list (NULL_TREE, void_type_node);
4682 return t;
4683}
4684
4685/* Used to help initialize the builtin-types.def table. When a type of
4686 the correct size doesn't exist, use error_mark_node instead of NULL.
4687    The latter results in segfaults even when a decl using the type doesn't
4688 get invoked. */
4689
4690static tree
4691builtin_type_for_size (int size, bool unsignedp)
4692{
4693 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4694 return type ? type : error_mark_node;
4695}
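
/* Illustrative sketch (not part of the original source): builtin_type_for_size
   maps a size request onto an existing integer type, or error_mark_node when
   the target has no such type.  The 128-bit request below is only an example
   and may well yield error_mark_node on narrower targets.  */
#if 0
static void
example_type_for_size (void)
{
  tree t32  = builtin_type_for_size (32, false);  /* a signed 32-bit type  */
  tree t128 = builtin_type_for_size (128, true);  /* maybe error_mark_node */

  gcc_assert (t32 != NULL_TREE && t128 != NULL_TREE);
}
#endif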
4696
4697/* Build/push the elementary type decls that builtin functions/types
4698 will need. */
4699
4700static void
4701install_builtin_elementary_types (void)
4702{
4703 signed_size_type_node = size_type_node;
4704 pid_type_node = integer_type_node;
4705 void_list_node = build_void_list_node ();
4706
4707 string_type_node = build_pointer_type (char_type_node);
4708 const_string_type_node
4709 = build_pointer_type (build_qualified_type
4710 (char_type_node, TYPE_QUAL_CONST));
4711}
4712
4713/* ----------------------------------------------------------------------- *
4714 * BUILTIN FUNCTION TYPES *
4715 * ----------------------------------------------------------------------- */
4716
4717/* Now, builtin function types per se. */
4718
4719enum c_builtin_type
4720{
4721#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4722#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4723#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4724#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4725#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4726#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4727#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4728#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4729#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4730#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4731#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4732#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4733#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4734#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4735 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4736   NAME,
4737#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4738#include "builtin-types.def"
4739#undef DEF_PRIMITIVE_TYPE
4740#undef DEF_FUNCTION_TYPE_0
4741#undef DEF_FUNCTION_TYPE_1
4742#undef DEF_FUNCTION_TYPE_2
4743#undef DEF_FUNCTION_TYPE_3
4744#undef DEF_FUNCTION_TYPE_4
4745#undef DEF_FUNCTION_TYPE_5
4746#undef DEF_FUNCTION_TYPE_6
4747#undef DEF_FUNCTION_TYPE_7
4748#undef DEF_FUNCTION_TYPE_VAR_0
4749#undef DEF_FUNCTION_TYPE_VAR_1
4750#undef DEF_FUNCTION_TYPE_VAR_2
4751#undef DEF_FUNCTION_TYPE_VAR_3
4752#undef DEF_FUNCTION_TYPE_VAR_4
4753#undef DEF_FUNCTION_TYPE_VAR_5
4754#undef DEF_POINTER_TYPE
4755 BT_LAST
4756};
4757
4758typedef enum c_builtin_type builtin_type;
4759
4760/* A temporary array used in communication with def_fn_type. */
4761static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4762
4763/* A helper function for install_builtin_types. Build function type
4764 for DEF with return type RET and N arguments. If VAR is true, then the
4765 function should be variadic after those N arguments.
4766
4767    Takes special care not to ICE if any of the types involved are
4768    error_mark_node, which indicates that said type is not in fact available
4769    (see builtin_type_for_size); in that case the function type as a whole
4770    is made error_mark_node. */
4771
4772static void
4773def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4774{
4775 tree args = NULL, t;
4776 va_list list;
4777 int i;
4778
4779 va_start (list, n);
4780 for (i = 0; i < n; ++i)
4781 {
4782 builtin_type a = va_arg (list, builtin_type);
4783 t = builtin_types[a];
4784 if (t == error_mark_node)
4785 goto egress;
4786 args = tree_cons (NULL_TREE, t, args);
4787 }
4788 va_end (list);
4789
4790 args = nreverse (args);
4791 if (!var)
4792 args = chainon (args, void_list_node);
4793
4794 t = builtin_types[ret];
4795 if (t == error_mark_node)
4796 goto egress;
4797 t = build_function_type (t, args);
4798
4799 egress:
4800 builtin_types[def] = t;
4801}
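
/* Illustrative sketch (not part of the original source): what one of the
   DEF_FUNCTION_TYPE_* expansions in install_builtin_function_types below
   boils down to.  BT_PTR and BT_INT are primitive entries of the kind found
   in builtin-types.def; BT_FN_EXAMPLE and BT_FN_EXAMPLE_VAR are hypothetical
   names used only for this sketch.  */
#if 0
static void
example_def_fn_type (void)
{
  /* Non-variadic: pointer function (pointer, int).  */
  def_fn_type (BT_FN_EXAMPLE, BT_PTR, 0, 2, BT_PTR, BT_INT);

  /* Variadic flavor of the same signature.  */
  def_fn_type (BT_FN_EXAMPLE_VAR, BT_PTR, 1, 2, BT_PTR, BT_INT);
}
#endif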
4802
4803/* Build the builtin function types and install them in the builtin_types
4804 array for later use in builtin function decls. */
4805
4806static void
4807install_builtin_function_types (void)
4808{
4809 tree va_list_ref_type_node;
4810 tree va_list_arg_type_node;
4811
4812 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4813 {
4814 va_list_arg_type_node = va_list_ref_type_node =
4815 build_pointer_type (TREE_TYPE (va_list_type_node));
4816 }
4817 else
4818 {
4819 va_list_arg_type_node = va_list_type_node;
4820 va_list_ref_type_node = build_reference_type (va_list_type_node);
4821 }
4822
4823#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4824 builtin_types[ENUM] = VALUE;
4825#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4826 def_fn_type (ENUM, RETURN, 0, 0);
4827#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4828 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4829#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4830 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4831#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4832 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4833#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4834 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4835#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4836 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4837#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4838 ARG6) \
4839 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4840#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4841 ARG6, ARG7) \
4842 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4843#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4844 def_fn_type (ENUM, RETURN, 1, 0);
4845#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4846 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4847#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4848 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4849#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4850 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4851#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4852 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4853#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4854 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4855#define DEF_POINTER_TYPE(ENUM, TYPE) \
4856 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4857
4858#include "builtin-types.def"
4859
4860#undef DEF_PRIMITIVE_TYPE
4861#undef DEF_FUNCTION_TYPE_1
4862#undef DEF_FUNCTION_TYPE_2
4863#undef DEF_FUNCTION_TYPE_3
4864#undef DEF_FUNCTION_TYPE_4
4865#undef DEF_FUNCTION_TYPE_5
4866#undef DEF_FUNCTION_TYPE_6
4867#undef DEF_FUNCTION_TYPE_VAR_0
4868#undef DEF_FUNCTION_TYPE_VAR_1
4869#undef DEF_FUNCTION_TYPE_VAR_2
4870#undef DEF_FUNCTION_TYPE_VAR_3
4871#undef DEF_FUNCTION_TYPE_VAR_4
4872#undef DEF_FUNCTION_TYPE_VAR_5
4873#undef DEF_POINTER_TYPE
4874 builtin_types[(int) BT_LAST] = NULL_TREE;
4875}
4876
4877/* ----------------------------------------------------------------------- *
4878 * BUILTIN ATTRIBUTES *
4879 * ----------------------------------------------------------------------- */
4880
4881enum built_in_attribute
4882{
4883#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4884#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4885#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4886#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4887#include "builtin-attrs.def"
4888#undef DEF_ATTR_NULL_TREE
4889#undef DEF_ATTR_INT
4890#undef DEF_ATTR_IDENT
4891#undef DEF_ATTR_TREE_LIST
4892 ATTR_LAST
4893};
4894
4895static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4896
4897static void
4898install_builtin_attributes (void)
4899{
4900 /* Fill in the built_in_attributes array. */
4901#define DEF_ATTR_NULL_TREE(ENUM) \
4902 built_in_attributes[(int) ENUM] = NULL_TREE;
4903#define DEF_ATTR_INT(ENUM, VALUE) \
4904 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4905#define DEF_ATTR_IDENT(ENUM, STRING) \
4906 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4907#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4908 built_in_attributes[(int) ENUM] \
4909 = tree_cons (built_in_attributes[(int) PURPOSE], \
4910 built_in_attributes[(int) VALUE], \
4911 built_in_attributes[(int) CHAIN]);
4912#include "builtin-attrs.def"
4913#undef DEF_ATTR_NULL_TREE
4914#undef DEF_ATTR_INT
4915#undef DEF_ATTR_IDENT
4916#undef DEF_ATTR_TREE_LIST
4917}
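
/* Illustrative sketch (not part of the original source): a builtin-attrs.def
   entry along the lines of DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW,
   ATTR_NULL, ATTR_NULL) expands, via the macro above, to roughly this
   hand-written equivalent building a one-element attribute list.  */
#if 0
static void
example_attr_tree_list (void)
{
  built_in_attributes[(int) ATTR_NOTHROW_LIST]
    = tree_cons (built_in_attributes[(int) ATTR_NOTHROW],  /* purpose */
                 built_in_attributes[(int) ATTR_NULL],      /* value   */
                 built_in_attributes[(int) ATTR_NULL]);     /* chain   */
}
#endif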
4918
4919/* Handle a "const" attribute; arguments as in
4920 struct attribute_spec.handler. */
4921
4922static tree
4923handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4924 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4925 bool *no_add_attrs)
4926{
4927 if (TREE_CODE (*node) == FUNCTION_DECL)
4928 TREE_READONLY (*node) = 1;
4929 else
4930 *no_add_attrs = true;
4931
4932 return NULL_TREE;
4933}
4934
4935/* Handle a "nothrow" attribute; arguments as in
4936 struct attribute_spec.handler. */
4937
4938static tree
4939handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4940 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4941 bool *no_add_attrs)
4942{
4943 if (TREE_CODE (*node) == FUNCTION_DECL)
4944 TREE_NOTHROW (*node) = 1;
4945 else
4946 *no_add_attrs = true;
4947
4948 return NULL_TREE;
4949}
4950
4951/* Handle a "pure" attribute; arguments as in
4952 struct attribute_spec.handler. */
4953
4954static tree
4955handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4956 int ARG_UNUSED (flags), bool *no_add_attrs)
4957{
4958 if (TREE_CODE (*node) == FUNCTION_DECL)
4959 DECL_PURE_P (*node) = 1;
4960 /* ??? TODO: Support types. */
4961 else
4962 {
4963 warning (OPT_Wattributes, "%qE attribute ignored", name);
4964 *no_add_attrs = true;
4965 }
4966
4967 return NULL_TREE;
4968}
4969
4970/* Handle a "no vops" attribute; arguments as in
4971 struct attribute_spec.handler. */
4972
4973static tree
4974handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4975 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4976 bool *ARG_UNUSED (no_add_attrs))
4977{
4978 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4979 DECL_IS_NOVOPS (*node) = 1;
4980 return NULL_TREE;
4981}
4982
4983/* Helper for nonnull attribute handling; fetch the operand number
4984 from the attribute argument list. */
4985
4986static bool
4987get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4988{
4989 /* Verify the arg number is a constant. */
4990 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4991 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4992 return false;
4993
4994 *valp = TREE_INT_CST_LOW (arg_num_expr);
4995 return true;
4996}
4997
4998/* Handle the "nonnull" attribute. */
4999static tree
5000handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5001 tree args, int ARG_UNUSED (flags),
5002 bool *no_add_attrs)
5003{
5004 tree type = *node;
5005 unsigned HOST_WIDE_INT attr_arg_num;
5006
5007 /* If no arguments are specified, all pointer arguments should be
5008 non-null. Verify a full prototype is given so that the arguments
5009 will have the correct types when we actually check them later. */
5010 if (!args)
5011 {
5012 if (!TYPE_ARG_TYPES (type))
5013 {
5014 error ("nonnull attribute without arguments on a non-prototype");
5015 *no_add_attrs = true;
5016 }
5017 return NULL_TREE;
5018 }
5019
5020 /* Argument list specified. Verify that each argument number references
5021 a pointer argument. */
5022   for (attr_arg_num = 1; args; args = TREE_CHAIN (args), attr_arg_num++)
5023 {
5024 tree argument;
5025 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5026
5027 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5028 {
5029 error ("nonnull argument has invalid operand number (argument %lu)",
5030 (unsigned long) attr_arg_num);
5031 *no_add_attrs = true;
5032 return NULL_TREE;
5033 }
5034
5035 argument = TYPE_ARG_TYPES (type);
5036 if (argument)
5037 {
5038 for (ck_num = 1; ; ck_num++)
5039 {
5040 if (!argument || ck_num == arg_num)
5041 break;
5042 argument = TREE_CHAIN (argument);
5043 }
5044
5045 if (!argument
5046 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5047 {
5048 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5049 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5050 *no_add_attrs = true;
5051 return NULL_TREE;
5052 }
5053
5054 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5055 {
5056 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5057 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5058 *no_add_attrs = true;
5059 return NULL_TREE;
5060 }
5061 }
5062 }
5063
5064 return NULL_TREE;
5065}
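
/* Illustrative sketch (not part of the original source): the attribute form
   this handler validates, shown on hypothetical C declarations.  Operand
   numbers 1 and 2 designate the two pointer parameters of example_copy.  */
#if 0
extern void *example_copy (void *dst, const void *src, int n)
  __attribute__ ((nonnull (1, 2)));

/* Without an operand list, every pointer parameter must be non-null.  */
extern void *example_dup (const void *src, int n)
  __attribute__ ((nonnull));
#endif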
5066
5067/* Handle a "sentinel" attribute. */
5068
5069static tree
5070handle_sentinel_attribute (tree *node, tree name, tree args,
5071 int ARG_UNUSED (flags), bool *no_add_attrs)
5072{
5073 tree params = TYPE_ARG_TYPES (*node);
5074
5075 if (!params)
5076 {
5077 warning (OPT_Wattributes,
5078 "%qE attribute requires prototypes with named arguments", name);
5079 *no_add_attrs = true;
5080 }
5081 else
5082 {
5083 while (TREE_CHAIN (params))
5084 params = TREE_CHAIN (params);
5085
5086 if (VOID_TYPE_P (TREE_VALUE (params)))
5087 {
5088 warning (OPT_Wattributes,
5089 "%qE attribute only applies to variadic functions", name);
5090 *no_add_attrs = true;
5091 }
5092 }
5093
5094 if (args)
5095 {
5096 tree position = TREE_VALUE (args);
5097
5098 if (TREE_CODE (position) != INTEGER_CST)
5099 {
5100 warning (0, "requested position is not an integer constant");
5101 *no_add_attrs = true;
5102 }
5103 else
5104 {
5105 if (tree_int_cst_lt (position, integer_zero_node))
5106 {
5107 warning (0, "requested position is less than zero");
5108 *no_add_attrs = true;
5109 }
5110 }
5111 }
5112
5113 return NULL_TREE;
5114}
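
/* Illustrative sketch (not part of the original source): the attribute form
   this handler validates, on hypothetical variadic declarations.  With no
   argument the NULL sentinel must come last; sentinel (1) expects it one
   position before the end of the argument list.  */
#if 0
extern int example_execl (const char *path, const char *arg, ...)
  __attribute__ ((sentinel));

extern int example_call (const char *fmt, ...)
  __attribute__ ((sentinel (1)));
#endif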
5115
5116/* Handle a "noreturn" attribute; arguments as in
5117 struct attribute_spec.handler. */
5118
5119static tree
5120handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5121 int ARG_UNUSED (flags), bool *no_add_attrs)
5122{
5123 tree type = TREE_TYPE (*node);
5124
5125 /* See FIXME comment in c_common_attribute_table. */
5126 if (TREE_CODE (*node) == FUNCTION_DECL)
5127 TREE_THIS_VOLATILE (*node) = 1;
5128 else if (TREE_CODE (type) == POINTER_TYPE
5129 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5130 TREE_TYPE (*node)
5131 = build_pointer_type
5132 (build_type_variant (TREE_TYPE (type),
5133 TYPE_READONLY (TREE_TYPE (type)), 1));
5134 else
5135 {
5136 warning (OPT_Wattributes, "%qE attribute ignored", name);
5137 *no_add_attrs = true;
5138 }
5139
5140 return NULL_TREE;
5141}
5142
5143/* Handle a "malloc" attribute; arguments as in
5144 struct attribute_spec.handler. */
5145
5146static tree
5147handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5148 int ARG_UNUSED (flags), bool *no_add_attrs)
5149{
5150 if (TREE_CODE (*node) == FUNCTION_DECL
5151 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5152 DECL_IS_MALLOC (*node) = 1;
5153 else
5154 {
5155 warning (OPT_Wattributes, "%qE attribute ignored", name);
5156 *no_add_attrs = true;
5157 }
5158
5159 return NULL_TREE;
5160}
5161
5162/* Fake handler for attributes we don't properly support. */
5163
5164tree
5165fake_attribute_handler (tree * ARG_UNUSED (node),
5166 tree ARG_UNUSED (name),
5167 tree ARG_UNUSED (args),
5168 int ARG_UNUSED (flags),
5169 bool * ARG_UNUSED (no_add_attrs))
5170{
5171 return NULL_TREE;
5172}
5173
5174/* Handle a "type_generic" attribute. */
5175
5176static tree
5177handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5178 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5179 bool * ARG_UNUSED (no_add_attrs))
5180{
5181 tree params;
5182
5183 /* Ensure we have a function type. */
5184 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5185
5186 params = TYPE_ARG_TYPES (*node);
5187 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5188 params = TREE_CHAIN (params);
5189
5190 /* Ensure we have a variadic function. */
5191 gcc_assert (!params);
5192
5193 return NULL_TREE;
5194}
5195
5196/* ----------------------------------------------------------------------- *
5197 * BUILTIN FUNCTIONS *
5198 * ----------------------------------------------------------------------- */
5199
5200/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5201 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5202 if nonansi_p and flag_no_nonansi_builtin. */
5203
5204static void
5205def_builtin_1 (enum built_in_function fncode,
5206 const char *name,
5207 enum built_in_class fnclass,
5208 tree fntype, tree libtype,
5209 bool both_p, bool fallback_p,
5210 bool nonansi_p ATTRIBUTE_UNUSED,
5211 tree fnattrs, bool implicit_p)
5212{
5213 tree decl;
5214 const char *libname;
5215
5216   /* Preserve an already installed decl.  It most likely was set up in advance
5217 (e.g. as part of the internal builtins) for specific reasons. */
5218 if (built_in_decls[(int) fncode] != NULL_TREE)
5219 return;
5220
5221 gcc_assert ((!both_p && !fallback_p)
5222 || !strncmp (name, "__builtin_",
5223 strlen ("__builtin_")));
5224
5225 libname = name + strlen ("__builtin_");
5226 decl = add_builtin_function (name, fntype, fncode, fnclass,
5227 (fallback_p ? libname : NULL),
5228 fnattrs);
5229 if (both_p)
5230 /* ??? This is normally further controlled by command-line options
5231 like -fno-builtin, but we don't have them for Ada. */
5232 add_builtin_function (libname, libtype, fncode, fnclass,
5233 NULL, fnattrs);
5234
5235 built_in_decls[(int) fncode] = decl;
5236 if (implicit_p)
5237 implicit_built_in_decls[(int) fncode] = decl;
5238}
5239
5240static int flag_isoc94 = 0;
5241static int flag_isoc99 = 0;
5242
5243/* Install what the common builtins.def offers. */
5244
5245static void
5246install_builtin_functions (void)
5247{
5248#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5249 NONANSI_P, ATTRS, IMPLICIT, COND) \
5250 if (NAME && COND) \
5251 def_builtin_1 (ENUM, NAME, CLASS, \
5252 builtin_types[(int) TYPE], \
5253 builtin_types[(int) LIBTYPE], \
5254 BOTH_P, FALLBACK_P, NONANSI_P, \
5255 built_in_attributes[(int) ATTRS], IMPLICIT);
5256#include "builtins.def"
5257#undef DEF_BUILTIN
5258}
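
/* Illustrative sketch (not part of the original source): roughly what the
   DEF_BUILTIN expansion above produces for a single builtins.def entry.  The
   type and attribute enum names are representative of builtins.def contents,
   not guaranteed to match the actual memcpy entry verbatim.  */
#if 0
static void
example_def_builtin_expansion (void)
{
  def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                 builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                 builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                 true, true, false,
                 built_in_attributes[(int) ATTR_NOTHROW_NONNULL], true);
}
#endif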
5259
5260/* ----------------------------------------------------------------------- *
5261 * BUILTIN FUNCTIONS *
5262 * ----------------------------------------------------------------------- */
5263
5264/* Install the builtin functions we might need. */
5265
5266void
5267gnat_install_builtins (void)
5268{
5269 install_builtin_elementary_types ();
5270 install_builtin_function_types ();
5271 install_builtin_attributes ();
5272
5273   /* Install builtins used by generic middle-end pieces first.  Some of these
5274      know about internal specificities and set attributes accordingly, for
5275      instance __builtin_alloca vs. no-throw and -fstack-check.  The generic
5276      definitions from builtins.def will be ignored for those. */
5277 build_common_builtin_nodes ();
5278
5279 /* Now, install the target specific builtins, such as the AltiVec family on
5280 ppc, and the common set as exposed by builtins.def. */
5281 targetm.init_builtins ();
5282 install_builtin_functions ();
5283}
5284
5285#include "gt-ada-utils.h"
5286#include "gtype-ada.h"