/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2009, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can redistribute it and/or modify it under   *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation;  either version 3,  or (at your option) any later ver-  *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the implied warranty of MERCHANTABILITY  *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "toplev.h"
#include "rtl.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "function.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cgraph.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "gimple.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute },
  { "may_alias",    0, 0,  false, true,  false, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler },

  { NULL,           0, 0,  false, false, false, NULL }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
\f
/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such GCC tree node is always
   a ..._DECL node.  If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  Raise gigi 401 if not.  Usually, this
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
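
/* Usage sketch (added comment, not part of the original code): callers
   typically test the association before translating an entity, along the
   lines of

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       {
         gnu_decl = <translate gnat_entity>;
         save_gnu_tree (gnat_entity, gnu_decl, false);
       }

   The translation step is elided; this only illustrates how the three
   entry points above fit together.  */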
\f
/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make a new one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (AGGREGATE_TYPE_P (gnu_type))
    TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
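
/* Added note (not part of the original source): dummy types serve as
   placeholders while the real GCC type for a GNAT entity is not available
   yet, e.g. for forward-referenced or mutually recursive types; the dummy
   node table above lets the same placeholder be reused until the full type
   is elaborated and references to the dummy can be updated.  */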
\f
/* Return nonzero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return ((force_global || !current_function_decl) ? -1 : 0);
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel
      = (struct gnat_binding_level *)
        ggc_alloc (sizeof (struct gnat_binding_level));

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = chainon (BLOCK_SUBBLOCKS (block),
                   BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
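
/* Usage sketch (added comment, not part of the original code): binding
   levels are meant to bracket the translation of a scope, roughly

     gnat_pushlevel ();
     ...create and push the decls of the scope...
     gnat_poplevel ();

   and, for a function body, set_current_block_context (fndecl) ties the
   scope's BLOCK to the FUNCTION_DECL before the level is popped.  This is
   only an illustration of the intended pairing.  */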

\f
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If this decl is public external or at toplevel, there is no context.
     But PARM_DECLs always go in the level of their function.  */
  if (TREE_CODE (decl) != PARM_DECL
      && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
          || global_bindings_p ()))
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
         For really nested functions mark them initially as needing
         a static chain for uses of that flag before unnesting;
         lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
        DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global variables in the
     globals list and builtin functions in a dedicated list to speed up
     further lookups.  Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list, as they will cause trouble with the debugger and aren't needed
     anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (global_bindings_p ())
        {
          VEC_safe_push (tree, gc, global_decls, decl);

          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            VEC_safe_push (tree, gc, builtin_decls, decl);
        }
      else
        {
          TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
        ;
      else if (TYPE_FAT_POINTER_P (t))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          t = NULL_TREE;
          DECL_ARTIFICIAL (decl) = 0;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants.  This is needed for
         the type qualifiers machinery to work properly.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          TYPE_NAME (t) = decl;
    }
}
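
/* Added note (not part of the original code): gnat_pushdecl is the common
   funnel through which the create_*_decl functions below register the nodes
   they build; for instance create_type_decl and create_var_decl_1 both end
   up issuing a call such as

     gnat_pushdecl (decl, gnat_node);

   so that the decl lands in the right scope and gets its source location.  */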
\f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (true, true);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the width of Pmode.  In most cases when ptr_mode
     and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
     But we get far better code using the width of Pmode.  */
  size_type_node = gnat_type_for_mode (Pmode, 0);
  set_sizetype (size_type_node);

  /* In Ada, we use an unsigned 8-bit type for the default boolean type.  */
  boolean_type_node = make_unsigned_type (8);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  SET_TYPE_RM_MAX_VALUE (boolean_type_node,
                         build_int_cst (boolean_type_node, 1));
  SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));

  build_common_tree_nodes_2 (0);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  ptr_void_type_node = build_pointer_type (void_type_node);
}
\f
/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.  */

void
record_builtin_type (const char *name, tree type)
{
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, get_identifier (name), type);

  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}
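
/* Usage sketch (added comment, not from the original source): this is how
   gigi would typically expose a basic type to the debugger, e.g.,
   hypothetically,

     record_builtin_type ("integer", integer_type_node);

   i.e. wrap TYPE in a named TYPE_DECL, push it at the global level and
   notify the debug back-end.  */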
\f
/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  If DO_NOT_FINALIZE is
   true, the record type is expected to be modified afterwards so it will
   not be sent to the back-end for finalization.  */

void
finish_record_type (tree record_type, tree fieldlist, int rep_level,
                    bool do_not_finalize)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = fieldlist;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      SET_TYPE_MODE (record_type, BLKmode);

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;
      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if ((TREE_CODE (type) == RECORD_TYPE
           || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && !TYPE_IS_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);
      else
        this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        {
          unsigned int align = TYPE_ALIGN (type);

          /* In the general case, type alignment is required.  */
          if (value_factor_p (pos, align))
            {
              /* The enclosing record type must be sufficiently aligned.
                 Otherwise, if no alignment was specified for it and it
                 has been laid out already, bump its alignment to the
                 desired one if this is compatible with its size.  */
              if (TYPE_ALIGN (record_type) >= align)
                {
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
              else if (!had_align
                       && rep_level == 0
                       && value_factor_p (TYPE_SIZE (record_type), align))
                {
                  TYPE_ALIGN (record_type) = align;
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
            }

          /* In the non-strict alignment case, only byte alignment is
             required.  */
          if (!STRICT_ALIGNMENT
              && DECL_BIT_FIELD (field)
              && value_factor_p (pos, BITS_PER_UNIT))
            DECL_BIT_FIELD (field) = 0;
        }

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
         field is technically not addressable.  Except that it can actually
         be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
          && !(DECL_MODE (field) == BLKmode
               && value_factor_p (pos, BITS_PER_UNIT)))
        DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
         a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size);
          size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size);
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, pos, this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          size
            = merge_sizes (size, pos, this_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If the type is discriminated, it can be used to access all its
     constrained subtypes, so force structural equality checks.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    SET_TYPE_STRUCTURAL_EQUALITY (record_type);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
         than what was specified in it, if any.  */
      if (TREE_CODE (record_type) == RECORD_TYPE
          && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
        size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_IS_FAT_POINTER_P (record_type)
          && !TYPE_CONTAINS_TEMPLATE_P (record_type))
        SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
        {
          tree size_unit = had_size_unit
                           ? TYPE_SIZE_UNIT (record_type)
                           : convert (sizetype,
                                      size_binop (CEIL_DIV_EXPR, size,
                                                  bitsize_unit_node));
          unsigned int align = TYPE_ALIGN (record_type);

          TYPE_SIZE (record_type) = variable_size (round_up (size, align));
          TYPE_SIZE_UNIT (record_type)
            = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

          compute_record_mode (record_type);
        }
    }

  if (!do_not_finalize)
    rest_of_record_type_compilation (record_type);
}
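
/* Usage sketch (added comment, not part of the original code): a caller
   building a simple two-field record with no rep clause would go through
   create_field_decl and then this function, along the lines of

     tree rec = make_node (RECORD_TYPE);
     tree fields = NULL_TREE, f;

     f = create_field_decl (get_identifier ("F1"), integer_type_node,
                            rec, 0, NULL_TREE, NULL_TREE, 0);
     TREE_CHAIN (f) = fields, fields = f;
     f = create_field_decl (get_identifier ("F2"), integer_type_node,
                            rec, 0, NULL_TREE, NULL_TREE, 0);
     TREE_CHAIN (f) = fields, fields = f;

     finish_record_type (rec, nreverse (fields), 0, false);

   The field names and chaining are only illustrative; the point is that
   REP_LEVEL 0 lets the function lay the record out itself.  */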

/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
   the debug information associated with it.  It need not be invoked
   directly in most cases since finish_record_type takes care of doing
   so, unless explicitly requested not to through DO_NOT_FINALIZE.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree fieldlist = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
          /* If a field has a non-constant qualifier, the record will have
             variable size too.  */
          || (code == QUAL_UNION_TYPE
              && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size
      && !(TREE_CODE (record_type) == RECORD_TYPE
           && TYPE_IS_PADDING_P (record_type)))
    {
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
        orig_name = DECL_NAME (orig_name);

      new_name
        = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
                       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and then something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */

          /* Some computations depend on the shape of the position expression,
             so strip conversions to make sure it's exposed.  */
          curpos = remove_conversions (curpos, true);

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && host_integerp (TREE_OPERAND (curpos, 1), 1))
            {
              tree offset = TREE_OPERAND (curpos, 0);
              align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

              /* An offset which is a bitwise AND with a negative power of 2
                 means an alignment corresponding to this power of 2.  */
              offset = remove_conversions (offset, true);
              if (TREE_CODE (offset) == BIT_AND_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 0)
                  && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
                {
                  unsigned int pow
                    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
                  if (exact_log2 (pow) > 0)
                    align *= pow;
                }

              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-sized and make a pointer type
             and indicate the indirection if so.  Beware that the debug
             back-end may adjust the position computed above according
             to the alignment of the field type, i.e. the pointer type
             in this case, if we don't preventively counter that.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              if (align != 0 && TYPE_ALIGN (field_type) > align)
                {
                  field_type = copy_node (field_type);
                  TYPE_ALIGN (field_type) = align;
                }
              var = true;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_name (field_name, suffix);
            }

          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}

/* Append PARALLEL_TYPE on the chain of parallel types for DECL.  */

void
add_parallel_type (tree decl, tree parallel_type)
{
  tree d = decl;

  while (DECL_PARALLEL_TYPE (d))
    d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));

  SET_DECL_PARALLEL_TYPE (d, parallel_type);
}

/* Return the parallel type associated to a type, if any.  */

tree
get_parallel_type (tree type)
{
  if (TYPE_STUB_DECL (type))
    return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
  else
    return NULL_TREE;
}
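
/* Added note (not part of the original code): parallel types are how the
   variable-size encodings above (the XVE/XVU records) are attached to the
   original type for the debugger's benefit; rest_of_record_type_compilation
   calls add_parallel_type on the stub decl, and get_parallel_type is the
   read-side accessor, e.g., hypothetically,

     tree xv = get_parallel_type (record_type);

   which yields NULL_TREE when no parallel encoding was needed.  */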

/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                            integer_zerop (TREE_OPERAND (size, 1))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 1),
                                                       1, has_rep),
                            integer_zerop (TREE_OPERAND (size, 2))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 2),
                                                       1, has_rep));

  /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
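
/* Worked example (added comment, not part of the original code): for a
   plain record with no rep clause, a field at bit position 32 with size 16
   gives

     merge_sizes (last_size, bitsize_int (32), bitsize_int (16), false, false)
       == bitsize_int (48)

   i.e. FIRST_BIT plus SIZE, whereas with HAS_REP the result would be the
   MAX of that value and LAST_SIZE.  */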

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
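
/* Worked example (added comment, not part of the original code): with
   OP0 = N + 40 and OP1 = N + 8, both in bitsizetype, split_plus yields
   constant parts 40 and 8 with the same variable part N, so
   compute_related_constant returns bitsize_int (32); if the variable
   parts differ, it returns 0 to signal that no constant offset relates
   the two positions.  */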
\f
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
   object.  RETURNS_BY_REF is true if the function returns by reference.
   RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
   first parameter) the address of the place to copy its result.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool returns_unconstrained, bool returns_by_ref,
                     bool returns_by_target_ptr)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     so nreverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) || cico_list
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
      || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
    type = copy_type (type);

  TYPE_CI_CO_LIST (type) = cico_list;
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
  TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
  return type;
}
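
/* Usage sketch (added comment, not part of the original code): for an Ada
   procedure with no formals and no copy-in/copy-out parameters, the call
   would reduce to something like

     tree fntype = create_subprog_type (void_type_node, NULL_TREE, NULL_TREE,
                                        false, false, false);

   which builds a FUNCTION_TYPE whose parameter list is just void, then
   stamps the Ada-specific flags on it.  */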
\f
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}
\f
/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;

  /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy.  Note that we have
     no way of sharing these types, but that's only a small hole.  */
  if (TYPE_INDEX_TYPE (type))
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);

  return type;
}

/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (type == NULL_TREE)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type
    = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));

  min = convert (type, min);
  max = convert (type, max);

  /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it.  */
  if (TYPE_RM_MIN_VALUE (range_type)
      && TYPE_RM_MAX_VALUE (range_type)
      && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
      && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
    return range_type;

  /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy.  */
  if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
    range_type = copy_type (range_type);

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, min);
  SET_TYPE_RM_MAX_VALUE (range_type, max);

  return range_type;
}
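
/* Usage sketch (added comment, not part of the original code): building the
   GCC view of an Ada subtype such as "subtype Small is Integer range 1 .. 10"
   would boil down to something like

     tree small = create_range_type (integer_type_node,
                                     build_int_cst (integer_type_node, 1),
                                     build_int_cst (integer_type_node, 10));

   where the RM bounds carry the Ada-level range while the base range of
   TYPE is kept for the underlying representation.  */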
\f
/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
   TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
   its data type.  */

tree
create_type_stub_decl (tree type_name, tree type)
{
  /* Using a named TYPE_DECL ensures that a type name marker is emitted in
     STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
     emitted in DWARF.  */
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, type_name, type);
  DECL_ARTIFICIAL (type_decl) = 1;
  return type_decl;
}

/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
                            TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  gnat_pushdecl (type_decl, gnat_node);
  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Pass the type declaration to the debug back-end unless this is an
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
     type for which debugging information was not requested, or else an
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
     handled separately.  And do not pass dummy types either.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
           && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
                && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
           && !(code == RECORD_TYPE
                && TYPE_IS_DUMMY_P
                   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}
\f
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
                   bool const_flag, bool public_flag, bool extern_flag,
                   bool static_flag, bool const_decl_allowed_p,
                   struct attrib *attr_list, Node_Id gnat_node)
{
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
           : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
                  (constant_p && const_decl_allowed_p
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
                  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (public_flag || static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF runtime relocation mechanism.  */
  if (extern_flag
      && constant_p
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
           != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    {
      if (global_bindings_p ())
        rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    expand_decl (var_decl);

  return var_decl;
}
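
/* Usage sketch (added comment, not part of the original code): a local,
   non-constant variable of a given type with no initializer and no
   attributes would be created with something along the lines of

     tree v = create_var_decl_1 (get_identifier ("V"), NULL_TREE, gnu_type,
                                 NULL_TREE, false, false, false, false,
                                 true, NULL, gnat_node);

   where gnu_type and gnat_node stand for the translated type and the GNAT
   node at hand; the decl is pushed onto the current binding level by the
   gnat_pushdecl call above.  */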
1440\f
1441/* Return true if TYPE, an aggregate type, contains (or is) an array. */
1442
1443static bool
1444aggregate_type_contains_array_p (tree type)
1445{
1446 switch (TREE_CODE (type))
1447 {
1448 case RECORD_TYPE:
1449 case UNION_TYPE:
1450 case QUAL_UNION_TYPE:
1451 {
1452 tree field;
1453 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1454 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1455 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1456 return true;
1457 return false;
1458 }
1459
1460 case ARRAY_TYPE:
1461 return true;
1462
1463 default:
1464 gcc_unreachable ();
1465 }
1466}
1467
a8e05f92 1468/* Return a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
a1ab4c31
AC
1469 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1470 this field is in a record type with a "pragma pack". If SIZE is nonzero
1471 it is the specified size for this field. If POS is nonzero, it is the bit
1472 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1473 the address of this field for aliasing purposes. If it is negative, we
1474 should not make a bitfield, which is used by make_aligning_type. */
1475
1476tree
1477create_field_decl (tree field_name, tree field_type, tree record_type,
1478 int packed, tree size, tree pos, int addressable)
1479{
c172df28
AH
1480 tree field_decl = build_decl (input_location,
1481 FIELD_DECL, field_name, field_type);
a1ab4c31
AC
1482
1483 DECL_CONTEXT (field_decl) = record_type;
1484 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1485
1486 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1487 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1488 Likewise for an aggregate without specified position that contains an
1489 array, because in this case slices of variable length of this array
1490 must be handled by GCC and variable-sized objects need to be aligned
1491 to at least a byte boundary. */
1492 if (packed && (TYPE_MODE (field_type) == BLKmode
1493 || (!pos
1494 && AGGREGATE_TYPE_P (field_type)
1495 && aggregate_type_contains_array_p (field_type))))
1496 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1497
1498 /* If a size is specified, use it. Otherwise, if the record type is packed,
1499 compute a size to use, which may differ from the object's natural size.
1500 We always set a size in this case to trigger the checks for bitfield
1501 creation below, which is typically required when no position has been
1502 specified. */
1503 if (size)
1504 size = convert (bitsizetype, size);
1505 else if (packed == 1)
1506 {
1507 size = rm_size (field_type);
1508
1509 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1510 a byte boundary. */
1511 if (TREE_CODE (size) == INTEGER_CST
1512 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1513 size = round_up (size, BITS_PER_UNIT);
1514 }
1515
1516 /* If we may, according to ADDRESSABLE, we make a bitfield when a size is
1517 specified, for one of two reasons: first, if the size differs from the
1518 natural size; second, if the alignment is insufficient. There are a number
1519 of ways the latter can be true.
1520
1521 We never make a bitfield if the type of the field has a nonconstant size,
1522 because no such entity requiring bitfield operations should reach here.
1523
1524 We do *preventively* make a bitfield when there might be the need for it
1525 but we don't have all the necessary information to decide, as is the case
1526 of a field with no specified position in a packed record.
1527
1528 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1529 in layout_decl or finish_record_type to clear the bit_field indication if
1530 it is in fact not needed. */
1531 if (addressable >= 0
1532 && size
1533 && TREE_CODE (size) == INTEGER_CST
1534 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1535 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1536 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1537 || packed
1538 || (TYPE_ALIGN (record_type) != 0
1539 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1540 {
1541 DECL_BIT_FIELD (field_decl) = 1;
1542 DECL_SIZE (field_decl) = size;
1543 if (!packed && !pos)
feec4372
EB
1544 {
1545 if (TYPE_ALIGN (record_type) != 0
1546 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1547 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1548 else
1549 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1550 }
a1ab4c31
AC
1551 }
1552
1553 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1554
1555 /* Bump the alignment if need be, either for bitfield/packing purposes or
1556 to satisfy the type requirements if no such consideration applies. When
1557 we get the alignment from the type, indicate if this is from an explicit
1558 user request, which prevents stor-layout from lowering it later on. */
1559 {
d9223014 1560 unsigned int bit_align
a1ab4c31
AC
1561 = (DECL_BIT_FIELD (field_decl) ? 1
1562 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1563
1564 if (bit_align > DECL_ALIGN (field_decl))
1565 DECL_ALIGN (field_decl) = bit_align;
1566 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1567 {
1568 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1569 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1570 }
1571 }
1572
1573 if (pos)
1574 {
1575 /* We need to pass in the alignment the DECL is known to have.
1576 This is the lowest-order bit set in POS, but no more than
1577 the alignment of the record, if one is specified. Note
1578 that an alignment of 0 is taken as infinite. */
1579 unsigned int known_align;
1580
1581 if (host_integerp (pos, 1))
1582 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1583 else
1584 known_align = BITS_PER_UNIT;
1585
1586 if (TYPE_ALIGN (record_type)
1587 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1588 known_align = TYPE_ALIGN (record_type);
1589
1590 layout_decl (field_decl, known_align);
1591 SET_DECL_OFFSET_ALIGN (field_decl,
1592 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1593 : BITS_PER_UNIT);
1594 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1595 &DECL_FIELD_BIT_OFFSET (field_decl),
1596 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
1597 }
1598
1599 /* In addition to what our caller says, claim the field is addressable if we
1600 know that its type is not suitable.
1601
1602 The field may also be "technically" nonaddressable, meaning that even if
1603 we attempt to take the field's address we will actually get the address
1604 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1605 value we have at this point is not accurate enough, so we don't account
1606 for this here and let finish_record_type decide. */
4c5a0615 1607 if (!addressable && !type_for_nonaliased_component_p (field_type))
a1ab4c31
AC
1608 addressable = 1;
1609
1610 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1611
1612 return field_decl;
1613}
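/* Illustrative sketch (not part of the original sources; "my_record_type"
   is a hypothetical RECORD_TYPE): a field of a packed record given an
   explicit 24-bit size while its type has a 32-bit natural size meets the
   bitfield conditions above, so DECL_BIT_FIELD gets set and DECL_SIZE is
   the specified 24 bits:

     tree f = create_field_decl (get_identifier ("F"),
                                 gnat_type_for_size (32, 1),
                                 my_record_type,
                                 1,                  (packed)
                                 bitsize_int (24),   (specified size)
                                 NULL_TREE,          (no position)
                                 0);                 (addressable)
*/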
1614\f
a8e05f92
EB
1615/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1616 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1617 (either an In parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
1618
1619tree
1620create_param_decl (tree param_name, tree param_type, bool readonly)
1621{
c172df28
AH
1622 tree param_decl = build_decl (input_location,
1623 PARM_DECL, param_name, param_type);
a1ab4c31 1624
a8e05f92
EB
1625 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1626 can lead to various ABI violations. */
1627 if (targetm.calls.promote_prototypes (NULL_TREE)
1628 && INTEGRAL_TYPE_P (param_type)
a1ab4c31
AC
1629 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1630 {
1631 /* We have to be careful about biased types here. Make a subtype
1632 of integer_type_node with the proper biasing. */
1633 if (TREE_CODE (param_type) == INTEGER_TYPE
1634 && TYPE_BIASED_REPRESENTATION_P (param_type))
1635 {
84fb43a1
EB
1636 tree subtype
1637 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
1638 TREE_TYPE (subtype) = integer_type_node;
1639 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
84fb43a1
EB
1640 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1641 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 1642 param_type = subtype;
a1ab4c31
AC
1643 }
1644 else
1645 param_type = integer_type_node;
1646 }
1647
1648 DECL_ARG_TYPE (param_decl) = param_type;
1649 TREE_READONLY (param_decl) = readonly;
1650 return param_decl;
1651}
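/* Illustrative sketch (an assumption, not in the original sources;
   "small_biased_type" stands for an 8-bit INTEGER_TYPE that has
   TYPE_BIASED_REPRESENTATION_P set): on a target where promote_prototypes
   holds, the promotion above preserves the biasing by building a
   full-precision biased subtype:

     tree param = create_param_decl (get_identifier ("p"),
                                     small_biased_type, true);

   TREE_TYPE (param) remains small_biased_type, while DECL_ARG_TYPE (param)
   is a biased subtype of integer_type_node carrying the same bounds.  */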
1652\f
1653/* Given a DECL and ATTR_LIST, process the listed attributes. */
1654
1655void
1656process_attributes (tree decl, struct attrib *attr_list)
1657{
1658 for (; attr_list; attr_list = attr_list->next)
1659 switch (attr_list->type)
1660 {
1661 case ATTR_MACHINE_ATTRIBUTE:
1662 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1663 NULL_TREE),
1664 ATTR_FLAG_TYPE_IN_PLACE);
1665 break;
1666
1667 case ATTR_LINK_ALIAS:
1668 if (! DECL_EXTERNAL (decl))
1669 {
1670 TREE_STATIC (decl) = 1;
1671 assemble_alias (decl, attr_list->name);
1672 }
1673 break;
1674
1675 case ATTR_WEAK_EXTERNAL:
1676 if (SUPPORTS_WEAK)
1677 declare_weak (decl);
1678 else
1679 post_error ("?weak declarations not supported on this target",
1680 attr_list->error_point);
1681 break;
1682
1683 case ATTR_LINK_SECTION:
1684 if (targetm.have_named_sections)
1685 {
1686 DECL_SECTION_NAME (decl)
1687 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1688 IDENTIFIER_POINTER (attr_list->name));
1689 DECL_COMMON (decl) = 0;
1690 }
1691 else
1692 post_error ("?section attributes are not supported for this target",
1693 attr_list->error_point);
1694 break;
1695
1696 case ATTR_LINK_CONSTRUCTOR:
1697 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1698 TREE_USED (decl) = 1;
1699 break;
1700
1701 case ATTR_LINK_DESTRUCTOR:
1702 DECL_STATIC_DESTRUCTOR (decl) = 1;
1703 TREE_USED (decl) = 1;
1704 break;
40a14772
TG
1705
1706 case ATTR_THREAD_LOCAL_STORAGE:
62298c61
TG
1707 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1708 DECL_COMMON (decl) = 0;
40a14772 1709 break;
a1ab4c31
AC
1710 }
1711}
1712\f
feec4372 1713/* Record DECL as a global renaming pointer. */
a1ab4c31
AC
1714
1715void
1716record_global_renaming_pointer (tree decl)
1717{
1718 gcc_assert (DECL_RENAMED_OBJECT (decl));
1719 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1720}
1721
1722/* Invalidate the global renaming pointers. */
1723
1724void
1725invalidate_global_renaming_pointers (void)
1726{
1727 unsigned int i;
1728 tree iter;
1729
1730 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1731 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1732
1733 VEC_free (tree, gc, global_renaming_pointers);
1734}
1735
1736/* Return true if VALUE is known to be a multiple of FACTOR, which must be
1737 a power of 2. */
1738
1739bool
1740value_factor_p (tree value, HOST_WIDE_INT factor)
1741{
1742 if (host_integerp (value, 1))
1743 return tree_low_cst (value, 1) % factor == 0;
1744
1745 if (TREE_CODE (value) == MULT_EXPR)
1746 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1747 || value_factor_p (TREE_OPERAND (value, 1), factor));
1748
1749 return false;
1750}
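/* Usage sketch (illustration only; "n" is an arbitrary bitsizetype
   expression assumed for the example): the predicate accepts both literal
   constants and products with a suitable operand, e.g.

     value_factor_p (bitsize_int (96), 32)
       is true because 96 % 32 == 0;

     value_factor_p (build2 (MULT_EXPR, bitsizetype, n, bitsize_int (64)), 8)
       is true through the second operand, whatever n is.  */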
1751
1752/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1753 unless we can prove these 2 fields are laid out in such a way that no gap
1754 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1755 is the distance in bits between the end of PREV_FIELD and the starting
1756 position of CURR_FIELD. It is ignored if null. */
1757
1758static bool
1759potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1760{
1761 /* If this is the first field of the record, there cannot be any gap. */
1762 if (!prev_field)
1763 return false;
1764
1765 /* If the previous field is a union type, then return False: The only
1766 time when such a field is not the last field of the record is when
1767 there are other components at fixed positions after it (meaning there
1768 was a rep clause for every field), in which case we don't want the
1769 alignment constraint to override them. */
1770 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1771 return false;
1772
1773 /* If the distance between the end of prev_field and the beginning of
1774 curr_field is constant, then there is a gap if the value of this
1775 constant is not null. */
1776 if (offset && host_integerp (offset, 1))
1777 return !integer_zerop (offset);
1778
1779 /* If the size and position of the previous field are constant,
1780 then check the sum of this size and position. There will be a gap
1781 iff it is not a multiple of the current field alignment. */
1782 if (host_integerp (DECL_SIZE (prev_field), 1)
1783 && host_integerp (bit_position (prev_field), 1))
1784 return ((tree_low_cst (bit_position (prev_field), 1)
1785 + tree_low_cst (DECL_SIZE (prev_field), 1))
1786 % DECL_ALIGN (curr_field) != 0);
1787
1788 /* If both the position and size of the previous field are multiples
1789 of the current field alignment, there cannot be any gap. */
1790 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1791 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1792 return false;
1793
1794 /* Fallback: return that there may be a potential gap. */
1795 return true;
1796}
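/* Worked example (illustration only, not from the original sources):
   with a null OFFSET, if PREV_FIELD is 24 bits wide and placed at bit
   position 0 while CURR_FIELD requires 32-bit alignment, the constant
   branch above computes (0 + 24) % 32 == 24, which is nonzero, so a
   potential gap is reported; had PREV_FIELD been 32 bits wide,
   (0 + 32) % 32 == 0 and that same branch reports that no gap exists.  */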
1797
1798/* Returns a LABEL_DECL node for LABEL_NAME. */
1799
1800tree
1801create_label_decl (tree label_name)
1802{
c172df28
AH
1803 tree label_decl = build_decl (input_location,
1804 LABEL_DECL, label_name, void_type_node);
a1ab4c31
AC
1805
1806 DECL_CONTEXT (label_decl) = current_function_decl;
1807 DECL_MODE (label_decl) = VOIDmode;
1808 DECL_SOURCE_LOCATION (label_decl) = input_location;
1809
1810 return label_decl;
1811}
1812\f
1813/* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1814 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1815 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1816 PARM_DECL nodes chained through the TREE_CHAIN field).
1817
1818 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1819 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1820
1821tree
1822create_subprog_decl (tree subprog_name, tree asm_name,
1823 tree subprog_type, tree param_decl_list, bool inline_flag,
1824 bool public_flag, bool extern_flag,
1825 struct attrib *attr_list, Node_Id gnat_node)
1826{
1827 tree return_type = TREE_TYPE (subprog_type);
c172df28
AH
1828 tree subprog_decl = build_decl (input_location,
1829 FUNCTION_DECL, subprog_name, subprog_type);
a1ab4c31 1830
d84b344a
EB
1831 /* If this is a non-inline function nested inside an inlined external
1832 function, we cannot honor both requests without cloning the nested
1833 function in the current unit since it is private to the other unit.
1834 We could inline the nested function as well but it's probably better
1835 to err on the side of too little inlining. */
1836 if (!inline_flag
1837 && current_function_decl
1838 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 1839 && DECL_EXTERNAL (current_function_decl))
d84b344a 1840 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
a1ab4c31
AC
1841
1842 DECL_EXTERNAL (subprog_decl) = extern_flag;
1843 TREE_PUBLIC (subprog_decl) = public_flag;
1844 TREE_STATIC (subprog_decl) = 1;
1845 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1846 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1847 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
d84b344a 1848 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
a1ab4c31 1849 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
c172df28
AH
1850 DECL_RESULT (subprog_decl) = build_decl (input_location,
1851 RESULT_DECL, 0, return_type);
a1ab4c31
AC
1852 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1853 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1854
1855 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1856 target by-reference return mechanism. This is not supported all the
1857 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1858 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1859 the RESULT_DECL instead - see gnat_genericize for more details. */
1860 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1861 {
1862 tree result_decl = DECL_RESULT (subprog_decl);
1863
1864 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1865 DECL_BY_REFERENCE (result_decl) = 1;
1866 }
1867
a1ab4c31
AC
1868 if (asm_name)
1869 {
1870 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1871
1872 /* The expand_main_function circuitry expects "main_identifier_node" to
1873 designate the DECL_NAME of the 'main' entry point, in turn expected
1874 to be declared as the "main" function literally by default. Ada
1875 program entry points are typically declared with a different name
1876 within the binder generated file, exported as 'main' to satisfy the
1877 system expectations. Redirect main_identifier_node in this case. */
1878 if (asm_name == main_identifier_node)
1879 main_identifier_node = DECL_NAME (subprog_decl);
1880 }
1881
1882 process_attributes (subprog_decl, attr_list);
1883
1884 /* Add this decl to the current binding level. */
1885 gnat_pushdecl (subprog_decl, gnat_node);
1886
1887 /* Output the assembler code and/or RTL for the declaration. */
1888 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1889
1890 return subprog_decl;
1891}
1892\f
1893/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1894 body. This routine needs to be invoked before processing the declarations
1895 appearing in the subprogram. */
1896
1897void
1898begin_subprog_body (tree subprog_decl)
1899{
1900 tree param_decl;
1901
1902 current_function_decl = subprog_decl;
1903 announce_function (subprog_decl);
1904
1905 /* Enter a new binding level and show that all the parameters belong to
1906 this function. */
1907 gnat_pushlevel ();
1908 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1909 param_decl = TREE_CHAIN (param_decl))
1910 DECL_CONTEXT (param_decl) = subprog_decl;
1911
1912 make_decl_rtl (subprog_decl);
1913
1914 /* We handle pending sizes via the elaboration of types, so we don't need to
1915 save them. This causes them to be marked as part of the outer function
1916 and then discarded. */
1917 get_pending_sizes ();
1918}
1919
1920
1921/* Helper for the genericization callback. Return a dereference of VAL
1922 if it is of a reference type. */
1923
1924static tree
1925convert_from_reference (tree val)
1926{
1927 tree value_type, ref;
1928
1929 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1930 return val;
1931
1932 value_type = TREE_TYPE (TREE_TYPE (val));
1933 ref = build1 (INDIRECT_REF, value_type, val);
1934
1935 /* See if what we reference is CONST or VOLATILE, which requires
1936 looking into array types to get to the component type. */
1937
1938 while (TREE_CODE (value_type) == ARRAY_TYPE)
1939 value_type = TREE_TYPE (value_type);
1940
1941 TREE_READONLY (ref)
1942 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1943 TREE_THIS_VOLATILE (ref)
1944 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1945
1946 TREE_SIDE_EFFECTS (ref)
1947 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1948
1949 return ref;
1950}
1951
1952/* Helper for the genericization callback. Returns true if T denotes
1953 a RESULT_DECL with DECL_BY_REFERENCE set. */
1954
1955static inline bool
1956is_byref_result (tree t)
1957{
1958 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1959}
1960
1961
1962/* Tree walking callback for gnat_genericize. Currently ...
1963
1964 o Adjust references to the function's DECL_RESULT if it is marked
1965 DECL_BY_REFERENCE and so has had its type turned into a reference
1966 type at the end of the function compilation. */
1967
1968static tree
1969gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1970{
1971 /* This implementation is modeled after what the C++ front-end is
1972 doing, which forms the basis of the downstream passes' behavior. */
1973
1974 tree stmt = *stmt_p;
1975 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1976
1977 /* If we have a direct mention of the result decl, dereference. */
1978 if (is_byref_result (stmt))
1979 {
1980 *stmt_p = convert_from_reference (stmt);
1981 *walk_subtrees = 0;
1982 return NULL;
1983 }
1984
1985 /* Otherwise, no need to walk the same tree twice. */
1986 if (pointer_set_contains (p_set, stmt))
1987 {
1988 *walk_subtrees = 0;
1989 return NULL_TREE;
1990 }
1991
1992 /* If we are taking the address of what now is a reference, just get the
1993 reference value. */
1994 if (TREE_CODE (stmt) == ADDR_EXPR
1995 && is_byref_result (TREE_OPERAND (stmt, 0)))
1996 {
1997 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1998 *walk_subtrees = 0;
1999 }
2000
2001 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2002 else if (TREE_CODE (stmt) == RETURN_EXPR
2003 && TREE_OPERAND (stmt, 0)
2004 && is_byref_result (TREE_OPERAND (stmt, 0)))
2005 *walk_subtrees = 0;
2006
2007 /* Don't look inside trees that cannot embed references of interest. */
2008 else if (IS_TYPE_OR_DECL_P (stmt))
2009 *walk_subtrees = 0;
2010
2011 pointer_set_insert (p_set, *stmt_p);
2012
2013 return NULL;
2014}
2015
2016/* Perform lowering of Ada trees to GENERIC. In particular:
2017
2018 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2019 and adjust all the references to this decl accordingly. */
2020
2021static void
2022gnat_genericize (tree fndecl)
2023{
2024 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2025 was handled by simply setting TREE_ADDRESSABLE on the result type.
2026 Everything required to actually pass by invisible ref using the target
2027 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2028
2029 This doesn't work with GCC 4 any more for several reasons. First, the
2030 gimplification process might need the creation of temporaries of this
2031 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2032 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2033 RESULT/PARM_DECLs), and expects the user-invisible by-reference-ness to
2034 be explicitly accounted for by the front-end in the function body.
2035
2036 We achieve the complete transformation in two steps:
2037
2038 1/ create_subprog_decl performs early attribute tweaks: it clears
2039 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2040 the result decl. The former ensures that the bit isn't set in the GCC
2041 tree saved for the function, so prevents ICEs on temporary creation.
2042 The latter we use here to trigger the rest of the processing.
2043
2044 2/ This function performs the type transformation on the result decl
2045 and adjusts all the references to this decl from the function body
2046 accordingly.
2047
2048 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2049 strategy, which escapes the gimplifier temporary creation issues by
2050 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2051 on simple specific support code in aggregate_value_p to look at the
2052 target function result decl explicitly. */
2053
2054 struct pointer_set_t *p_set;
2055 tree decl_result = DECL_RESULT (fndecl);
2056
2057 if (!DECL_BY_REFERENCE (decl_result))
2058 return;
2059
2060 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2061 occurrences in the function body using the common tree-walking facility.
2062 We want to see every occurrence of the result decl to adjust the
2063 referencing tree, so need to use our own pointer set to control which
2064 trees should be visited again or not. */
2065
2066 p_set = pointer_set_create ();
2067
2068 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2069 TREE_ADDRESSABLE (decl_result) = 0;
2070 relayout_decl (decl_result);
2071
2072 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2073
2074 pointer_set_destroy (p_set);
2075}
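/* Illustrative summary (exposition only, restating the walk above): for a
   function whose result is returned by reference, the RESULT_DECL of type
   T becomes a RESULT_DECL of pointer-to-T, and gnat_genericize_r then
   rewrites the body so that a plain use of the result is dereferenced,
   taking its address yields the reference value itself, and the operand
   of a RETURN_EXPR is left untouched.  */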
2076
2fa03086 2077/* Finish the definition of the current subprogram BODY and finalize it. */
a1ab4c31
AC
2078
2079void
a406865a 2080end_subprog_body (tree body)
a1ab4c31
AC
2081{
2082 tree fndecl = current_function_decl;
2083
2084 /* Mark the BLOCK for this level as being for this function and pop the
2085 level. Since the vars in it are the parameters, clear them. */
2086 BLOCK_VARS (current_binding_level->block) = 0;
2087 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2088 DECL_INITIAL (fndecl) = current_binding_level->block;
2089 gnat_poplevel ();
2090
a1ab4c31
AC
2091 /* We handle pending sizes via the elaboration of types, so we don't
2092 need to save them. */
2093 get_pending_sizes ();
2094
2095 /* Mark the RESULT_DECL as being in this subprogram. */
2096 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2097
2098 DECL_SAVED_TREE (fndecl) = body;
2099
2100 current_function_decl = DECL_CONTEXT (fndecl);
2101 set_cfun (NULL);
2102
2103 /* We cannot track the location of errors past this point. */
2104 error_gnat_node = Empty;
2105
2106 /* If we're only annotating types, don't actually compile this function. */
2107 if (type_annotate_only)
2108 return;
2109
2110 /* Perform the required pre-gimplification transformations on the tree. */
2111 gnat_genericize (fndecl);
2112
a406865a
RG
2113 /* Dump functions before gimplification. */
2114 dump_function (TDI_original, fndecl);
2115
2fa03086 2116 /* ??? This special handling of nested functions is probably obsolete. */
a1ab4c31 2117 if (!DECL_CONTEXT (fndecl))
a406865a 2118 cgraph_finalize_function (fndecl, false);
a1ab4c31
AC
2119 else
2120 /* Register this function with cgraph just far enough to get it
2121 added to our parent's nested function list. */
2122 (void) cgraph_node (fndecl);
2123}
2124
a1ab4c31
AC
2125tree
2126gnat_builtin_function (tree decl)
2127{
2128 gnat_pushdecl (decl, Empty);
2129 return decl;
2130}
2131
2132/* Return an integer type with the number of bits of precision given by
2133 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2134 it is a signed type. */
2135
2136tree
2137gnat_type_for_size (unsigned precision, int unsignedp)
2138{
2139 tree t;
2140 char type_name[20];
2141
2142 if (precision <= 2 * MAX_BITS_PER_WORD
2143 && signed_and_unsigned_types[precision][unsignedp])
2144 return signed_and_unsigned_types[precision][unsignedp];
2145
2146 if (unsignedp)
2147 t = make_unsigned_type (precision);
2148 else
2149 t = make_signed_type (precision);
2150
2151 if (precision <= 2 * MAX_BITS_PER_WORD)
2152 signed_and_unsigned_types[precision][unsignedp] = t;
2153
2154 if (!TYPE_NAME (t))
2155 {
2156 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2157 TYPE_NAME (t) = get_identifier (type_name);
2158 }
2159
2160 return t;
2161}
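/* Usage sketch (illustration only): on the usual targets where
   32 <= 2 * MAX_BITS_PER_WORD, repeated requests for the same precision
   and signedness hand back the cached node, e.g.

     tree u32_a = gnat_type_for_size (32, 1);
     tree u32_b = gnat_type_for_size (32, 1);

   give u32_a == u32_b, and the node is named "UNSIGNED_32" if it had no
   name when first created.  */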
2162
2163/* Likewise for floating-point types. */
2164
2165static tree
2166float_type_for_precision (int precision, enum machine_mode mode)
2167{
2168 tree t;
2169 char type_name[20];
2170
2171 if (float_types[(int) mode])
2172 return float_types[(int) mode];
2173
2174 float_types[(int) mode] = t = make_node (REAL_TYPE);
2175 TYPE_PRECISION (t) = precision;
2176 layout_type (t);
2177
2178 gcc_assert (TYPE_MODE (t) == mode);
2179 if (!TYPE_NAME (t))
2180 {
2181 sprintf (type_name, "FLOAT_%d", precision);
2182 TYPE_NAME (t) = get_identifier (type_name);
2183 }
2184
2185 return t;
2186}
2187
2188/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2189 an unsigned type; otherwise a signed type is returned. */
2190
2191tree
2192gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2193{
2194 if (mode == BLKmode)
2195 return NULL_TREE;
2196 else if (mode == VOIDmode)
2197 return void_type_node;
2198 else if (COMPLEX_MODE_P (mode))
2199 return NULL_TREE;
2200 else if (SCALAR_FLOAT_MODE_P (mode))
2201 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2202 else if (SCALAR_INT_MODE_P (mode))
2203 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2204 else
2205 return NULL_TREE;
2206}
2207
2208/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2209
2210tree
2211gnat_unsigned_type (tree type_node)
2212{
2213 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2214
2215 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2216 {
2217 type = copy_node (type);
2218 TREE_TYPE (type) = type_node;
2219 }
2220 else if (TREE_TYPE (type_node)
2221 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2222 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2223 {
2224 type = copy_node (type);
2225 TREE_TYPE (type) = TREE_TYPE (type_node);
2226 }
2227
2228 return type;
2229}
2230
2231/* Return the signed version of a TYPE_NODE, a scalar type. */
2232
2233tree
2234gnat_signed_type (tree type_node)
2235{
2236 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2237
2238 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2239 {
2240 type = copy_node (type);
2241 TREE_TYPE (type) = type_node;
2242 }
2243 else if (TREE_TYPE (type_node)
2244 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2245 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2246 {
2247 type = copy_node (type);
2248 TREE_TYPE (type) = TREE_TYPE (type_node);
2249 }
2250
2251 return type;
2252}
2253
2254/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2255 transparently converted to each other. */
2256
2257int
2258gnat_types_compatible_p (tree t1, tree t2)
2259{
2260 enum tree_code code;
2261
2262 /* This is the default criterion. */
2263 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2264 return 1;
2265
2266 /* We only check structural equivalence here. */
2267 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2268 return 0;
2269
7948ae37
OH
2270 /* Vector types are also compatible if they have the same number of subparts
2271 and the same form of (scalar) element type. */
2272 if (code == VECTOR_TYPE
2273 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2274 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2275 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2276 return 1;
2277
a1ab4c31
AC
2278 /* Array types are also compatible if they are constrained and have
2279 the same component type and the same domain. */
2280 if (code == ARRAY_TYPE
2281 && TREE_TYPE (t1) == TREE_TYPE (t2)
0adef32b
JJ
2282 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2283 || (TYPE_DOMAIN (t1)
b4680ca1 2284 && TYPE_DOMAIN (t2)
0adef32b
JJ
2285 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2286 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2287 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2288 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
a1ab4c31
AC
2289 return 1;
2290
2291 /* Padding record types are also compatible if they pad the same
2292 type and have the same constant size. */
2293 if (code == RECORD_TYPE
2294 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2295 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2296 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2297 return 1;
2298
2299 return 0;
2300}
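/* Usage sketch (illustration only): two constrained array types over the
   same component type whose domains have equal constant bounds are
   considered interchangeable here, e.g.

     tree dom = build_index_type (size_int (9));
     tree a1 = build_array_type (integer_type_node, dom);
     tree a2 = build_array_type (integer_type_node, dom);

   gnat_types_compatible_p (a1, a2) returns 1, either via the main variant
   shortcut or via the constrained array case above.  */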
2301\f
2302/* EXP is an expression for the size of an object. If this size contains
2303 discriminant references, replace them with the maximum (if MAX_P) or
2304 minimum (if !MAX_P) possible value of the discriminant. */
2305
2306tree
2307max_size (tree exp, bool max_p)
2308{
2309 enum tree_code code = TREE_CODE (exp);
2310 tree type = TREE_TYPE (exp);
2311
2312 switch (TREE_CODE_CLASS (code))
2313 {
2314 case tcc_declaration:
2315 case tcc_constant:
2316 return exp;
2317
2318 case tcc_vl_exp:
2319 if (code == CALL_EXPR)
2320 {
f82a627c
EB
2321 tree t, *argarray;
2322 int n, i;
2323
2324 t = maybe_inline_call_in_expr (exp);
2325 if (t)
2326 return max_size (t, max_p);
a1ab4c31 2327
f82a627c
EB
2328 n = call_expr_nargs (exp);
2329 gcc_assert (n > 0);
a1ab4c31
AC
2330 argarray = (tree *) alloca (n * sizeof (tree));
2331 for (i = 0; i < n; i++)
2332 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2333 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2334 }
2335 break;
2336
2337 case tcc_reference:
2338 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2339 modify. Otherwise, we treat it like a variable. */
2340 if (!CONTAINS_PLACEHOLDER_P (exp))
2341 return exp;
2342
2343 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2344 return
2345 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2346
2347 case tcc_comparison:
2348 return max_p ? size_one_node : size_zero_node;
2349
2350 case tcc_unary:
2351 case tcc_binary:
2352 case tcc_expression:
2353 switch (TREE_CODE_LENGTH (code))
2354 {
2355 case 1:
2356 if (code == NON_LVALUE_EXPR)
2357 return max_size (TREE_OPERAND (exp, 0), max_p);
2358 else
2359 return
2360 fold_build1 (code, type,
2361 max_size (TREE_OPERAND (exp, 0),
2362 code == NEGATE_EXPR ? !max_p : max_p));
2363
2364 case 2:
2365 if (code == COMPOUND_EXPR)
2366 return max_size (TREE_OPERAND (exp, 1), max_p);
2367
2368 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2369 may provide a tighter bound on max_size. */
2370 if (code == MINUS_EXPR
2371 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2372 {
2373 tree lhs = fold_build2 (MINUS_EXPR, type,
2374 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2375 TREE_OPERAND (exp, 1));
2376 tree rhs = fold_build2 (MINUS_EXPR, type,
2377 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2378 TREE_OPERAND (exp, 1));
2379 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2380 max_size (lhs, max_p),
2381 max_size (rhs, max_p));
2382 }
2383
2384 {
2385 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2386 tree rhs = max_size (TREE_OPERAND (exp, 1),
2387 code == MINUS_EXPR ? !max_p : max_p);
2388
2389 /* Special-case wanting the maximum value of a MIN_EXPR.
2390 In that case, if one side overflows, return the other.
2391 sizetype is signed, but we know sizes are non-negative.
2392 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2393 overflowing or the maximum possible value and the RHS
2394 a variable. */
2395 if (max_p
2396 && code == MIN_EXPR
2397 && TREE_CODE (rhs) == INTEGER_CST
2398 && TREE_OVERFLOW (rhs))
2399 return lhs;
2400 else if (max_p
2401 && code == MIN_EXPR
2402 && TREE_CODE (lhs) == INTEGER_CST
2403 && TREE_OVERFLOW (lhs))
2404 return rhs;
2405 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2406 && ((TREE_CODE (lhs) == INTEGER_CST
2407 && TREE_OVERFLOW (lhs))
2408 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2409 && !TREE_CONSTANT (rhs))
2410 return lhs;
2411 else
2412 return fold_build2 (code, type, lhs, rhs);
2413 }
2414
2415 case 3:
2416 if (code == SAVE_EXPR)
2417 return exp;
2418 else if (code == COND_EXPR)
2419 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2420 max_size (TREE_OPERAND (exp, 1), max_p),
2421 max_size (TREE_OPERAND (exp, 2), max_p));
2422 }
2423
2424 /* Other tree classes cannot happen. */
2425 default:
2426 break;
2427 }
2428
2429 gcc_unreachable ();
2430}
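/* Worked example (illustration only, assuming a record with a
   discriminant D whose subtype ranges over 0 .. 100 and a component whose
   size is 8 * D bits): that size is a MULT_EXPR whose second operand
   refers to D through a PLACEHOLDER_EXPR, so max_size (size, true)
   substitutes TYPE_MAX_VALUE of D's type and folds to 8 * 100 = 800 bits,
   while max_size (size, false) substitutes TYPE_MIN_VALUE and folds
   to 0.  */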
2431\f
2432/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2433 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2434 Return a constructor for the template. */
2435
2436tree
2437build_template (tree template_type, tree array_type, tree expr)
2438{
2439 tree template_elts = NULL_TREE;
2440 tree bound_list = NULL_TREE;
2441 tree field;
2442
2443 while (TREE_CODE (array_type) == RECORD_TYPE
2444 && (TYPE_IS_PADDING_P (array_type)
2445 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2446 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2447
2448 if (TREE_CODE (array_type) == ARRAY_TYPE
2449 || (TREE_CODE (array_type) == INTEGER_TYPE
2450 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2451 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2452
2453 /* First make the list for a CONSTRUCTOR for the template. Go down the
2454 field list of the template instead of the type chain because this
2455 array might be an Ada array of arrays and we can't tell where the
2456 nested arrays stop being the underlying object. */
2457
2458 for (field = TYPE_FIELDS (template_type); field;
2459 (bound_list
2460 ? (bound_list = TREE_CHAIN (bound_list))
2461 : (array_type = TREE_TYPE (array_type))),
2462 field = TREE_CHAIN (TREE_CHAIN (field)))
2463 {
2464 tree bounds, min, max;
2465
2466 /* If we have a bound list, get the bounds from there. Likewise
2467 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2468 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2469 This will give us a maximum range. */
2470 if (bound_list)
2471 bounds = TREE_VALUE (bound_list);
2472 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2473 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2474 else if (expr && TREE_CODE (expr) == PARM_DECL
2475 && DECL_BY_COMPONENT_PTR_P (expr))
2476 bounds = TREE_TYPE (field);
2477 else
2478 gcc_unreachable ();
2479
2480 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2481 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2482
2483 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
2484 substitute it from EXPR. */
2485 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2486 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2487
2488 template_elts = tree_cons (TREE_CHAIN (field), max,
2489 tree_cons (field, min, template_elts));
2490 }
2491
2492 return gnat_build_constructor (template_type, nreverse (template_elts));
2493}
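/* Illustrative sketch (not from the original sources): for a
   one-dimensional constrained array with bounds 1 .. N, where N is read
   from a discriminant reachable through EXPR, the loop above yields a
   CONSTRUCTOR of TEMPLATE_TYPE pairing the first template field with the
   converted lower bound 1 and the second with the upper bound N, the
   PLACEHOLDER_EXPR inside N having been substituted from EXPR.  */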
2494\f
6ca2b0a0 2495/* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
a1ab4c31
AC
2496 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2497 in the type contains in its DECL_INITIAL the expression to use when
2498 a constructor is made for the type. GNAT_ENTITY is an entity used
2499 to print out an error message if the mechanism cannot be applied to
2500 an object of that type and also for the name. */
2501
2502tree
d628c015 2503build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
a1ab4c31
AC
2504{
2505 tree record_type = make_node (RECORD_TYPE);
2506 tree pointer32_type;
2507 tree field_list = 0;
c6bd4220 2508 int klass;
a1ab4c31
AC
2509 int dtype = 0;
2510 tree inner_type;
2511 int ndim;
2512 int i;
2513 tree *idx_arr;
2514 tree tem;
2515
2516 /* If TYPE is an unconstrained array, use the underlying array type. */
2517 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2518 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2519
2520 /* If this is an array, compute the number of dimensions in the array,
2521 get the index types, and point to the inner type. */
2522 if (TREE_CODE (type) != ARRAY_TYPE)
2523 ndim = 0;
2524 else
2525 for (ndim = 1, inner_type = type;
2526 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2527 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2528 ndim++, inner_type = TREE_TYPE (inner_type))
2529 ;
2530
2531 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2532
d628c015 2533 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
a1ab4c31
AC
2534 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2535 for (i = ndim - 1, inner_type = type;
2536 i >= 0;
2537 i--, inner_type = TREE_TYPE (inner_type))
2538 idx_arr[i] = TYPE_DOMAIN (inner_type);
2539 else
2540 for (i = 0, inner_type = type;
2541 i < ndim;
2542 i++, inner_type = TREE_TYPE (inner_type))
2543 idx_arr[i] = TYPE_DOMAIN (inner_type);
2544
2545 /* Now get the DTYPE value. */
2546 switch (TREE_CODE (type))
2547 {
2548 case INTEGER_TYPE:
2549 case ENUMERAL_TYPE:
01ddebf2 2550 case BOOLEAN_TYPE:
a1ab4c31
AC
2551 if (TYPE_VAX_FLOATING_POINT_P (type))
2552 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2553 {
2554 case 6:
2555 dtype = 10;
2556 break;
2557 case 9:
2558 dtype = 11;
2559 break;
2560 case 15:
2561 dtype = 27;
2562 break;
2563 }
2564 else
2565 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2566 {
2567 case 8:
2568 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2569 break;
2570 case 16:
2571 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2572 break;
2573 case 32:
2574 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2575 break;
2576 case 64:
2577 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2578 break;
2579 case 128:
2580 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2581 break;
2582 }
2583 break;
2584
2585 case REAL_TYPE:
2586 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2587 break;
2588
2589 case COMPLEX_TYPE:
2590 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2591 && TYPE_VAX_FLOATING_POINT_P (type))
2592 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2593 {
2594 case 6:
2595 dtype = 12;
2596 break;
2597 case 9:
2598 dtype = 13;
2599 break;
2600 case 15:
2601 dtype = 29;
2602 }
2603 else
2604 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2605 break;
2606
2607 case ARRAY_TYPE:
2608 dtype = 14;
2609 break;
2610
2611 default:
2612 break;
2613 }
2614
2615 /* Get the CLASS value. */
2616 switch (mech)
2617 {
2618 case By_Descriptor_A:
d628c015 2619 case By_Short_Descriptor_A:
c6bd4220 2620 klass = 4;
a1ab4c31
AC
2621 break;
2622 case By_Descriptor_NCA:
d628c015 2623 case By_Short_Descriptor_NCA:
c6bd4220 2624 klass = 10;
a1ab4c31
AC
2625 break;
2626 case By_Descriptor_SB:
d628c015 2627 case By_Short_Descriptor_SB:
c6bd4220 2628 klass = 15;
a1ab4c31
AC
2629 break;
2630 case By_Descriptor:
d628c015 2631 case By_Short_Descriptor:
a1ab4c31 2632 case By_Descriptor_S:
d628c015 2633 case By_Short_Descriptor_S:
a1ab4c31 2634 default:
c6bd4220 2635 klass = 1;
a1ab4c31
AC
2636 break;
2637 }
2638
2639 /* Make the type for a descriptor for VMS. The first four fields
2640 are the same for all types. */
2641
2642 field_list
2643 = chainon (field_list,
2644 make_descriptor_field
2645 ("LENGTH", gnat_type_for_size (16, 1), record_type,
d628c015
DR
2646 size_in_bytes ((mech == By_Descriptor_A ||
2647 mech == By_Short_Descriptor_A)
2648 ? inner_type : type)));
a1ab4c31
AC
2649
2650 field_list = chainon (field_list,
2651 make_descriptor_field ("DTYPE",
2652 gnat_type_for_size (8, 1),
2653 record_type, size_int (dtype)));
2654 field_list = chainon (field_list,
2655 make_descriptor_field ("CLASS",
2656 gnat_type_for_size (8, 1),
c6bd4220 2657 record_type, size_int (klass)));
a1ab4c31
AC
2658
2659 /* Of course this will crash at run-time if the address space is not
2660 within the low 32 bits, but there is nothing else we can do. */
2661 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2662
2663 field_list
2664 = chainon (field_list,
2665 make_descriptor_field
2666 ("POINTER", pointer32_type, record_type,
2667 build_unary_op (ADDR_EXPR,
2668 pointer32_type,
2669 build0 (PLACEHOLDER_EXPR, type))));
2670
2671 switch (mech)
2672 {
2673 case By_Descriptor:
d628c015 2674 case By_Short_Descriptor:
a1ab4c31 2675 case By_Descriptor_S:
d628c015 2676 case By_Short_Descriptor_S:
a1ab4c31
AC
2677 break;
2678
2679 case By_Descriptor_SB:
d628c015 2680 case By_Short_Descriptor_SB:
a1ab4c31
AC
2681 field_list
2682 = chainon (field_list,
2683 make_descriptor_field
2684 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2685 TREE_CODE (type) == ARRAY_TYPE
2686 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2687 field_list
2688 = chainon (field_list,
2689 make_descriptor_field
2690 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2691 TREE_CODE (type) == ARRAY_TYPE
2692 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2693 break;
2694
2695 case By_Descriptor_A:
d628c015 2696 case By_Short_Descriptor_A:
a1ab4c31 2697 case By_Descriptor_NCA:
d628c015 2698 case By_Short_Descriptor_NCA:
a1ab4c31
AC
2699 field_list = chainon (field_list,
2700 make_descriptor_field ("SCALE",
2701 gnat_type_for_size (8, 1),
2702 record_type,
2703 size_zero_node));
2704
2705 field_list = chainon (field_list,
2706 make_descriptor_field ("DIGITS",
2707 gnat_type_for_size (8, 1),
2708 record_type,
2709 size_zero_node));
2710
2711 field_list
2712 = chainon (field_list,
2713 make_descriptor_field
2714 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
d628c015
DR
2715 size_int ((mech == By_Descriptor_NCA ||
2716 mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
2717 ? 0
2718 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2719 : (TREE_CODE (type) == ARRAY_TYPE
2720 && TYPE_CONVENTION_FORTRAN_P (type)
2721 ? 224 : 192))));
2722
2723 field_list = chainon (field_list,
2724 make_descriptor_field ("DIMCT",
2725 gnat_type_for_size (8, 1),
2726 record_type,
2727 size_int (ndim)));
2728
2729 field_list = chainon (field_list,
2730 make_descriptor_field ("ARSIZE",
2731 gnat_type_for_size (32, 1),
2732 record_type,
2733 size_in_bytes (type)));
2734
2735 /* Now build a pointer to the 0,0,0... element. */
2736 tem = build0 (PLACEHOLDER_EXPR, type);
2737 for (i = 0, inner_type = type; i < ndim;
2738 i++, inner_type = TREE_TYPE (inner_type))
2739 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2740 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2741 NULL_TREE, NULL_TREE);
2742
2743 field_list
2744 = chainon (field_list,
2745 make_descriptor_field
2746 ("A0",
2747 build_pointer_type_for_mode (inner_type, SImode, false),
2748 record_type,
2749 build1 (ADDR_EXPR,
2750 build_pointer_type_for_mode (inner_type, SImode,
2751 false),
2752 tem)));
2753
2754 /* Next come the addressing coefficients. */
2755 tem = size_one_node;
2756 for (i = 0; i < ndim; i++)
2757 {
2758 char fname[3];
2759 tree idx_length
2760 = size_binop (MULT_EXPR, tem,
2761 size_binop (PLUS_EXPR,
2762 size_binop (MINUS_EXPR,
2763 TYPE_MAX_VALUE (idx_arr[i]),
2764 TYPE_MIN_VALUE (idx_arr[i])),
2765 size_int (1)));
2766
d628c015
DR
2767 fname[0] = ((mech == By_Descriptor_NCA ||
2768 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
a1ab4c31
AC
2769 fname[1] = '0' + i, fname[2] = 0;
2770 field_list
2771 = chainon (field_list,
2772 make_descriptor_field (fname,
2773 gnat_type_for_size (32, 1),
2774 record_type, idx_length));
2775
d628c015 2776 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
2777 tem = idx_length;
2778 }
2779
2780 /* Finally here are the bounds. */
2781 for (i = 0; i < ndim; i++)
2782 {
2783 char fname[3];
2784
2785 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2786 field_list
2787 = chainon (field_list,
2788 make_descriptor_field
2789 (fname, gnat_type_for_size (32, 1), record_type,
2790 TYPE_MIN_VALUE (idx_arr[i])));
2791
2792 fname[0] = 'U';
2793 field_list
2794 = chainon (field_list,
2795 make_descriptor_field
2796 (fname, gnat_type_for_size (32, 1), record_type,
2797 TYPE_MAX_VALUE (idx_arr[i])));
2798 }
2799 break;
2800
2801 default:
2802 post_error ("unsupported descriptor type for &", gnat_entity);
2803 }
2804
10069d53 2805 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
a1ab4c31 2806 finish_record_type (record_type, field_list, 0, true);
a1ab4c31
AC
2807 return record_type;
2808}
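/* Layout sketch (illustration only, transcribed from the fields built
   above; the struct name is hypothetical): the common prologue of the
   32-bit descriptor corresponds to

     struct vms_desc32_prologue
     {
       unsigned short length;        LENGTH, 16 bits
       unsigned char  dtype;         DTYPE code
       unsigned char  klass;         CLASS code, 1, 4, 10 or 15 here
       void          *pointer;       POINTER, a 32-bit address
     };

   followed, for the SB, A and NCA classes, by the bound and addressing
   fields (SB_L1/SB_U1, or SCALE, DIGITS, AFLAGS, DIMCT, ARSIZE, A0, the
   M<i>/S<i> coefficients and the L<i>/U<i> bounds).  */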
2809
6ca2b0a0
DR
2810/* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2811 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2812 in the type contains in its DECL_INITIAL the expression to use when
2813 a constructor is made for the type. GNAT_ENTITY is an entity used
2814 to print out an error message if the mechanism cannot be applied to
2815 an object of that type and also for the name. */
2816
2817tree
d628c015 2818build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
6ca2b0a0
DR
2819{
2820 tree record64_type = make_node (RECORD_TYPE);
2821 tree pointer64_type;
2822 tree field_list64 = 0;
c6bd4220 2823 int klass;
6ca2b0a0
DR
2824 int dtype = 0;
2825 tree inner_type;
2826 int ndim;
2827 int i;
2828 tree *idx_arr;
2829 tree tem;
2830
2831 /* If TYPE is an unconstrained array, use the underlying array type. */
2832 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2833 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2834
2835 /* If this is an array, compute the number of dimensions in the array,
2836 get the index types, and point to the inner type. */
2837 if (TREE_CODE (type) != ARRAY_TYPE)
2838 ndim = 0;
2839 else
2840 for (ndim = 1, inner_type = type;
2841 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2842 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2843 ndim++, inner_type = TREE_TYPE (inner_type))
2844 ;
2845
2846 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2847
2848 if (mech != By_Descriptor_NCA
2849 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2850 for (i = ndim - 1, inner_type = type;
2851 i >= 0;
2852 i--, inner_type = TREE_TYPE (inner_type))
2853 idx_arr[i] = TYPE_DOMAIN (inner_type);
2854 else
2855 for (i = 0, inner_type = type;
2856 i < ndim;
2857 i++, inner_type = TREE_TYPE (inner_type))
2858 idx_arr[i] = TYPE_DOMAIN (inner_type);
2859
2860 /* Now get the DTYPE value. */
2861 switch (TREE_CODE (type))
2862 {
2863 case INTEGER_TYPE:
2864 case ENUMERAL_TYPE:
01ddebf2 2865 case BOOLEAN_TYPE:
6ca2b0a0
DR
2866 if (TYPE_VAX_FLOATING_POINT_P (type))
2867 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2868 {
2869 case 6:
2870 dtype = 10;
2871 break;
2872 case 9:
2873 dtype = 11;
2874 break;
2875 case 15:
2876 dtype = 27;
2877 break;
2878 }
2879 else
2880 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2881 {
2882 case 8:
2883 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2884 break;
2885 case 16:
2886 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2887 break;
2888 case 32:
2889 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2890 break;
2891 case 64:
2892 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2893 break;
2894 case 128:
2895 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2896 break;
2897 }
2898 break;
2899
2900 case REAL_TYPE:
2901 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2902 break;
2903
2904 case COMPLEX_TYPE:
2905 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2906 && TYPE_VAX_FLOATING_POINT_P (type))
2907 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2908 {
2909 case 6:
2910 dtype = 12;
2911 break;
2912 case 9:
2913 dtype = 13;
2914 break;
2915 case 15:
2916 dtype = 29;
2917 }
2918 else
2919 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2920 break;
2921
2922 case ARRAY_TYPE:
2923 dtype = 14;
2924 break;
2925
2926 default:
2927 break;
2928 }
2929
2930 /* Get the CLASS value. */
2931 switch (mech)
2932 {
2933 case By_Descriptor_A:
c6bd4220 2934 klass = 4;
6ca2b0a0
DR
2935 break;
2936 case By_Descriptor_NCA:
c6bd4220 2937 klass = 10;
6ca2b0a0
DR
2938 break;
2939 case By_Descriptor_SB:
c6bd4220 2940 klass = 15;
6ca2b0a0
DR
2941 break;
2942 case By_Descriptor:
2943 case By_Descriptor_S:
2944 default:
c6bd4220 2945 klass = 1;
6ca2b0a0
DR
2946 break;
2947 }
2948
2949 /* Make the type for a 64bit descriptor for VMS. The first six fields
2950 are the same for all types. */
2951
2952 field_list64 = chainon (field_list64,
2953 make_descriptor_field ("MBO",
2954 gnat_type_for_size (16, 1),
2955 record64_type, size_int (1)));
2956
2957 field_list64 = chainon (field_list64,
2958 make_descriptor_field ("DTYPE",
2959 gnat_type_for_size (8, 1),
2960 record64_type, size_int (dtype)));
2961 field_list64 = chainon (field_list64,
2962 make_descriptor_field ("CLASS",
2963 gnat_type_for_size (8, 1),
c6bd4220 2964 record64_type, size_int (klass)));
6ca2b0a0
DR
2965
2966 field_list64 = chainon (field_list64,
2967 make_descriptor_field ("MBMO",
2968 gnat_type_for_size (32, 1),
2969 record64_type, ssize_int (-1)));
2970
2971 field_list64
2972 = chainon (field_list64,
2973 make_descriptor_field
2974 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2975 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2976
2977 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2978
2979 field_list64
2980 = chainon (field_list64,
2981 make_descriptor_field
2982 ("POINTER", pointer64_type, record64_type,
2983 build_unary_op (ADDR_EXPR,
2984 pointer64_type,
2985 build0 (PLACEHOLDER_EXPR, type))));
2986
2987 switch (mech)
2988 {
2989 case By_Descriptor:
2990 case By_Descriptor_S:
2991 break;
2992
2993 case By_Descriptor_SB:
2994 field_list64
2995 = chainon (field_list64,
2996 make_descriptor_field
2997 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2998 TREE_CODE (type) == ARRAY_TYPE
2999 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3000 field_list64
3001 = chainon (field_list64,
3002 make_descriptor_field
3003 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3004 TREE_CODE (type) == ARRAY_TYPE
3005 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3006 break;
3007
3008 case By_Descriptor_A:
3009 case By_Descriptor_NCA:
3010 field_list64 = chainon (field_list64,
3011 make_descriptor_field ("SCALE",
3012 gnat_type_for_size (8, 1),
3013 record64_type,
3014 size_zero_node));
3015
3016 field_list64 = chainon (field_list64,
3017 make_descriptor_field ("DIGITS",
3018 gnat_type_for_size (8, 1),
3019 record64_type,
3020 size_zero_node));
3021
3022 field_list64
3023 = chainon (field_list64,
3024 make_descriptor_field
3025 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3026 size_int (mech == By_Descriptor_NCA
3027 ? 0
3028 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3029 : (TREE_CODE (type) == ARRAY_TYPE
3030 && TYPE_CONVENTION_FORTRAN_P (type)
3031 ? 224 : 192))));
3032
3033 field_list64 = chainon (field_list64,
3034 make_descriptor_field ("DIMCT",
3035 gnat_type_for_size (8, 1),
3036 record64_type,
3037 size_int (ndim)));
3038
3039 field_list64 = chainon (field_list64,
3040 make_descriptor_field ("MBZ",
3041 gnat_type_for_size (32, 1),
3042 record64_type,
3043 size_int (0)));
3044 field_list64 = chainon (field_list64,
3045 make_descriptor_field ("ARSIZE",
3046 gnat_type_for_size (64, 1),
3047 record64_type,
3048 size_in_bytes (type)));
3049
3050 /* Now build a pointer to the 0,0,0... element. */
3051 tem = build0 (PLACEHOLDER_EXPR, type);
3052 for (i = 0, inner_type = type; i < ndim;
3053 i++, inner_type = TREE_TYPE (inner_type))
3054 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3055 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3056 NULL_TREE, NULL_TREE);
3057
3058 field_list64
3059 = chainon (field_list64,
3060 make_descriptor_field
3061 ("A0",
3062 build_pointer_type_for_mode (inner_type, DImode, false),
3063 record64_type,
3064 build1 (ADDR_EXPR,
3065 build_pointer_type_for_mode (inner_type, DImode,
3066 false),
3067 tem)));
3068
3069 /* Next come the addressing coefficients. */
3070 tem = size_one_node;
3071 for (i = 0; i < ndim; i++)
3072 {
3073 char fname[3];
3074 tree idx_length
3075 = size_binop (MULT_EXPR, tem,
3076 size_binop (PLUS_EXPR,
3077 size_binop (MINUS_EXPR,
3078 TYPE_MAX_VALUE (idx_arr[i]),
3079 TYPE_MIN_VALUE (idx_arr[i])),
3080 size_int (1)));
3081
3082 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3083 fname[1] = '0' + i, fname[2] = 0;
3084 field_list64
3085 = chainon (field_list64,
3086 make_descriptor_field (fname,
3087 gnat_type_for_size (64, 1),
3088 record64_type, idx_length));
3089
3090 if (mech == By_Descriptor_NCA)
3091 tem = idx_length;
3092 }
3093
3094 /* Finally here are the bounds. */
3095 for (i = 0; i < ndim; i++)
3096 {
3097 char fname[3];
3098
3099 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3100 field_list64
3101 = chainon (field_list64,
3102 make_descriptor_field
3103 (fname, gnat_type_for_size (64, 1), record64_type,
3104 TYPE_MIN_VALUE (idx_arr[i])));
3105
3106 fname[0] = 'U';
3107 field_list64
3108 = chainon (field_list64,
3109 make_descriptor_field
3110 (fname, gnat_type_for_size (64, 1), record64_type,
3111 TYPE_MAX_VALUE (idx_arr[i])));
3112 }
3113 break;
3114
3115 default:
3116 post_error ("unsupported descriptor type for &", gnat_entity);
3117 }
3118
10069d53 3119 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
6ca2b0a0 3120 finish_record_type (record64_type, field_list64, 0, true);
6ca2b0a0
DR
3121 return record64_type;
3122}
3123
a1ab4c31
AC
3124/* Utility routine for above code to make a field. */
3125
3126static tree
3127make_descriptor_field (const char *name, tree type,
3128 tree rec_type, tree initial)
3129{
3130 tree field
3131 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3132
3133 DECL_INITIAL (field) = initial;
3134 return field;
3135}
3136
d628c015
DR
3137/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3138 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3139 which the VMS descriptor is passed. */
a1ab4c31
AC
3140
3141static tree
d628c015
DR
3142convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3143{
3144 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3145 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3146 /* The CLASS field is the 3rd field in the descriptor. */
c6bd4220 3147 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
d628c015 3148 /* The POINTER field is the 6th field in the descriptor. */
c6bd4220 3149 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
d628c015
DR
3150
3151 /* Retrieve the value of the POINTER field. */
3152 tree gnu_expr64
3153 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3154
3155 if (POINTER_TYPE_P (gnu_type))
3156 return convert (gnu_type, gnu_expr64);
3157
3158 else if (TYPE_FAT_POINTER_P (gnu_type))
3159 {
3160 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3161 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3162 tree template_type = TREE_TYPE (p_bounds_type);
3163 tree min_field = TYPE_FIELDS (template_type);
3164 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3165 tree template_tree, template_addr, aflags, dimct, t, u;
d628c015 3166 /* See the head comment of build_vms_descriptor. */
c6bd4220 3167 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
d628c015
DR
3168 tree lfield, ufield;
3169
3170 /* Convert POINTER to the type of the P_ARRAY field. */
3171 gnu_expr64 = convert (p_array_type, gnu_expr64);
3172
c6bd4220 3173 switch (iklass)
d628c015
DR
3174 {
3175 case 1: /* Class S */
3176 case 15: /* Class SB */
3177 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
c6bd4220 3178 t = TREE_CHAIN (TREE_CHAIN (klass));
d628c015
DR
3179 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3180 t = tree_cons (min_field,
3181 convert (TREE_TYPE (min_field), integer_one_node),
3182 tree_cons (max_field,
3183 convert (TREE_TYPE (max_field), t),
3184 NULL_TREE));
6bf68a93
LG
3185 template_tree = gnat_build_constructor (template_type, t);
3186 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
d628c015
DR
3187
3188 /* For class S, we are done. */
c6bd4220 3189 if (iklass == 1)
d628c015
DR
3190 break;
3191
3192 /* Test that we really have an SB descriptor, like DEC Ada. */
c6bd4220
EB
3193 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3194 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
d628c015
DR
3195 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3196 /* If so, there is already a template in the descriptor and
3197 it is located right after the POINTER field. The fields are
3198 64 bits, so they must be repacked. */
3199 t = TREE_CHAIN (pointer64);
3200 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3201 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3202
3203 t = TREE_CHAIN (t);
3204 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3205 ufield = convert
3206 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3207
3208 /* Build the template in the form of a constructor. */
3209 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3210 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3211 ufield, NULL_TREE));
6bf68a93 3212 template_tree = gnat_build_constructor (template_type, t);
d628c015
DR
3213
3214 /* Otherwise use the {1, LENGTH} template we built above. */
3215 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3216 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 3217 template_tree),
d628c015
DR
3218 template_addr);
3219 break;
3220
3221 case 4: /* Class A */
3222 /* The AFLAGS field is the 3rd field after the pointer in the
3223 descriptor. */
3224 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3225 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3226 /* The DIMCT field is the next field in the descriptor after
3227 aflags. */
3228 t = TREE_CHAIN (t);
3229 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3230 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3231 or FL_COEFF or FL_BOUNDS not set. */
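/* 192 is the combined mask of the FL_COEFF and FL_BOUNDS bits that the
   test below requires to be set, per the comment above.  */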
3232 u = build_int_cst (TREE_TYPE (aflags), 192);
3233 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3234 build_binary_op (NE_EXPR, integer_type_node,
3235 dimct,
3236 convert (TREE_TYPE (dimct),
3237 size_one_node)),
3238 build_binary_op (NE_EXPR, integer_type_node,
3239 build2 (BIT_AND_EXPR,
3240 TREE_TYPE (aflags),
3241 aflags, u),
3242 u));
3243 /* There is already a template in the descriptor and it is located
3244 in block 3. The fields are 64 bits, so they must be repacked. */
3245 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3246 (t)))));
3247 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3248 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3249
3250 t = TREE_CHAIN (t);
3251 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3252 ufield = convert
3253 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3254
3255 /* Build the template in the form of a constructor. */
3256 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3257 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3258 ufield, NULL_TREE));
6bf68a93 3259 template_tree = gnat_build_constructor (template_type, t);
f76d6e6f 3260 template_tree = build3 (COND_EXPR, template_type, u,
d628c015
DR
3261 build_call_raise (CE_Length_Check_Failed, Empty,
3262 N_Raise_Constraint_Error),
6bf68a93 3263 template_tree);
c6bd4220
EB
3264 template_addr
3265 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
d628c015
DR
3266 break;
3267
3268 case 10: /* Class NCA */
3269 default:
3270 post_error ("unsupported descriptor type for &", gnat_subprog);
3271 template_addr = integer_zero_node;
3272 break;
3273 }
3274
3275 /* Build the fat pointer in the form of a constructor. */
3276 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3277 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3278 template_addr, NULL_TREE));
3279 return gnat_build_constructor (gnu_type, t);
3280 }
3281
3282 else
3283 gcc_unreachable ();
3284}
3285
3286 /* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3287 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3288 which the VMS descriptor is passed. */
3289
3290static tree
3291convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
3292{
3293 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3294 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3295 /* The CLASS field is the 3rd field in the descriptor. */
c6bd4220 3296 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
a1ab4c31 3297 /* The POINTER field is the 4th field in the descriptor. */
c6bd4220 3298 tree pointer = TREE_CHAIN (klass);
a1ab4c31
AC
3299
3300 /* Retrieve the value of the POINTER field. */
d628c015 3301 tree gnu_expr32
a1ab4c31
AC
3302 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3303
3304 if (POINTER_TYPE_P (gnu_type))
d628c015 3305 return convert (gnu_type, gnu_expr32);
a1ab4c31
AC
3306
3307 else if (TYPE_FAT_POINTER_P (gnu_type))
3308 {
3309 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3310 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3311 tree template_type = TREE_TYPE (p_bounds_type);
3312 tree min_field = TYPE_FIELDS (template_type);
3313 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3314 tree template_tree, template_addr, aflags, dimct, t, u;
a1ab4c31 3315 /* See the head comment of build_vms_descriptor. */
c6bd4220 3316 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
a1ab4c31
AC
3317
3318 /* Convert POINTER to the type of the P_ARRAY field. */
d628c015 3319 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31 3320
c6bd4220 3321 switch (iklass)
a1ab4c31
AC
3322 {
3323 case 1: /* Class S */
3324 case 15: /* Class SB */
3325 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3326 t = TYPE_FIELDS (desc_type);
3327 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3328 t = tree_cons (min_field,
3329 convert (TREE_TYPE (min_field), integer_one_node),
3330 tree_cons (max_field,
3331 convert (TREE_TYPE (max_field), t),
3332 NULL_TREE));
6bf68a93
LG
3333 template_tree = gnat_build_constructor (template_type, t);
3334 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
a1ab4c31
AC
3335
3336 /* For class S, we are done. */
c6bd4220 3337 if (iklass == 1)
a1ab4c31
AC
3338 break;
3339
3340 /* Test that we really have an SB descriptor, like DEC Ada. */
c6bd4220
EB
3341 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3342 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
a1ab4c31
AC
3343 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3344 /* If so, there is already a template in the descriptor and
3345 it is located right after the POINTER field. */
3346 t = TREE_CHAIN (pointer);
c6bd4220
EB
3347 template_tree
3348 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
a1ab4c31
AC
3349 /* Otherwise use the {1, LENGTH} template we built above. */
3350 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3351 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 3352 template_tree),
a1ab4c31
AC
3353 template_addr);
3354 break;
3355
3356 case 4: /* Class A */
3357 /* The AFLAGS field is the 7th field in the descriptor. */
3358 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3359 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3360 /* The DIMCT field is the 8th field in the descriptor. */
3361 t = TREE_CHAIN (t);
3362 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3363 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3364 or FL_COEFF or FL_BOUNDS not set. */
3365 u = build_int_cst (TREE_TYPE (aflags), 192);
3366 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3367 build_binary_op (NE_EXPR, integer_type_node,
3368 dimct,
3369 convert (TREE_TYPE (dimct),
3370 size_one_node)),
3371 build_binary_op (NE_EXPR, integer_type_node,
3372 build2 (BIT_AND_EXPR,
3373 TREE_TYPE (aflags),
3374 aflags, u),
3375 u));
a1ab4c31
AC
3376 /* There is already a template in the descriptor and it is
3377 located at the start of block 3 (12th field). */
3378 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
c6bd4220
EB
3379 template_tree
3380 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
f76d6e6f 3381 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
d628c015
DR
3382 build_call_raise (CE_Length_Check_Failed, Empty,
3383 N_Raise_Constraint_Error),
6bf68a93 3384 template_tree);
c6bd4220
EB
3385 template_addr
3386 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
a1ab4c31
AC
3387 break;
3388
3389 case 10: /* Class NCA */
3390 default:
3391 post_error ("unsupported descriptor type for &", gnat_subprog);
3392 template_addr = integer_zero_node;
3393 break;
3394 }
3395
3396 /* Build the fat pointer in the form of a constructor. */
d628c015 3397 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
a1ab4c31
AC
3398 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3399 template_addr, NULL_TREE));
d628c015 3400
a1ab4c31
AC
3401 return gnat_build_constructor (gnu_type, t);
3402 }
3403
3404 else
3405 gcc_unreachable ();
3406}
3407
a981c964
EB
3408/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3409 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3410 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3411 VMS descriptor is passed. */
d628c015
DR
3412
3413static tree
a981c964
EB
3414convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3415 Entity_Id gnat_subprog)
d628c015
DR
3416{
3417 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3418 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3419 tree mbo = TYPE_FIELDS (desc_type);
3420 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3421 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
a981c964 3422 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 3423
a981c964
EB
3424 /* If the field name is not MBO, this is a 32-bit descriptor with no
3425 alternate. Otherwise the primary is 64-bit and the alternate 32-bit. */
d628c015 3426 if (strcmp (mbostr, "MBO") != 0)
d628c015
DR
3427 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3428
a981c964 3429 /* Build the test for 64-bit descriptor. */
d628c015
DR
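/* The 64-bit layout is recognized by MBO reading 1 and MBMO reading -1;
   any other combination means the 32-bit layout (see the head comment
   of build_vms_descriptor).  */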
3430 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3431 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964
EB
3432 is64bit
3433 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3434 build_binary_op (EQ_EXPR, integer_type_node,
3435 convert (integer_type_node, mbo),
3436 integer_one_node),
3437 build_binary_op (EQ_EXPR, integer_type_node,
3438 convert (integer_type_node, mbmo),
3439 integer_minus_one_node));
3440
3441 /* Build the 2 possible end results. */
3442 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3443 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3444 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3445
3446 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015
DR
3447}
3448
a1ab4c31
AC
3449/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3450 and the GNAT node GNAT_SUBPROG. */
3451
3452void
3453build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3454{
3455 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3456 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3457 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3458 tree gnu_body;
3459
3460 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3461 gnu_param_list = NULL_TREE;
3462
3463 begin_subprog_body (gnu_stub_decl);
3464 gnat_pushlevel ();
3465
3466 start_stmt_group ();
3467
3468 /* Loop over the parameters of the stub and translate any of them
3469 passed by descriptor into a by-reference one. */
3470 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3471 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3472 gnu_stub_param;
3473 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3474 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3475 {
3476 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
a981c964
EB
3477 gnu_param
3478 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3479 gnu_stub_param,
3480 DECL_PARM_ALT_TYPE (gnu_stub_param),
3481 gnat_subprog);
a1ab4c31
AC
3482 else
3483 gnu_param = gnu_stub_param;
3484
3485 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3486 }
3487
3488 gnu_body = end_stmt_group ();
3489
3490 /* Invoke the internal subprogram. */
3491 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3492 gnu_subprog);
3493 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3494 gnu_subprog_addr,
3495 nreverse (gnu_param_list));
3496
3497 /* Propagate the return value, if any. */
3498 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3499 append_to_statement_list (gnu_subprog_call, &gnu_body);
3500 else
3501 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3502 gnu_subprog_call),
3503 &gnu_body);
3504
3505 gnat_poplevel ();
3506
3507 allocate_struct_function (gnu_stub_decl, false);
a406865a 3508 end_subprog_body (gnu_body);
a1ab4c31
AC
3509}
3510\f
3511/* Build a type to be used to represent an aliased object whose nominal
3512 type is an unconstrained array. This consists of a RECORD_TYPE containing
3513 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3514 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3515 is used to represent an arbitrary unconstrained object. Use NAME
3516 as the name of the record. */
3517
3518tree
3519build_unc_object_type (tree template_type, tree object_type, tree name)
3520{
3521 tree type = make_node (RECORD_TYPE);
3522 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3523 template_type, type, 0, 0, 0, 1);
3524 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3525 type, 0, 0, 0, 1);
3526
3527 TYPE_NAME (type) = name;
3528 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3529 finish_record_type (type,
3530 chainon (chainon (NULL_TREE, template_field),
3531 array_field),
3532 0, false);
3533
3534 return type;
3535}
3536
3537/* Same, taking a thin or fat pointer type instead of a template type. */
3538
3539tree
3540build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3541 tree name)
3542{
3543 tree template_type;
3544
3545 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3546
3547 template_type
3548 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3549 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3550 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3551 return build_unc_object_type (template_type, object_type, name);
3552}
3553
3554/* Shift the component offsets within an unconstrained object TYPE to make it
3555 suitable for use as a designated type for thin pointers. */
3556
3557void
3558shift_unc_components_for_thin_pointers (tree type)
3559{
3560 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3561 allocated past the BOUNDS template. The designated type is adjusted to
3562 have ARRAY at position zero and the template at a negative offset, so
3563 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
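/* For instance, if ARRAY had been laid out at byte offset 16 after BOUNDS,
   BOUNDS ends up at offset -16 and ARRAY at offset 0; the sizes here are
   purely illustrative.  */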
3564
3565 tree bounds_field = TYPE_FIELDS (type);
3566 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3567
3568 DECL_FIELD_OFFSET (bounds_field)
3569 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3570
3571 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3572 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3573}
3574\f
229077b0
EB
3575/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3576 In the normal case this is just two adjustments, but we have more to
3577 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
3578
3579void
3580update_pointer_to (tree old_type, tree new_type)
3581{
3582 tree ptr = TYPE_POINTER_TO (old_type);
3583 tree ref = TYPE_REFERENCE_TO (old_type);
3584 tree ptr1, ref1;
3585 tree type;
3586
3587 /* If this is the main variant, process all the other variants first. */
3588 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3589 for (type = TYPE_NEXT_VARIANT (old_type); type;
3590 type = TYPE_NEXT_VARIANT (type))
3591 update_pointer_to (type, new_type);
3592
229077b0 3593 /* If no pointers and no references, we are done. */
a1ab4c31
AC
3594 if (!ptr && !ref)
3595 return;
3596
3597 /* Merge the old type qualifiers in the new type.
3598
3599 Each old variant has qualifiers for specific reasons, and the new
229077b0 3600 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
3601 information grabbed at some point, and merging the two simply unifies
3602 these inputs into the final type description.
3603
3604 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
3605 type designating it; after the designated type's freeze, we get here with
3606 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3607 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
3608 designated type, because that's what it really is.
3609
229077b0
EB
3610 We might also get here for a non-dummy OLD_TYPE variant with different
3611 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 3612 to private record type elaboration (see the comments around the call to
229077b0
EB
3613 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3614 the qualifiers in those cases too, to avoid accidentally discarding the
3615 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3616 new_type
3617 = build_qualified_type (new_type,
3618 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3619
3620 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
3621 if (old_type == new_type)
3622 return;
3623
3624 /* Otherwise, first handle the simple case. */
3625 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3626 {
3627 TYPE_POINTER_TO (new_type) = ptr;
3628 TYPE_REFERENCE_TO (new_type) = ref;
3629
3630 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3631 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3632 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3633 TREE_TYPE (ptr1) = new_type;
3634
3635 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3636 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3637 ref1 = TYPE_NEXT_VARIANT (ref1))
3638 TREE_TYPE (ref1) = new_type;
3639 }
3640
229077b0 3641 /* Now deal with the unconstrained array case. In this case the "pointer"
a1ab4c31
AC
3642 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3643 Turn them into pointers to the correct types using update_pointer_to. */
229077b0 3644 else if (!TYPE_FAT_POINTER_P (ptr))
a1ab4c31
AC
3645 gcc_unreachable ();
3646
3647 else
3648 {
3649 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3650 tree array_field = TYPE_FIELDS (ptr);
3651 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3652 tree new_ptr = TYPE_POINTER_TO (new_type);
3653 tree new_ref;
3654 tree var;
3655
3656 /* Make pointers to the dummy template point to the real template. */
3657 update_pointer_to
3658 (TREE_TYPE (TREE_TYPE (bounds_field)),
3659 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3660
3661 /* The references to the template bounds present in the array type
229077b0
EB
3662 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3663 are updating PTR to make it a full replacement for NEW_PTR as
3664 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3665 to make it of type PTR. */
a1ab4c31
AC
3666 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3667 build0 (PLACEHOLDER_EXPR, ptr),
3668 bounds_field, NULL_TREE);
3669
229077b0 3670 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
77022fa8 3671 to the dummy array point to it. */
a1ab4c31
AC
3672 update_pointer_to
3673 (TREE_TYPE (TREE_TYPE (array_field)),
3674 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3675 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3676
229077b0 3677 /* Make PTR the pointer to NEW_TYPE. */
a1ab4c31
AC
3678 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3679 = TREE_TYPE (new_type) = ptr;
3680
40c88b94
EB
3681 /* And show the original pointer NEW_PTR to the debugger. This is the
3682 counterpart of the equivalent processing in gnat_pushdecl when the
3683 unconstrained array type is frozen after access types to it. Note
3684 that update_pointer_to can be invoked multiple times on the same
3685 pair of types because of the type variants. */
3686 if (TYPE_NAME (ptr)
3687 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3688 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3689 {
3690 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3691 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3692 }
a1ab4c31
AC
3693 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3694 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3695
3696 /* Now handle updating the allocation record, what the thin pointer
3697 points to. Update all pointers from the old record into the new
3698 one, update the type of the array field, and recompute the size. */
3699 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3700
3701 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3702 = TREE_TYPE (TREE_TYPE (array_field));
3703
3704 /* The size recomputation needs to account for alignment constraints, so
3705 we let layout_type work it out. This will reset the field offsets to
3706 what they would be in a regular record, so we shift them back to what
3707 we want them to be for a thin pointer designated type afterwards. */
3708 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3709 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3710 TYPE_SIZE (new_obj_rec) = 0;
3711 layout_type (new_obj_rec);
3712
3713 shift_unc_components_for_thin_pointers (new_obj_rec);
3714
3715 /* We are done, at last. */
3716 rest_of_record_type_compilation (ptr);
3717 }
3718}
3719\f
8df2e902
EB
3720/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3721 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
3722
3723static tree
3724convert_to_fat_pointer (tree type, tree expr)
3725{
3726 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
8df2e902 3727 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 3728 tree etype = TREE_TYPE (expr);
6bf68a93 3729 tree template_tree;
a1ab4c31 3730
8df2e902
EB
3731 /* If EXPR is null, make a fat pointer that contains null pointers to the
3732 template and array. */
a1ab4c31
AC
3733 if (integer_zerop (expr))
3734 return
3735 gnat_build_constructor
3736 (type,
3737 tree_cons (TYPE_FIELDS (type),
8df2e902 3738 convert (p_array_type, expr),
a1ab4c31
AC
3739 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3740 convert (build_pointer_type (template_type),
3741 expr),
3742 NULL_TREE)));
3743
8df2e902 3744 /* If EXPR is a thin pointer, make the template and data from the record. */
a1ab4c31
AC
3745 else if (TYPE_THIN_POINTER_P (etype))
3746 {
3747 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3748
3749 expr = save_expr (expr);
3750 if (TREE_CODE (expr) == ADDR_EXPR)
3751 expr = TREE_OPERAND (expr, 0);
3752 else
3753 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3754
6bf68a93 3755 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
a1ab4c31
AC
3756 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3757 build_component_ref (expr, NULL_TREE,
3758 TREE_CHAIN (fields), false));
3759 }
8df2e902
EB
3760
3761 /* Otherwise, build the constructor for the template. */
a1ab4c31 3762 else
6bf68a93 3763 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
a1ab4c31 3764
8df2e902 3765 /* The final result is a constructor for the fat pointer.
a1ab4c31 3766
8df2e902
EB
3767 If EXPR is an argument of a foreign convention subprogram, the type it
3768 points to is directly the component type. In this case, the expression
a1ab4c31 3769 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 3770 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 3771 required, for instance because it ensures that possible later folding of
8df2e902 3772 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
3773 same type as the initial reference.
3774
8df2e902
EB
3775 Note that the call to "build_template" above is still fine because it
3776 will only refer to the provided TEMPLATE_TYPE in this case. */
3777 return
3778 gnat_build_constructor
3779 (type,
3780 tree_cons (TYPE_FIELDS (type),
3781 convert (p_array_type, expr),
3782 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
c6bd4220
EB
3783 build_unary_op (ADDR_EXPR, NULL_TREE,
3784 template_tree),
8df2e902 3785 NULL_TREE)));
a1ab4c31
AC
3786}
3787\f
3788 /* Convert to a thin pointer type, TYPE. The only thing we know how to
3789 convert is a fat pointer, so convert EXPR to a fat pointer first if it
3790 is not already one. */
3791
3792static tree
3793convert_to_thin_pointer (tree type, tree expr)
3794{
3795 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3796 expr
3797 = convert_to_fat_pointer
3798 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3799
3800 /* We get the pointer to the data and use a NOP_EXPR to make it the
3801 proper GCC type. */
3802 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3803 false);
3804 expr = build1 (NOP_EXPR, type, expr);
3805
3806 return expr;
3807}
3808\f
3809/* Create an expression whose value is that of EXPR,
3810 converted to type TYPE. The TREE_TYPE of the value
3811 is always TYPE. This function implements all reasonable
3812 conversions; callers should filter out those that are
3813 not permitted by the language being compiled. */
3814
3815tree
3816convert (tree type, tree expr)
3817{
3818 enum tree_code code = TREE_CODE (type);
3819 tree etype = TREE_TYPE (expr);
3820 enum tree_code ecode = TREE_CODE (etype);
3821
3822 /* If EXPR is already the right type, we are done. */
3823 if (type == etype)
3824 return expr;
3825
3826 /* If both input and output have padding and are of variable size, do this
3827 as an unchecked conversion. Likewise if one is a mere variant of the
3828 other, so we avoid a pointless unpad/repad sequence. */
3829 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3830 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3831 && (!TREE_CONSTANT (TYPE_SIZE (type))
3832 || !TREE_CONSTANT (TYPE_SIZE (etype))
3833 || gnat_types_compatible_p (type, etype)
3834 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3835 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3836 ;
3837
f88facfe
EB
3838 /* If the output type has padding, convert to the inner type and make a
3839 constructor to build the record, unless a variable size is involved. */
a1ab4c31
AC
3840 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3841 {
3842 /* If we previously converted from another type and our type is
3843 of variable size, remove the conversion to avoid the need for
f88facfe 3844 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
3845 original and packable version. */
3846 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3847 && (!TREE_CONSTANT (TYPE_SIZE (type))
3848 || (ecode == RECORD_TYPE
3849 && TYPE_NAME (etype)
3850 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3851 expr = TREE_OPERAND (expr, 0);
3852
3853 /* If we are just removing the padding from expr, convert the original
3854 object if we have variable size in order to avoid the need for some
f88facfe 3855 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
3856 of the other, so we avoid a pointless unpad/repad sequence. */
3857 if (TREE_CODE (expr) == COMPONENT_REF
3858 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3859 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3860 && (!TREE_CONSTANT (TYPE_SIZE (type))
3861 || gnat_types_compatible_p (type,
3862 TREE_TYPE (TREE_OPERAND (expr, 0)))
3863 || (ecode == RECORD_TYPE
3864 && TYPE_NAME (etype)
3865 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3866 return convert (type, TREE_OPERAND (expr, 0));
3867
3868 /* If the result type is a padded type with a self-referentially-sized
f88facfe
EB
3869 field and the expression type is a record, do this as an unchecked
3870 conversion. */
3871 if (TREE_CODE (etype) == RECORD_TYPE
3872 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
a1ab4c31
AC
3873 return unchecked_convert (type, expr, false);
3874
f88facfe
EB
3875 /* If we are converting between array types with variable size, do the
3876 final conversion as an unchecked conversion, again to avoid the need
3877 for some variable-sized temporaries. If valid, this conversion is
3878 very likely purely technical and without real effects. */
3879 if (TREE_CODE (etype) == ARRAY_TYPE
3880 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3881 && !TREE_CONSTANT (TYPE_SIZE (etype))
3882 && !TREE_CONSTANT (TYPE_SIZE (type)))
3883 return unchecked_convert (type,
3884 convert (TREE_TYPE (TYPE_FIELDS (type)),
3885 expr),
3886 false);
3887
3888 return
3889 gnat_build_constructor (type,
3890 tree_cons (TYPE_FIELDS (type),
3891 convert (TREE_TYPE
3892 (TYPE_FIELDS (type)),
3893 expr),
3894 NULL_TREE));
a1ab4c31
AC
3895 }
3896
3897 /* If the input type has padding, remove it and convert to the output type.
3898 The ordering of the conditions ensures that the output type is not a
3899 padding type here, as it is not clear whether the conversion would
3900 always be correct if this were to happen. */
3901 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3902 {
3903 tree unpadded;
3904
3905 /* If we have just converted to this padded type, just get the
3906 inner expression. */
3907 if (TREE_CODE (expr) == CONSTRUCTOR
3908 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3909 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3910 == TYPE_FIELDS (etype))
3911 unpadded
3912 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3913
3914 /* Otherwise, build an explicit component reference. */
3915 else
3916 unpadded
3917 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3918
3919 return convert (type, unpadded);
3920 }
3921
3922 /* If the input is a biased type, adjust first. */
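/* A biased type stores VALUE - TYPE_MIN_VALUE, so the bias must be added
   back in order to recover the actual value before converting it.  */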
3923 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3924 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3925 fold_convert (TREE_TYPE (etype),
3926 expr),
3927 TYPE_MIN_VALUE (etype)));
3928
3929 /* If the input is a justified modular type, we need to extract the actual
3930 object before converting it to any other type with the exceptions of an
3931 unconstrained array or of a mere type variant. It is useful to avoid the
3932 extraction and conversion in the type variant case because it could end
3933 up replacing a VAR_DECL expr by a constructor and we might be about to
3934 take the address of the result. */
3935 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3936 && code != UNCONSTRAINED_ARRAY_TYPE
3937 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3938 return convert (type, build_component_ref (expr, NULL_TREE,
3939 TYPE_FIELDS (etype), false));
3940
3941 /* If converting to a type that contains a template, convert to the data
3942 type and then build the template. */
3943 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3944 {
3945 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3946
3947 /* If the source already has a template, get a reference to the
3948 associated array only, as we are going to rebuild a template
3949 for the target type anyway. */
3950 expr = maybe_unconstrained_array (expr);
3951
3952 return
3953 gnat_build_constructor
3954 (type,
3955 tree_cons (TYPE_FIELDS (type),
3956 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3957 obj_type, NULL_TREE),
3958 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3959 convert (obj_type, expr), NULL_TREE)));
3960 }
3961
3962 /* There are some special cases of expressions that we process
3963 specially. */
3964 switch (TREE_CODE (expr))
3965 {
3966 case ERROR_MARK:
3967 return expr;
3968
3969 case NULL_EXPR:
3970 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3971 conversion in gnat_expand_expr. NULL_EXPR does not represent
3972 an actual value, so no conversion is needed. */
3973 expr = copy_node (expr);
3974 TREE_TYPE (expr) = type;
3975 return expr;
3976
3977 case STRING_CST:
3978 /* If we are converting a STRING_CST to another constrained array type,
3979 just make a new one in the proper type. */
3980 if (code == ecode && AGGREGATE_TYPE_P (etype)
3981 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3982 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3983 {
3984 expr = copy_node (expr);
3985 TREE_TYPE (expr) = type;
3986 return expr;
3987 }
3988 break;
3989
7948ae37
OH
3990 case VECTOR_CST:
3991 /* If we are converting a VECTOR_CST to a mere variant type, just make
3992 a new one in the proper type. */
3993 if (code == ecode && gnat_types_compatible_p (type, etype))
3994 {
3995 expr = copy_node (expr);
3996 TREE_TYPE (expr) = type;
3997 return expr;
3998 }
3999
a1ab4c31
AC
4000 case CONSTRUCTOR:
4001 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4002 a new one in the proper type. */
4003 if (code == ecode && gnat_types_compatible_p (type, etype))
4004 {
4005 expr = copy_node (expr);
4006 TREE_TYPE (expr) = type;
4007 return expr;
4008 }
4009
4010 /* Likewise for a conversion between original and packable version, but
4011 we have to work harder in order to preserve type consistency. */
4012 if (code == ecode
4013 && code == RECORD_TYPE
4014 && TYPE_NAME (type) == TYPE_NAME (etype))
4015 {
4016 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4017 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4018 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4019 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4020 unsigned HOST_WIDE_INT idx;
4021 tree index, value;
4022
db868e1e
OH
4023 /* Whether we need to clear TREE_CONSTANT et al. on the output
4024 constructor when we convert in place. */
4025 bool clear_constant = false;
4026
a1ab4c31
AC
4027 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4028 {
4029 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4030 /* We expect only simple constructors. Otherwise, punt. */
4031 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4032 break;
4033 elt->index = field;
4034 elt->value = convert (TREE_TYPE (field), value);
db868e1e
OH
4035
4036 /* If packing has made this field a bitfield and the input
4037 value couldn't be emitted statically any more, we need to
4038 clear TREE_CONSTANT on our output. */
4039 if (!clear_constant && TREE_CONSTANT (expr)
4040 && !CONSTRUCTOR_BITFIELD_P (efield)
4041 && CONSTRUCTOR_BITFIELD_P (field)
4042 && !initializer_constant_valid_for_bitfield_p (value))
4043 clear_constant = true;
4044
a1ab4c31
AC
4045 efield = TREE_CHAIN (efield);
4046 field = TREE_CHAIN (field);
4047 }
4048
db868e1e
OH
4049 /* If we have been able to match and convert all the input fields
4050 to their output type, convert in place now. We'll fall back to a
4051 view conversion downstream otherwise. */
a1ab4c31
AC
4052 if (idx == len)
4053 {
4054 expr = copy_node (expr);
4055 TREE_TYPE (expr) = type;
4056 CONSTRUCTOR_ELTS (expr) = v;
db868e1e
OH
4057 if (clear_constant)
4058 TREE_CONSTANT (expr) = TREE_STATIC (expr) = false;
a1ab4c31
AC
4059 return expr;
4060 }
4061 }
7948ae37
OH
4062
4063 /* Likewise for a conversion between array type and vector type with a
4064 compatible representative array. */
4065 else if (code == VECTOR_TYPE
4066 && ecode == ARRAY_TYPE
4067 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4068 etype))
4069 {
4070 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4071 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4072 VEC(constructor_elt,gc) *v;
4073 unsigned HOST_WIDE_INT ix;
4074 tree value;
4075
4076 /* Build a VECTOR_CST from a *constant* array constructor. */
4077 if (TREE_CONSTANT (expr))
4078 {
4079 bool constant_p = true;
4080
4081 /* Iterate through elements and check if all constructor
4082 elements are *_CSTs. */
4083 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4084 if (!CONSTANT_CLASS_P (value))
4085 {
4086 constant_p = false;
4087 break;
4088 }
4089
4090 if (constant_p)
4091 return build_vector_from_ctor (type,
4092 CONSTRUCTOR_ELTS (expr));
4093 }
4094
4095 /* Otherwise, build a regular vector constructor. */
4096 v = VEC_alloc (constructor_elt, gc, len);
4097 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4098 {
4099 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4100 elt->index = NULL_TREE;
4101 elt->value = value;
4102 }
4103 expr = copy_node (expr);
4104 TREE_TYPE (expr) = type;
4105 CONSTRUCTOR_ELTS (expr) = v;
4106 return expr;
4107 }
a1ab4c31
AC
4108 break;
4109
4110 case UNCONSTRAINED_ARRAY_REF:
4111 /* Convert this to the type of the inner array by getting the address of
4112 the array from the template. */
4113 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4114 build_component_ref (TREE_OPERAND (expr, 0),
4115 get_identifier ("P_ARRAY"),
4116 NULL_TREE, false));
4117 etype = TREE_TYPE (expr);
4118 ecode = TREE_CODE (etype);
4119 break;
4120
4121 case VIEW_CONVERT_EXPR:
4122 {
4123 /* GCC 4.x is very sensitive to type consistency overall, and view
4124 conversions thus are very frequent. Even though just "convert"ing
4125 the inner operand to the output type is fine in most cases, it
4126 might expose unexpected input/output type mismatches in special
4127 circumstances so we avoid such recursive calls when we can. */
4128 tree op0 = TREE_OPERAND (expr, 0);
4129
4130 /* If we are converting back to the original type, we can just
4131 lift the input conversion. This is a common occurrence with
4132 switches back-and-forth amongst type variants. */
4133 if (type == TREE_TYPE (op0))
4134 return op0;
4135
7948ae37
OH
4136 /* Otherwise, if we're converting between two aggregate or vector
4137 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4138 target type in place or to just convert the inner expression. */
4139 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4140 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4141 {
4142 /* If we are converting between mere variants, we can just
4143 substitute the VIEW_CONVERT_EXPR in place. */
4144 if (gnat_types_compatible_p (type, etype))
4145 return build1 (VIEW_CONVERT_EXPR, type, op0);
4146
4147 /* Otherwise, we may just bypass the input view conversion unless
4148 one of the types is a fat pointer, which is handled by
4149 specialized code below which relies on exact type matching. */
4150 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4151 return convert (type, op0);
4152 }
4153 }
4154 break;
4155
4156 case INDIRECT_REF:
4157 /* If both types are record types, just convert the pointer and
4158 make a new INDIRECT_REF.
4159
4160 ??? Disable this for now since it causes problems with the
4161 code in build_binary_op for MODIFY_EXPR which wants to
4162 strip off conversions. But that code really is a mess and
4163 we need to do this a much better way some time. */
4164 if (0
4165 && (TREE_CODE (type) == RECORD_TYPE
4166 || TREE_CODE (type) == UNION_TYPE)
4167 && (TREE_CODE (etype) == RECORD_TYPE
4168 || TREE_CODE (etype) == UNION_TYPE)
4169 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4170 return build_unary_op (INDIRECT_REF, NULL_TREE,
4171 convert (build_pointer_type (type),
4172 TREE_OPERAND (expr, 0)));
4173 break;
4174
4175 default:
4176 break;
4177 }
4178
4179 /* Check for converting to a pointer to an unconstrained array. */
4180 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4181 return convert_to_fat_pointer (type, expr);
4182
7948ae37
OH
4183 /* If we are converting between two aggregate or vector types that are mere
4184 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4185 to a vector type from its representative array type. */
4186 else if ((code == ecode
4187 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4188 && gnat_types_compatible_p (type, etype))
4189 || (code == VECTOR_TYPE
4190 && ecode == ARRAY_TYPE
4191 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4192 etype)))
a1ab4c31
AC
4193 return build1 (VIEW_CONVERT_EXPR, type, expr);
4194
4195 /* In all other cases of related types, make a NOP_EXPR. */
4196 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4197 || (code == INTEGER_CST && ecode == INTEGER_CST
4198 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4199 return fold_convert (type, expr);
4200
4201 switch (code)
4202 {
4203 case VOID_TYPE:
4204 return fold_build1 (CONVERT_EXPR, type, expr);
4205
a1ab4c31
AC
4206 case INTEGER_TYPE:
4207 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4208 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4209 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4210 return unchecked_convert (type, expr, false);
4211 else if (TYPE_BIASED_REPRESENTATION_P (type))
4212 return fold_convert (type,
4213 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4214 convert (TREE_TYPE (type), expr),
4215 TYPE_MIN_VALUE (type)));
4216
4217 /* ... fall through ... */
4218
4219 case ENUMERAL_TYPE:
01ddebf2 4220 case BOOLEAN_TYPE:
a1ab4c31
AC
4221 /* If we are converting an additive expression to an integer type
4222 with lower precision, be wary of the optimization that can be
4223 applied by convert_to_integer. There are 2 problematic cases:
4224 - if the first operand was originally of a biased type,
4225 because we could be recursively called to convert it
4226 to an intermediate type and thus rematerialize the
4227 additive operator endlessly,
4228 - if the expression contains a placeholder, because an
4229 intermediate conversion that changes the sign could
4230 be inserted and thus introduce an artificial overflow
4231 at compile time when the placeholder is substituted. */
4232 if (code == INTEGER_TYPE
4233 && ecode == INTEGER_TYPE
4234 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4235 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4236 {
4237 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4238
4239 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4240 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4241 || CONTAINS_PLACEHOLDER_P (expr))
4242 return build1 (NOP_EXPR, type, expr);
4243 }
4244
4245 return fold (convert_to_integer (type, expr));
4246
4247 case POINTER_TYPE:
4248 case REFERENCE_TYPE:
4249 /* If converting between two pointers to records denoting
4250 both a template and type, adjust if needed to account
4251 for any differing offsets, since one might be negative. */
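/* The negative offset case arises for a designated record adjusted by
   shift_unc_components_for_thin_pointers, where the BOUNDS template sits
   below the ARRAY data.  */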
4252 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4253 {
4254 tree bit_diff
4255 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4256 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4257 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4258 sbitsize_int (BITS_PER_UNIT));
4259
4260 expr = build1 (NOP_EXPR, type, expr);
4261 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4262 if (integer_zerop (byte_diff))
4263 return expr;
4264
4265 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4266 fold (convert (sizetype, byte_diff)));
4267 }
4268
4269 /* If converting to a thin pointer, handle specially. */
4270 if (TYPE_THIN_POINTER_P (type)
4271 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4272 return convert_to_thin_pointer (type, expr);
4273
4274 /* If converting fat pointer to normal pointer, get the pointer to the
4275 array and then convert it. */
4276 else if (TYPE_FAT_POINTER_P (etype))
4277 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4278 NULL_TREE, false);
4279
4280 return fold (convert_to_pointer (type, expr));
4281
4282 case REAL_TYPE:
4283 return fold (convert_to_real (type, expr));
4284
4285 case RECORD_TYPE:
4286 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4287 return
4288 gnat_build_constructor
4289 (type, tree_cons (TYPE_FIELDS (type),
4290 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4291 NULL_TREE));
4292
4293 /* ... fall through ... */
4294
4295 case ARRAY_TYPE:
4296 /* In these cases, assume the front-end has validated the conversion.
4297 If the conversion is valid, it will be a bit-wise conversion, so
4298 it can be viewed as an unchecked conversion. */
4299 return unchecked_convert (type, expr, false);
4300
4301 case UNION_TYPE:
4302 /* This is either a conversion between a tagged type and some
4303 subtype, which we have to mark as a UNION_TYPE because of
4304 overlapping fields or a conversion of an Unchecked_Union. */
4305 return unchecked_convert (type, expr, false);
4306
4307 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4308 /* If the input is a VECTOR_TYPE, convert to the representative
4309 array type first. */
4310 if (ecode == VECTOR_TYPE)
4311 {
4312 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4313 etype = TREE_TYPE (expr);
4314 ecode = TREE_CODE (etype);
4315 }
4316
a1ab4c31
AC
4317 /* If EXPR is a constrained array, take its address, convert it to a
4318 fat pointer, and then dereference it. Likewise if EXPR is a
4319 record containing both a template and a constrained array.
4320 Note that a record representing a justified modular type
4321 always represents a packed constrained array. */
4322 if (ecode == ARRAY_TYPE
4323 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4324 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4325 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4326 return
4327 build_unary_op
4328 (INDIRECT_REF, NULL_TREE,
4329 convert_to_fat_pointer (TREE_TYPE (type),
4330 build_unary_op (ADDR_EXPR,
4331 NULL_TREE, expr)));
4332
4333 /* Do something very similar for converting one unconstrained
4334 array to another. */
4335 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4336 return
4337 build_unary_op (INDIRECT_REF, NULL_TREE,
4338 convert (TREE_TYPE (type),
4339 build_unary_op (ADDR_EXPR,
4340 NULL_TREE, expr)));
4341 else
4342 gcc_unreachable ();
4343
4344 case COMPLEX_TYPE:
4345 return fold (convert_to_complex (type, expr));
4346
4347 default:
4348 gcc_unreachable ();
4349 }
4350}
4351\f
4352/* Remove all conversions that are done in EXP. This includes converting
4353 from a padded type or to a justified modular type. If TRUE_ADDRESS
4354 is true, always return the address of the containing object even if
4355 the address is not bit-aligned. */
4356
4357tree
4358remove_conversions (tree exp, bool true_address)
4359{
4360 switch (TREE_CODE (exp))
4361 {
4362 case CONSTRUCTOR:
4363 if (true_address
4364 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4365 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4366 return
4367 remove_conversions (VEC_index (constructor_elt,
4368 CONSTRUCTOR_ELTS (exp), 0)->value,
4369 true);
4370 break;
4371
4372 case COMPONENT_REF:
4373 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4374 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4375 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4376 break;
4377
4378 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4379 CASE_CONVERT:
4380 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4381
4382 default:
4383 break;
4384 }
4385
4386 return exp;
4387}
4388\f
4389/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4390 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4391 likewise return an expression pointing to the underlying array. */
4392
4393tree
4394maybe_unconstrained_array (tree exp)
4395{
4396 enum tree_code code = TREE_CODE (exp);
c6bd4220 4397 tree new_exp;
a1ab4c31
AC
4398
4399 switch (TREE_CODE (TREE_TYPE (exp)))
4400 {
4401 case UNCONSTRAINED_ARRAY_TYPE:
4402 if (code == UNCONSTRAINED_ARRAY_REF)
4403 {
c6bd4220 4404 new_exp
a1ab4c31
AC
4405 = build_unary_op (INDIRECT_REF, NULL_TREE,
4406 build_component_ref (TREE_OPERAND (exp, 0),
4407 get_identifier ("P_ARRAY"),
4408 NULL_TREE, false));
c6bd4220
EB
4409 TREE_READONLY (new_exp) = TREE_STATIC (new_exp)
4410 = TREE_READONLY (exp);
4411 return new_exp;
a1ab4c31
AC
4412 }
4413
4414 else if (code == NULL_EXPR)
4415 return build1 (NULL_EXPR,
4416 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4417 (TREE_TYPE (TREE_TYPE (exp))))),
4418 TREE_OPERAND (exp, 0));
4419
4420 case RECORD_TYPE:
4421 /* If this is a padded type, convert to the unpadded type and see if
4422 it contains a template. */
4423 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4424 {
c6bd4220
EB
4425 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4426 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4427 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
a1ab4c31 4428 return
c6bd4220
EB
4429 build_component_ref (new_exp, NULL_TREE,
4430 TREE_CHAIN
4431 (TYPE_FIELDS (TREE_TYPE (new_exp))),
a1ab4c31
AC
4432 0);
4433 }
4434 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4435 return
4436 build_component_ref (exp, NULL_TREE,
4437 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4438 break;
4439
4440 default:
4441 break;
4442 }
4443
4444 return exp;
4445}
7948ae37
OH
4446
4447/* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4448 TYPE_REPRESENTATIVE_ARRAY. */
4449
4450tree
4451maybe_vector_array (tree exp)
4452{
4453 tree etype = TREE_TYPE (exp);
4454
4455 if (VECTOR_TYPE_P (etype))
4456 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4457
4458 return exp;
4459}
a1ab4c31 4460\f
afcea859 4461/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 4462 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
4463
4464static bool
4465can_fold_for_view_convert_p (tree expr)
4466{
4467 tree t1, t2;
4468
4469 /* The folder will fold NOP_EXPRs between integral types with the same
4470 precision (in the middle-end's sense). We cannot allow it if the
4471 types don't have the same precision in the Ada sense as well. */
4472 if (TREE_CODE (expr) != NOP_EXPR)
4473 return true;
4474
4475 t1 = TREE_TYPE (expr);
4476 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4477
4478 /* Defer to the folder for non-integral conversions. */
4479 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4480 return true;
4481
4482 /* Only fold conversions that preserve both precisions. */
4483 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4484 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4485 return true;
4486
4487 return false;
4488}
4489
a1ab4c31 4490/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4491 If NOTRUNC_P is true, truncation operations should be suppressed.
4492
4493 Special care is required with (source or target) integral types whose
4494 precision is not equal to their size, to make sure we fetch or assign
4495 the value bits whose location might depend on the endianness, e.g.
4496
4497 Rmsize : constant := 8;
4498 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4499
4500 type Bit_Array is array (1 .. Rmsize) of Boolean;
4501 pragma Pack (Bit_Array);
4502
4503 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4504
4505 Value : Int := 2#1000_0001#;
4506 Vbits : Bit_Array := To_Bit_Array (Value);
4507
4508 we expect the 8 bits at Vbits'Address to always contain Value, while
4509 their original location depends on the endianness, at Value'Address
84fb43a1 4510 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
4511
4512tree
4513unchecked_convert (tree type, tree expr, bool notrunc_p)
4514{
4515 tree etype = TREE_TYPE (expr);
4516
4517 /* If the expression is already the right type, we are done. */
4518 if (etype == type)
4519 return expr;
4520
4521 /* If both types are integral, just do a normal conversion.
4522 Likewise for a conversion to an unconstrained array. */
4523 if ((((INTEGRAL_TYPE_P (type)
4524 && !(TREE_CODE (type) == INTEGER_TYPE
4525 && TYPE_VAX_FLOATING_POINT_P (type)))
4526 || (POINTER_TYPE_P (type) && !TYPE_THIN_POINTER_P (type))
4527 || (TREE_CODE (type) == RECORD_TYPE
4528 && TYPE_JUSTIFIED_MODULAR_P (type)))
4529 && ((INTEGRAL_TYPE_P (etype)
4530 && !(TREE_CODE (etype) == INTEGER_TYPE
4531 && TYPE_VAX_FLOATING_POINT_P (etype)))
4532 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4533 || (TREE_CODE (etype) == RECORD_TYPE
4534 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4535 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4536 {
a1ab4c31
AC
4537 if (TREE_CODE (etype) == INTEGER_TYPE
4538 && TYPE_BIASED_REPRESENTATION_P (etype))
4539 {
4540 tree ntype = copy_type (etype);
a1ab4c31
AC
4541 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4542 TYPE_MAIN_VARIANT (ntype) = ntype;
4543 expr = build1 (NOP_EXPR, ntype, expr);
4544 }
4545
4546 if (TREE_CODE (type) == INTEGER_TYPE
4547 && TYPE_BIASED_REPRESENTATION_P (type))
4548 {
afcea859 4549 tree rtype = copy_type (type);
a1ab4c31
AC
4550 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4551 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4552 expr = convert (rtype, expr);
4553 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 4554 }
afcea859
EB
4555 else
4556 expr = convert (type, expr);
a1ab4c31
AC
4557 }
4558
afcea859
EB
4559 /* If we are converting to an integral type whose precision is not equal
4560 to its size, first unchecked convert to a record that contains an
4561 object of the output type. Then extract the field. */
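/* Going through a record with a single OBJ field of the output type is
   presumably what makes the access honor the RM size of the type, as per
   the endianness discussion in the head comment of this function.  */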
a1ab4c31
AC
4562 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4563 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4564 GET_MODE_BITSIZE (TYPE_MODE (type))))
4565 {
4566 tree rec_type = make_node (RECORD_TYPE);
4567 tree field = create_field_decl (get_identifier ("OBJ"), type,
4568 rec_type, 1, 0, 0, 0);
4569
4570 TYPE_FIELDS (rec_type) = field;
4571 layout_type (rec_type);
4572
4573 expr = unchecked_convert (rec_type, expr, notrunc_p);
4574 expr = build_component_ref (expr, NULL_TREE, field, 0);
4575 }
4576
afcea859
EB
4577 /* Similarly if we are converting from an integral type whose precision
4578 is not equal to its size. */
a1ab4c31
AC
4579 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4580 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4581 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4582 {
4583 tree rec_type = make_node (RECORD_TYPE);
4584 tree field
4585 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4586 1, 0, 0, 0);
4587
4588 TYPE_FIELDS (rec_type) = field;
4589 layout_type (rec_type);
4590
4591 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4592 expr = unchecked_convert (type, expr, notrunc_p);
4593 }
4594
7948ae37
OH
4595 /* We have a special case when we are converting between two unconstrained
4596 array types. In that case, take the address, convert the fat pointer
4597 types, and dereference. */
a1ab4c31
AC
4598 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4599 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4600 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4601 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4602 build_unary_op (ADDR_EXPR, NULL_TREE,
4603 expr)));
7948ae37
OH
4604
4605 /* Another special case is when we are converting to a vector type from its
4606 representative array type; this is a regular conversion. */
4607 else if (TREE_CODE (type) == VECTOR_TYPE
4608 && TREE_CODE (etype) == ARRAY_TYPE
4609 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4610 etype))
4611 expr = convert (type, expr);
4612
a1ab4c31
AC
4613 else
4614 {
4615 expr = maybe_unconstrained_array (expr);
4616 etype = TREE_TYPE (expr);
afcea859
EB
4617 if (can_fold_for_view_convert_p (expr))
4618 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4619 else
4620 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
4621 }
4622
afcea859
EB
4623 /* If the result is an integral type whose precision is not equal to its
4624 size, sign- or zero-extend the result. We need not do this if the input
4625 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
4626 is a biased type or if both the input and output are unsigned. */
4627 if (!notrunc_p
4628 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4629 && !(TREE_CODE (type) == INTEGER_TYPE
4630 && TYPE_BIASED_REPRESENTATION_P (type))
4631 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4632 GET_MODE_BITSIZE (TYPE_MODE (type)))
4633 && !(INTEGRAL_TYPE_P (etype)
4634 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4635 && operand_equal_p (TYPE_RM_SIZE (type),
4636 (TYPE_RM_SIZE (etype) != 0
4637 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4638 0))
4639 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4640 {
4641 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4642 TYPE_UNSIGNED (type));
4643 tree shift_expr
4644 = convert (base_type,
4645 size_binop (MINUS_EXPR,
4646 bitsize_int
4647 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4648 TYPE_RM_SIZE (type)));
4649 expr
4650 = convert (type,
4651 build_binary_op (RSHIFT_EXPR, base_type,
4652 build_binary_op (LSHIFT_EXPR, base_type,
4653 convert (base_type, expr),
4654 shift_expr),
4655 shift_expr));
4656 }
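      /* Illustrative sketch only, not part of the original source.  For a
	 32-bit mode and a 24-bit RM size, the shift count computed above is
	 32 - 24 = 8, so the two shifts amount to the plain C idiom below
	 (RAW is a hypothetical name for the raw 32-bit pattern); like the
	 code above, this relies on right shifts of signed values behaving
	 as arithmetic shifts.  */
#if 0
      int raw = 0x00ABCDEF;                                 /* hypothetical input */
      int extended = (int) ((unsigned int) raw << 8) >> 8;  /* sign-extend low 24 bits */
#endif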
4657
4658 /* An unchecked conversion should never raise Constraint_Error. The code
4659 below assumes that GCC's conversion routines overflow the same way that
4660 the underlying hardware does. This is probably true. In the rare case
4661 when it is false, we can rely on the fact that such conversions are
4662 erroneous anyway. */
4663 if (TREE_CODE (expr) == INTEGER_CST)
4664 TREE_OVERFLOW (expr) = 0;
4665
4666 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4667 mark the expression as no longer constant. */
4668 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4669 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4670 OEP_ONLY_CONST))
4671 TREE_CONSTANT (expr) = 0;
4672
4673 return expr;
4674}
4675\f
4676/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4677 the latter being a record type as predicated by Is_Record_Type. */
4678
4679enum tree_code
4680tree_code_for_record_type (Entity_Id gnat_type)
4681{
4682 Node_Id component_list
4683 = Component_List (Type_Definition
4684 (Declaration_Node
4685 (Implementation_Base_Type (gnat_type))));
4686 Node_Id component;
4687
4688 /* Make this a UNION_TYPE only if it's an Unchecked_Union and every
4689 non-discriminant field lies inside a variant; otherwise it must be
4690 a RECORD_TYPE. */
4691
4692 if (!Is_Unchecked_Union (gnat_type))
4693 return RECORD_TYPE;
4694
4695 for (component = First_Non_Pragma (Component_Items (component_list));
4696 Present (component);
4697 component = Next_Non_Pragma (component))
4698 if (Ekind (Defining_Entity (component)) == E_Component)
4699 return RECORD_TYPE;
4700
4701 return UNION_TYPE;
4702}
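#if 0
/* Illustrative sketch only, not part of the original source.  A hypothetical
   caller would use this function to pick the tree code before building the
   GCC node for a GNAT record entity GNAT_ENTITY (hypothetical name), e.g.:  */
tree gnu_record_type = make_node (tree_code_for_record_type (gnat_entity));
#endif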
4703
4704/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4705 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4706 according to the presence of an alignment clause on the type or, if it
4707 is an array, on the component type. */
4708
4709bool
4710is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4711{
4712 gnat_type = Underlying_Type (gnat_type);
4713
4714 *align_clause = Present (Alignment_Clause (gnat_type));
4715
4716 if (Is_Array_Type (gnat_type))
4717 {
4718 gnat_type = Underlying_Type (Component_Type (gnat_type));
4719 if (Present (Alignment_Clause (gnat_type)))
4720 *align_clause = true;
4721 }
4722
4723 if (!Is_Floating_Point_Type (gnat_type))
4724 return false;
4725
4726 if (UI_To_Int (Esize (gnat_type)) != 64)
4727 return false;
4728
4729 return true;
4730}
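#if 0
/* Illustrative sketch only, not part of the original source.  A hypothetical
   use of the predicate: cap the default alignment of a 64-bit float type
   unless the type (or its component type) carries an explicit alignment
   clause.  GNAT_TYPE and ALIGN are hypothetical names; double_float_alignment
   is the global declared at the top of this file.  */
bool align_clause;
if (double_float_alignment > 0
    && is_double_float_or_array (gnat_type, &align_clause)
    && !align_clause)
  align = double_float_alignment;
#endif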
4731
4732/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4733 size is greater or equal to 64 bits, or an array of such a type. Set
4734 ALIGN_CLAUSE according to the presence of an alignment clause on the
4735 type or, if it is an array, on the component type. */
4736
4737bool
4738is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4739{
4740 gnat_type = Underlying_Type (gnat_type);
4741
4742 *align_clause = Present (Alignment_Clause (gnat_type));
4743
4744 if (Is_Array_Type (gnat_type))
4745 {
4746 gnat_type = Underlying_Type (Component_Type (gnat_type));
4747 if (Present (Alignment_Clause (gnat_type)))
4748 *align_clause = true;
4749 }
4750
4751 if (!Is_Scalar_Type (gnat_type))
4752 return false;
4753
4754 if (UI_To_Int (Esize (gnat_type)) < 64)
4755 return false;
4756
4757 return true;
4758}
4759
4760/* Return true if GNU_TYPE is suitable as the type of a non-aliased
4761 component of an aggregate type. */
4762
4763bool
4764type_for_nonaliased_component_p (tree gnu_type)
4765{
4766 /* If the type is passed by reference, we may have pointers to the
4767 component so it cannot be made non-aliased. */
4768 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4769 return false;
4770
4771 /* We used to say that any component of aggregate type is aliased
4772 because the front-end may take 'Reference of it. The front-end
4773 has been enhanced in the meantime so as to use a renaming instead
4774 in most cases, but the back-end can probably take the address of
4775 such a component too so we go for the conservative stance.
4776
4777 For instance, we might need the address of any array type, even
4778 if normally passed by copy, to construct a fat pointer if the
4779 component is used as an actual for an unconstrained formal.
4780
4781 Likewise for record types: even if a specific record subtype is
4782 passed by copy, the parent type might be passed by ref (e.g. if
4783 it's of variable size) and we might take the address of a child
4784 component to pass to a parent formal. We have no way to check
4785 for such conditions here. */
4786 if (AGGREGATE_TYPE_P (gnu_type))
4787 return false;
4788
4789 return true;
4790}
4791
4792/* Perform final processing on global variables. */
4793
4794void
4795gnat_write_global_declarations (void)
4796{
4797 /* Proceed to optimize and emit assembly.
4798 FIXME: shouldn't be the front end's responsibility to call this. */
4799 cgraph_finalize_compilation_unit ();
4800
4801 /* Emit debug info for all global declarations. */
4802 emit_debug_global_declarations (VEC_address (tree, global_decls),
4803 VEC_length (tree, global_decls));
4804}
4805
4806/* ************************************************************************
4807 * * GCC builtins support *
4808 * ************************************************************************ */
4809
4810/* The general scheme is fairly simple:
4811
4812 For each builtin function/type to be declared, gnat_install_builtins calls
4813 internal facilities which eventually get to gnat_push_decl, which in turn
4814 records the builtin function decls so declared in the 'builtin_decls'
4815 global data structure. When an Intrinsic subprogram declaration is
4816 processed, we search this data structure to retrieve the associated BUILT_IN DECL
4817 node. */
4818
4819/* Search the chain of currently available builtin declarations for a node
4820 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4821 found, if any, or NULL_TREE otherwise. */
4822tree
4823builtin_decl_for (tree name)
4824{
4825 unsigned i;
4826 tree decl;
4827
4828 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4829 if (DECL_NAME (decl) == name)
4830 return decl;
4831
4832 return NULL_TREE;
4833}
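#if 0
/* Illustrative sketch only, not part of the original source.  Hypothetical
   lookup of the decl previously installed for __builtin_memcpy, the kind of
   query made when binding an Intrinsic subprogram to a builtin.  */
tree memcpy_decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));
if (memcpy_decl == NULL_TREE)
  ;  /* No such builtin was installed; fall back to a regular call.  */
#endif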
4834
4835/* The code below eventually exposes gnat_install_builtins, which declares
4836 the builtin types and functions we might need, either internally or as
4837 user accessible facilities.
4838
4839 ??? This is a first implementation shot, still in rough shape. It is
4840 heavily inspired by the "C" family implementation, with chunks copied
4841 verbatim from there.
4842
4843 Two obvious TODO candidates are
4844 o Use a more efficient name/decl mapping scheme
4845 o Devise a middle-end infrastructure to avoid having to copy
4846 pieces between front-ends. */
4847
4848/* ----------------------------------------------------------------------- *
4849 * BUILTIN ELEMENTARY TYPES *
4850 * ----------------------------------------------------------------------- */
4851
4852/* Standard data types to be used in builtin argument declarations. */
4853
4854enum c_tree_index
4855{
4856 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4857 CTI_STRING_TYPE,
4858 CTI_CONST_STRING_TYPE,
4859
4860 CTI_MAX
4861};
4862
4863static tree c_global_trees[CTI_MAX];
4864
4865#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4866#define string_type_node c_global_trees[CTI_STRING_TYPE]
4867#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4868
4869/* ??? In addition to some attribute handlers, we currently don't support a
4870 (small) number of builtin types, which in turn inhibits support for a
4871 number of builtin functions. */
4872#define wint_type_node void_type_node
4873#define intmax_type_node void_type_node
4874#define uintmax_type_node void_type_node
4875
4876/* Build the void_list_node (void_type_node having been created). */
4877
4878static tree
4879build_void_list_node (void)
4880{
4881 tree t = build_tree_list (NULL_TREE, void_type_node);
4882 return t;
4883}
4884
4885/* Used to help initialize the builtin-types.def table. When a type of
4886 the correct size doesn't exist, use error_mark_node instead of NULL.
4887 The latter results in segfaults even when a decl using the type doesn't
4888 get invoked. */
4889
4890static tree
4891builtin_type_for_size (int size, bool unsignedp)
4892{
4893 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4894 return type ? type : error_mark_node;
4895}
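#if 0
/* Illustrative sketch only, not part of the original source.  Requesting a
   32-bit unsigned type; if the target provides none, error_mark_node comes
   back and any function type built from it degrades to error_mark_node too
   (see def_fn_type below).  */
tree uint32_type = builtin_type_for_size (32, true);
#endif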
4896
4897/* Build/push the elementary type decls that builtin functions/types
4898 will need. */
4899
4900static void
4901install_builtin_elementary_types (void)
4902{
4903 signed_size_type_node = size_type_node;
4904 pid_type_node = integer_type_node;
4905 void_list_node = build_void_list_node ();
4906
4907 string_type_node = build_pointer_type (char_type_node);
4908 const_string_type_node
4909 = build_pointer_type (build_qualified_type
4910 (char_type_node, TYPE_QUAL_CONST));
4911}
4912
4913/* ----------------------------------------------------------------------- *
4914 * BUILTIN FUNCTION TYPES *
4915 * ----------------------------------------------------------------------- */
4916
4917/* Now, builtin function types per se. */
4918
4919enum c_builtin_type
4920{
4921#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4922#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4923#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4924#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4925#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4926#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4927#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4928#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4929#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4930#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4931#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4932#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4933#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4934#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4935#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4936 NAME,
4937#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4938#include "builtin-types.def"
4939#undef DEF_PRIMITIVE_TYPE
4940#undef DEF_FUNCTION_TYPE_0
4941#undef DEF_FUNCTION_TYPE_1
4942#undef DEF_FUNCTION_TYPE_2
4943#undef DEF_FUNCTION_TYPE_3
4944#undef DEF_FUNCTION_TYPE_4
4945#undef DEF_FUNCTION_TYPE_5
4946#undef DEF_FUNCTION_TYPE_6
4947#undef DEF_FUNCTION_TYPE_7
4948#undef DEF_FUNCTION_TYPE_VAR_0
4949#undef DEF_FUNCTION_TYPE_VAR_1
4950#undef DEF_FUNCTION_TYPE_VAR_2
4951#undef DEF_FUNCTION_TYPE_VAR_3
4952#undef DEF_FUNCTION_TYPE_VAR_4
4953#undef DEF_FUNCTION_TYPE_VAR_5
4954#undef DEF_POINTER_TYPE
4955 BT_LAST
4956};
4957
4958typedef enum c_builtin_type builtin_type;
4959
4960/* A temporary array used in communication with def_fn_type. */
4961static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4962
4963/* A helper function for install_builtin_types. Build function type
4964 for DEF with return type RET and N arguments. If VAR is true, then the
4965 function should be variadic after those N arguments.
4966
4967 Takes special care not to ICE if any of the types involved are
4968 error_mark_node, which indicates that said type is not in fact available
4969 (see builtin_type_for_size), in which case the function type as a whole
4970 is set to error_mark_node. */
4971
4972static void
4973def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4974{
4975 tree args = NULL, t;
4976 va_list list;
4977 int i;
4978
4979 va_start (list, n);
4980 for (i = 0; i < n; ++i)
4981 {
4982 builtin_type a = (builtin_type) va_arg (list, int);
4983 t = builtin_types[a];
4984 if (t == error_mark_node)
4985 goto egress;
4986 args = tree_cons (NULL_TREE, t, args);
4987 }
4988 va_end (list);
4989
4990 args = nreverse (args);
4991 if (!var)
4992 args = chainon (args, void_list_node);
4993
4994 t = builtin_types[ret];
4995 if (t == error_mark_node)
4996 goto egress;
4997 t = build_function_type (t, args);
4998
4999 egress:
5000 builtin_types[def] = t;
5001}
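#if 0
/* Illustrative sketch only, not part of the original source.  What a
   DEF_FUNCTION_TYPE_2 entry boils down to, using a hypothetical enum value
   BT_FN_EXAMPLE together with BT_INT and BT_PTR: a non-variadic function
   type taking (int, pointer) arguments and returning int.  */
def_fn_type (BT_FN_EXAMPLE, BT_INT, false, 2, BT_INT, BT_PTR);
#endif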
5002
5003/* Build the builtin function types and install them in the builtin_types
5004 array for later use in builtin function decls. */
5005
5006static void
5007install_builtin_function_types (void)
5008{
5009 tree va_list_ref_type_node;
5010 tree va_list_arg_type_node;
5011
5012 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5013 {
5014 va_list_arg_type_node = va_list_ref_type_node =
5015 build_pointer_type (TREE_TYPE (va_list_type_node));
5016 }
5017 else
5018 {
5019 va_list_arg_type_node = va_list_type_node;
5020 va_list_ref_type_node = build_reference_type (va_list_type_node);
5021 }
5022
5023#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5024 builtin_types[ENUM] = VALUE;
5025#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5026 def_fn_type (ENUM, RETURN, 0, 0);
5027#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5028 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5029#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5030 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5031#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5032 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5033#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5034 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5035#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5036 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5037#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5038 ARG6) \
5039 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5040#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5041 ARG6, ARG7) \
5042 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5043#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5044 def_fn_type (ENUM, RETURN, 1, 0);
5045#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5046 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5047#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5048 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5049#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5050 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5051#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5052 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5053#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5054 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5055#define DEF_POINTER_TYPE(ENUM, TYPE) \
5056 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5057
5058#include "builtin-types.def"
5059
5060#undef DEF_PRIMITIVE_TYPE
5061#undef DEF_FUNCTION_TYPE_1
5062#undef DEF_FUNCTION_TYPE_2
5063#undef DEF_FUNCTION_TYPE_3
5064#undef DEF_FUNCTION_TYPE_4
5065#undef DEF_FUNCTION_TYPE_5
5066#undef DEF_FUNCTION_TYPE_6
5067#undef DEF_FUNCTION_TYPE_VAR_0
5068#undef DEF_FUNCTION_TYPE_VAR_1
5069#undef DEF_FUNCTION_TYPE_VAR_2
5070#undef DEF_FUNCTION_TYPE_VAR_3
5071#undef DEF_FUNCTION_TYPE_VAR_4
5072#undef DEF_FUNCTION_TYPE_VAR_5
5073#undef DEF_POINTER_TYPE
5074 builtin_types[(int) BT_LAST] = NULL_TREE;
5075}
5076
5077/* ----------------------------------------------------------------------- *
5078 * BUILTIN ATTRIBUTES *
5079 * ----------------------------------------------------------------------- */
5080
5081enum built_in_attribute
5082{
5083#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5084#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5085#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5086#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5087#include "builtin-attrs.def"
5088#undef DEF_ATTR_NULL_TREE
5089#undef DEF_ATTR_INT
5090#undef DEF_ATTR_IDENT
5091#undef DEF_ATTR_TREE_LIST
5092 ATTR_LAST
5093};
5094
5095static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5096
5097static void
5098install_builtin_attributes (void)
5099{
5100 /* Fill in the built_in_attributes array. */
5101#define DEF_ATTR_NULL_TREE(ENUM) \
5102 built_in_attributes[(int) ENUM] = NULL_TREE;
5103#define DEF_ATTR_INT(ENUM, VALUE) \
5104 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5105#define DEF_ATTR_IDENT(ENUM, STRING) \
5106 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5107#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5108 built_in_attributes[(int) ENUM] \
5109 = tree_cons (built_in_attributes[(int) PURPOSE], \
5110 built_in_attributes[(int) VALUE], \
5111 built_in_attributes[(int) CHAIN]);
5112#include "builtin-attrs.def"
5113#undef DEF_ATTR_NULL_TREE
5114#undef DEF_ATTR_INT
5115#undef DEF_ATTR_IDENT
5116#undef DEF_ATTR_TREE_LIST
5117}
5118
5119/* Handle a "const" attribute; arguments as in
5120 struct attribute_spec.handler. */
5121
5122static tree
5123handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5124 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5125 bool *no_add_attrs)
5126{
5127 if (TREE_CODE (*node) == FUNCTION_DECL)
5128 TREE_READONLY (*node) = 1;
5129 else
5130 *no_add_attrs = true;
5131
5132 return NULL_TREE;
5133}
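#if 0
/* Illustrative sketch only, not part of the original source.  A C-level
   declaration carrying the attribute this handler implements; here the
   handler is applied to builtin decls rather than user-parsed code.  */
extern int example_square (int) __attribute__ ((const));
#endif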
5134
5135/* Handle a "nothrow" attribute; arguments as in
5136 struct attribute_spec.handler. */
5137
5138static tree
5139handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5140 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5141 bool *no_add_attrs)
5142{
5143 if (TREE_CODE (*node) == FUNCTION_DECL)
5144 TREE_NOTHROW (*node) = 1;
5145 else
5146 *no_add_attrs = true;
5147
5148 return NULL_TREE;
5149}
5150
5151/* Handle a "pure" attribute; arguments as in
5152 struct attribute_spec.handler. */
5153
5154static tree
5155handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5156 int ARG_UNUSED (flags), bool *no_add_attrs)
5157{
5158 if (TREE_CODE (*node) == FUNCTION_DECL)
5159 DECL_PURE_P (*node) = 1;
5160 /* ??? TODO: Support types. */
5161 else
5162 {
5163 warning (OPT_Wattributes, "%qs attribute ignored",
5164 IDENTIFIER_POINTER (name));
5165 *no_add_attrs = true;
5166 }
5167
5168 return NULL_TREE;
5169}
5170
5171/* Handle a "no vops" attribute; arguments as in
5172 struct attribute_spec.handler. */
5173
5174static tree
5175handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5176 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5177 bool *ARG_UNUSED (no_add_attrs))
5178{
5179 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5180 DECL_IS_NOVOPS (*node) = 1;
5181 return NULL_TREE;
5182}
5183
5184/* Helper for nonnull attribute handling; fetch the operand number
5185 from the attribute argument list. */
5186
5187static bool
5188get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5189{
5190 /* Verify the arg number is a constant. */
5191 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5192 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5193 return false;
5194
5195 *valp = TREE_INT_CST_LOW (arg_num_expr);
5196 return true;
5197}
5198
5199/* Handle the "nonnull" attribute. */
5200static tree
5201handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5202 tree args, int ARG_UNUSED (flags),
5203 bool *no_add_attrs)
5204{
5205 tree type = *node;
5206 unsigned HOST_WIDE_INT attr_arg_num;
5207
5208 /* If no arguments are specified, all pointer arguments should be
5209 non-null. Verify a full prototype is given so that the arguments
5210 will have the correct types when we actually check them later. */
5211 if (!args)
5212 {
5213 if (!TYPE_ARG_TYPES (type))
5214 {
5215 error ("nonnull attribute without arguments on a non-prototype");
5216 *no_add_attrs = true;
5217 }
5218 return NULL_TREE;
5219 }
5220
5221 /* Argument list specified. Verify that each argument number references
5222 a pointer argument. */
5223 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5224 {
5225 tree argument;
5226 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5227
5228 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5229 {
5230 error ("nonnull argument has invalid operand number (argument %lu)",
5231 (unsigned long) attr_arg_num);
5232 *no_add_attrs = true;
5233 return NULL_TREE;
5234 }
5235
5236 argument = TYPE_ARG_TYPES (type);
5237 if (argument)
5238 {
5239 for (ck_num = 1; ; ck_num++)
5240 {
5241 if (!argument || ck_num == arg_num)
5242 break;
5243 argument = TREE_CHAIN (argument);
5244 }
5245
5246 if (!argument
5247 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5248 {
5249 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5250 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5251 *no_add_attrs = true;
5252 return NULL_TREE;
5253 }
5254
5255 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5256 {
5257 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5258 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5259 *no_add_attrs = true;
5260 return NULL_TREE;
5261 }
5262 }
5263 }
5264
5265 return NULL_TREE;
5266}
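#if 0
/* Illustrative sketch only, not part of the original source.  A prototype of
   the shape this handler validates: operands 2 and 3 must designate pointer
   arguments.  The names are hypothetical.  */
extern void *example_copy (int n, void *dst, const void *src)
  __attribute__ ((nonnull (2, 3)));
#endif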
5267
5268/* Handle a "sentinel" attribute. */
5269
5270static tree
5271handle_sentinel_attribute (tree *node, tree name, tree args,
5272 int ARG_UNUSED (flags), bool *no_add_attrs)
5273{
5274 tree params = TYPE_ARG_TYPES (*node);
5275
5276 if (!params)
5277 {
5278 warning (OPT_Wattributes,
5279 "%qs attribute requires prototypes with named arguments",
5280 IDENTIFIER_POINTER (name));
5281 *no_add_attrs = true;
5282 }
5283 else
5284 {
5285 while (TREE_CHAIN (params))
5286 params = TREE_CHAIN (params);
5287
5288 if (VOID_TYPE_P (TREE_VALUE (params)))
5289 {
5290 warning (OPT_Wattributes,
5291 "%qs attribute only applies to variadic functions",
5292 IDENTIFIER_POINTER (name));
5293 *no_add_attrs = true;
5294 }
5295 }
5296
5297 if (args)
5298 {
5299 tree position = TREE_VALUE (args);
5300
5301 if (TREE_CODE (position) != INTEGER_CST)
5302 {
5303 warning (0, "requested position is not an integer constant");
5304 *no_add_attrs = true;
5305 }
5306 else
5307 {
5308 if (tree_int_cst_lt (position, integer_zero_node))
5309 {
5310 warning (0, "requested position is less than zero");
5311 *no_add_attrs = true;
5312 }
5313 }
5314 }
5315
5316 return NULL_TREE;
5317}
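#if 0
/* Illustrative sketch only, not part of the original source.  A variadic
   prototype this handler accepts; callers must terminate the variadic
   arguments with a null pointer.  The name is hypothetical.  */
extern char *example_concat (const char *first, ...)
  __attribute__ ((sentinel));
#endif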
5318
5319/* Handle a "noreturn" attribute; arguments as in
5320 struct attribute_spec.handler. */
5321
5322static tree
5323handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5324 int ARG_UNUSED (flags), bool *no_add_attrs)
5325{
5326 tree type = TREE_TYPE (*node);
5327
5328 /* See FIXME comment in c_common_attribute_table. */
5329 if (TREE_CODE (*node) == FUNCTION_DECL)
5330 TREE_THIS_VOLATILE (*node) = 1;
5331 else if (TREE_CODE (type) == POINTER_TYPE
5332 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5333 TREE_TYPE (*node)
5334 = build_pointer_type
5335 (build_type_variant (TREE_TYPE (type),
5336 TYPE_READONLY (TREE_TYPE (type)), 1));
5337 else
5338 {
5339 warning (OPT_Wattributes, "%qs attribute ignored",
5340 IDENTIFIER_POINTER (name));
5341 *no_add_attrs = true;
5342 }
5343
5344 return NULL_TREE;
5345}
5346
5347/* Handle a "malloc" attribute; arguments as in
5348 struct attribute_spec.handler. */
5349
5350static tree
5351handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5352 int ARG_UNUSED (flags), bool *no_add_attrs)
5353{
5354 if (TREE_CODE (*node) == FUNCTION_DECL
5355 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5356 DECL_IS_MALLOC (*node) = 1;
5357 else
5358 {
5359 warning (OPT_Wattributes, "%qs attribute ignored",
5360 IDENTIFIER_POINTER (name));
5361 *no_add_attrs = true;
5362 }
5363
5364 return NULL_TREE;
5365}
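#if 0
/* Illustrative sketch only, not part of the original source.  The handler
   only accepts functions returning a pointer, e.g. (hypothetical name):  */
extern void *example_alloc (unsigned long n) __attribute__ ((malloc));
#endif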
5366
5367/* Fake handler for attributes we don't properly support. */
5368
5369tree
5370fake_attribute_handler (tree * ARG_UNUSED (node),
5371 tree ARG_UNUSED (name),
5372 tree ARG_UNUSED (args),
5373 int ARG_UNUSED (flags),
5374 bool * ARG_UNUSED (no_add_attrs))
5375{
5376 return NULL_TREE;
5377}
5378
5379/* Handle a "type_generic" attribute. */
5380
5381static tree
5382handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5383 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5384 bool * ARG_UNUSED (no_add_attrs))
5385{
5386 tree params;
5387
5388 /* Ensure we have a function type. */
5389 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5390
5391 params = TYPE_ARG_TYPES (*node);
5392 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5393 params = TREE_CHAIN (params);
5394
5395 /* Ensure we have a variadic function. */
5396 gcc_assert (!params);
5397
5398 return NULL_TREE;
5399}
5400
5401/* Handle a "vector_size" attribute; arguments as in
5402 struct attribute_spec.handler. */
5403
5404static tree
5405handle_vector_size_attribute (tree *node, tree name, tree args,
5406 int ARG_UNUSED (flags),
5407 bool *no_add_attrs)
5408{
5409 unsigned HOST_WIDE_INT vecsize, nunits;
5410 enum machine_mode orig_mode;
5411 tree type = *node, new_type, size;
5412
5413 *no_add_attrs = true;
5414
5415 size = TREE_VALUE (args);
5416
5417 if (!host_integerp (size, 1))
5418 {
5419 warning (OPT_Wattributes, "%qs attribute ignored",
5420 IDENTIFIER_POINTER (name));
5421 return NULL_TREE;
5422 }
5423
5424 /* Get the vector size (in bytes). */
5425 vecsize = tree_low_cst (size, 1);
5426
5427 /* We need to provide for vector pointers, vector arrays, and
5428 functions returning vectors. For example:
5429
5430 __attribute__((vector_size(16))) short *foo;
5431
5432 In this case, the mode is SI, but the type being modified is
5433 HI, so we need to look further. */
5434
5435 while (POINTER_TYPE_P (type)
5436 || TREE_CODE (type) == FUNCTION_TYPE
5437 || TREE_CODE (type) == METHOD_TYPE
5438 || TREE_CODE (type) == ARRAY_TYPE
5439 || TREE_CODE (type) == OFFSET_TYPE)
5440 type = TREE_TYPE (type);
5441
5442 /* Get the mode of the type being modified. */
5443 orig_mode = TYPE_MODE (type);
5444
5445 if ((!INTEGRAL_TYPE_P (type)
5446 && !SCALAR_FLOAT_TYPE_P (type)
5447 && !FIXED_POINT_TYPE_P (type))
5448 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5449 && GET_MODE_CLASS (orig_mode) != MODE_INT
5450 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5451 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5452 || TREE_CODE (type) == BOOLEAN_TYPE)
5453 {
5454 error ("invalid vector type for attribute %qs",
5455 IDENTIFIER_POINTER (name));
5456 return NULL_TREE;
5457 }
5458
5459 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5460 {
5461 error ("vector size not an integral multiple of component size");
5462 return NULL;
5463 }
5464
5465 if (vecsize == 0)
5466 {
5467 error ("zero vector size");
5468 return NULL;
5469 }
5470
5471 /* Calculate how many units fit in the vector. */
5472 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5473 if (nunits & (nunits - 1))
5474 {
5475 error ("number of components of the vector not a power of two");
5476 return NULL_TREE;
5477 }
5478
5479 new_type = build_vector_type (type, nunits);
5480
5481 /* Build back pointers if needed. */
5482 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5483
5484 return NULL_TREE;
5485}
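#if 0
/* Illustrative sketch only, not part of the original source.  The layout the
   handler produces for the example quoted in the comment above: a 16-byte
   vector of eight shorts, given a typedef name here for clarity.  */
typedef short example_v8hi __attribute__ ((vector_size (16)));
#endif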
5486
5487/* Handle a "vector_type" attribute; arguments as in
5488 struct attribute_spec.handler. */
5489
5490static tree
5491handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5492 int ARG_UNUSED (flags),
5493 bool *no_add_attrs)
5494{
5495 /* Vector representative type and size. */
5496 tree rep_type = *node;
5497 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5498 tree rep_name;
5499
5500 /* Vector size in bytes and number of units. */
5501 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5502
5503 /* Vector element type and mode. */
5504 tree elem_type;
5505 enum machine_mode elem_mode;
5506
5507 *no_add_attrs = true;
5508
5509 /* Get the representative array type, possibly nested within a
5510 padding record e.g. for alignment purposes. */
5511
5512 if (TREE_CODE (rep_type) == RECORD_TYPE && TYPE_IS_PADDING_P (rep_type))
5513 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5514
5515 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5516 {
5517 error ("attribute %qs applies to array types only",
5518 IDENTIFIER_POINTER (name));
5519 return NULL_TREE;
5520 }
5521
5522 /* Silently punt on variable sizes. We can't make vector types for them,
5523 and we need to ignore them on front-end generated subtypes of unconstrained
5524 bases. This attribute is for binding implementors, not end users, so
5525 we should never get here from legitimate explicit uses. */
5526
5527 if (!host_integerp (rep_size, 1))
5528 return NULL_TREE;
5529
5530 /* Get the element type/mode and check this is something we know
5531 how to make vectors of. */
5532
5533 elem_type = TREE_TYPE (rep_type);
5534 elem_mode = TYPE_MODE (elem_type);
5535
5536 if ((!INTEGRAL_TYPE_P (elem_type)
5537 && !SCALAR_FLOAT_TYPE_P (elem_type)
5538 && !FIXED_POINT_TYPE_P (elem_type))
5539 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5540 && GET_MODE_CLASS (elem_mode) != MODE_INT
5541 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5542 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5543 {
5544 error ("invalid element type for attribute %qs",
5545 IDENTIFIER_POINTER (name));
5546 return NULL_TREE;
5547 }
5548
5549 /* Sanity check the vector size and element type consistency. */
5550
5551 vec_bytes = tree_low_cst (rep_size, 1);
5552
5553 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5554 {
5555 error ("vector size not an integral multiple of component size");
5556 return NULL;
5557 }
5558
5559 if (vec_bytes == 0)
5560 {
5561 error ("zero vector size");
5562 return NULL;
5563 }
5564
5565 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5566 if (vec_units & (vec_units - 1))
5567 {
5568 error ("number of components of the vector not a power of two");
5569 return NULL_TREE;
5570 }
5571
5572 /* Build the vector type and replace. */
5573
5574 *node = build_vector_type (elem_type, vec_units);
5575 rep_name = TYPE_NAME (rep_type);
5576 if (TREE_CODE (rep_name) == TYPE_DECL)
5577 rep_name = DECL_NAME (rep_name);
5578 TYPE_NAME (*node) = rep_name;
5579 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5580
5581 return NULL_TREE;
5582}
5583
5584/* ----------------------------------------------------------------------- *
5585 * BUILTIN FUNCTIONS *
5586 * ----------------------------------------------------------------------- */
5587
5588/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5589 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5590 if nonansi_p and flag_no_nonansi_builtin. */
5591
5592static void
5593def_builtin_1 (enum built_in_function fncode,
5594 const char *name,
5595 enum built_in_class fnclass,
5596 tree fntype, tree libtype,
5597 bool both_p, bool fallback_p,
5598 bool nonansi_p ATTRIBUTE_UNUSED,
5599 tree fnattrs, bool implicit_p)
5600{
5601 tree decl;
5602 const char *libname;
5603
5604 /* Preserve an already installed decl. It most likely was set up in advance
5605 (e.g. as part of the internal builtins) for specific reasons. */
5606 if (built_in_decls[(int) fncode] != NULL_TREE)
5607 return;
5608
5609 gcc_assert ((!both_p && !fallback_p)
5610 || !strncmp (name, "__builtin_",
5611 strlen ("__builtin_")));
5612
5613 libname = name + strlen ("__builtin_");
5614 decl = add_builtin_function (name, fntype, fncode, fnclass,
5615 (fallback_p ? libname : NULL),
5616 fnattrs);
5617 if (both_p)
5618 /* ??? This is normally further controlled by command-line options
5619 like -fno-builtin, but we don't have them for Ada. */
5620 add_builtin_function (libname, libtype, fncode, fnclass,
5621 NULL, fnattrs);
5622
5623 built_in_decls[(int) fncode] = decl;
5624 if (implicit_p)
5625 implicit_built_in_decls[(int) fncode] = decl;
5626}
5627
5628static int flag_isoc94 = 0;
5629static int flag_isoc99 = 0;
5630
5631/* Install what the common builtins.def offers. */
5632
5633static void
5634install_builtin_functions (void)
5635{
5636#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5637 NONANSI_P, ATTRS, IMPLICIT, COND) \
5638 if (NAME && COND) \
5639 def_builtin_1 (ENUM, NAME, CLASS, \
5640 builtin_types[(int) TYPE], \
5641 builtin_types[(int) LIBTYPE], \
5642 BOTH_P, FALLBACK_P, NONANSI_P, \
5643 built_in_attributes[(int) ATTRS], IMPLICIT);
5644#include "builtins.def"
5645#undef DEF_BUILTIN
5646}
5647
5648/* ----------------------------------------------------------------------- *
5649 * BUILTIN FUNCTIONS *
5650 * ----------------------------------------------------------------------- */
5651
5652/* Install the builtin functions we might need. */
5653
5654void
5655gnat_install_builtins (void)
5656{
5657 install_builtin_elementary_types ();
5658 install_builtin_function_types ();
5659 install_builtin_attributes ();
5660
5661 /* Install builtins used by generic middle-end pieces first. Some of these
5662 know about internal specificities and control attributes accordingly, for
5663 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5664 the generic definition from builtins.def. */
5665 build_common_builtin_nodes (false);
5666
5667 /* Now, install the target specific builtins, such as the AltiVec family on
5668 ppc, and the common set as exposed by builtins.def. */
5669 targetm.init_builtins ();
5670 install_builtin_functions ();
5671}
5672
5673#include "gt-ada-utils.h"
5674#include "gtype-ada.h"