1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26/* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28#define GCC_DIAG_STYLE __gcc_cdiag__
29
30#include "config.h"
31#include "system.h"
32#include "coretypes.h"
33#include "tm.h"
34#include "tree.h"
35#include "flags.h"
36#include "defaults.h"
37#include "toplev.h"
38#include "output.h"
39#include "ggc.h"
40#include "debug.h"
41#include "convert.h"
42#include "target.h"
43#include "function.h"
44#include "cgraph.h"
45#include "tree-inline.h"
46#include "tree-iterator.h"
47#include "gimple.h"
48#include "tree-dump.h"
49#include "pointer-set.h"
50#include "langhooks.h"
51
52#include "ada.h"
53#include "types.h"
54#include "atree.h"
55#include "elists.h"
56#include "namet.h"
57#include "nlists.h"
58#include "stringt.h"
59#include "uintp.h"
60#include "fe.h"
61#include "sinfo.h"
62#include "einfo.h"
63#include "ada-tree.h"
64#include "gigi.h"
65
66#ifndef MAX_FIXED_MODE_SIZE
67#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
68#endif
69
70#ifndef MAX_BITS_PER_WORD
71#define MAX_BITS_PER_WORD BITS_PER_WORD
72#endif
73
74/* If nonzero, pretend we are allocating at global level. */
75int force_global;
76
77/* Tree nodes for the various types and decls we create. */
78tree gnat_std_decls[(int) ADT_LAST];
79
80/* Functions to call for each of the possible raise reasons. */
81tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82
83/* Forward declarations for handlers of attributes. */
84static tree handle_const_attribute (tree *, tree, tree, int, bool *);
85static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
86static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
87static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
88static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
89static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
90static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
91static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
92static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
93
94/* Fake handler for attributes we don't properly support, typically because
 95 they'd require dragging in a lot of the common C front-end circuitry. */
96static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
97
98/* Table of machine-independent internal attributes for Ada. We support
99 this minimal set of attributes to accommodate the needs of builtins. */
100const struct attribute_spec gnat_internal_attribute_table[] =
101{
102 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
103 { "const", 0, 0, true, false, false, handle_const_attribute },
104 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
105 { "pure", 0, 0, true, false, false, handle_pure_attribute },
106 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
107 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
108 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
109 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
110 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
111 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
112
113 /* ??? format and format_arg are heavy and not supported, which actually
 114 prevents support for the stdio builtins, even though we declare them as
 115 part of the common builtins.def contents. */
116 { "format", 3, 3, false, true, true, fake_attribute_handler },
117 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
118
119 { NULL, 0, 0, false, false, false, NULL }
120};
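
/* Each handler above follows the attribute_spec contract: it receives a
   pointer to the decl or type the attribute is applied to and may either
   decorate it or set *NO_ADD_ATTRS to reject the attribute.  A minimal
   hypothetical handler of that shape, for illustration only (not one of
   the handlers declared above), could look like:

     static tree
     handle_example_attribute (tree *node, tree name ATTRIBUTE_UNUSED,
                               tree args ATTRIBUTE_UNUSED,
                               int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
     {
       if (TREE_CODE (*node) == FUNCTION_DECL)
         TREE_READONLY (*node) = 1;
       else
         *no_add_attrs = true;

       return NULL_TREE;
     }  */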
121
 122/* Associates a GNAT tree node with a GCC tree node. It is used in
123 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
124 of `save_gnu_tree' for more info. */
125static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
126
127#define GET_GNU_TREE(GNAT_ENTITY) \
128 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
129
130#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
131 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
132
133#define PRESENT_GNU_TREE(GNAT_ENTITY) \
134 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
135
 136/* Associates a GNAT entity with a GCC tree node used as a dummy, if any. */
137static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
138
139#define GET_DUMMY_NODE(GNAT_ENTITY) \
140 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
141
142#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
143 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
144
145#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
146 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
147
148/* This variable keeps a table for types for each precision so that we only
149 allocate each of them once. Signed and unsigned types are kept separate.
150
151 Note that these types are only used when fold-const requests something
152 special. Perhaps we should NOT share these types; we'll see how it
153 goes later. */
154static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
155
156/* Likewise for float types, but record these by mode. */
157static GTY(()) tree float_types[NUM_MACHINE_MODES];
158
159/* For each binding contour we allocate a binding_level structure to indicate
160 the binding depth. */
161
162struct gnat_binding_level GTY((chain_next ("%h.chain")))
163{
164 /* The binding level containing this one (the enclosing binding level). */
165 struct gnat_binding_level *chain;
166 /* The BLOCK node for this level. */
167 tree block;
168 /* If nonzero, the setjmp buffer that needs to be updated for any
169 variable-sized definition within this context. */
170 tree jmpbuf_decl;
171};
172
173/* The binding level currently in effect. */
174static GTY(()) struct gnat_binding_level *current_binding_level;
175
176/* A chain of gnat_binding_level structures awaiting reuse. */
177static GTY((deletable)) struct gnat_binding_level *free_binding_level;
178
179/* An array of global declarations. */
180static GTY(()) VEC(tree,gc) *global_decls;
181
182/* An array of builtin function declarations. */
183static GTY(()) VEC(tree,gc) *builtin_decls;
184
185/* An array of global renaming pointers. */
186static GTY(()) VEC(tree,gc) *global_renaming_pointers;
187
188/* A chain of unused BLOCK nodes. */
189static GTY((deletable)) tree free_block_chain;
190
191static void gnat_install_builtins (void);
192static tree merge_sizes (tree, tree, tree, bool, bool);
193static tree compute_related_constant (tree, tree);
194static tree split_plus (tree, tree *);
195static void gnat_gimplify_function (tree);
196static tree float_type_for_precision (int, enum machine_mode);
197static tree convert_to_fat_pointer (tree, tree);
198static tree convert_to_thin_pointer (tree, tree);
 199static tree make_descriptor_field (const char *, tree, tree, tree);
200static bool potential_alignment_gap (tree, tree, tree);
201\f
202/* Initialize the association of GNAT nodes to GCC trees. */
203
204void
205init_gnat_to_gnu (void)
206{
207 associate_gnat_to_gnu
208 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
209}
210
211/* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
212 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
213 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
214
215 If GNU_DECL is zero, a previous association is to be reset. */
216
217void
218save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
219{
220 /* Check that GNAT_ENTITY is not already defined and that it is being set
221 to something which is a decl. Raise gigi 401 if not. Usually, this
222 means GNAT_ENTITY is defined twice, but occasionally is due to some
223 Gigi problem. */
224 gcc_assert (!(gnu_decl
225 && (PRESENT_GNU_TREE (gnat_entity)
226 || (!no_check && !DECL_P (gnu_decl)))));
227
228 SET_GNU_TREE (gnat_entity, gnu_decl);
229}
230
231/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
232 Return the ..._DECL node that was associated with it. If there is no tree
233 node associated with GNAT_ENTITY, abort.
234
235 In some cases, such as delayed elaboration or expressions that need to
236 be elaborated only once, GNAT_ENTITY is really not an entity. */
237
238tree
239get_gnu_tree (Entity_Id gnat_entity)
240{
241 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
242 return GET_GNU_TREE (gnat_entity);
243}
244
245/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
246
247bool
248present_gnu_tree (Entity_Id gnat_entity)
249{
250 return PRESENT_GNU_TREE (gnat_entity);
251}
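
/* A minimal usage sketch of the three routines above, assuming a caller
   that already has GNAT_ENTITY and a GCC ..._DECL node GNU_DECL in hand
   (both hypothetical here):

     if (!present_gnu_tree (gnat_entity))
       save_gnu_tree (gnat_entity, gnu_decl, false);

     gnu_decl = get_gnu_tree (gnat_entity);

   Passing NULL_TREE as GNU_DECL resets a previous association:

     save_gnu_tree (gnat_entity, NULL_TREE, false);  */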
252\f
253/* Initialize the association of GNAT nodes to GCC trees as dummies. */
254
255void
256init_dummy_type (void)
257{
258 dummy_node_table
259 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
260}
261
262/* Make a dummy type corresponding to GNAT_TYPE. */
263
264tree
265make_dummy_type (Entity_Id gnat_type)
266{
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
268 tree gnu_type;
269
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
273
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
278
 279 /* If there is already a dummy type, use it; otherwise, make one. */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
282
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
284 an ENUMERAL_TYPE. */
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
287 : ENUMERAL_TYPE);
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
290 if (AGGREGATE_TYPE_P (gnu_type))
291 {
292 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
293 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
294 }
295
296 SET_DUMMY_NODE (gnat_underlying, gnu_type);
297
298 return gnu_type;
299}
300\f
301/* Return nonzero if we are currently in the global binding level. */
302
303int
304global_bindings_p (void)
305{
306 return ((force_global || !current_function_decl) ? -1 : 0);
307}
308
309/* Enter a new binding level. */
310
311void
312gnat_pushlevel ()
313{
314 struct gnat_binding_level *newlevel = NULL;
315
316 /* Reuse a struct for this binding level, if there is one. */
317 if (free_binding_level)
318 {
319 newlevel = free_binding_level;
320 free_binding_level = free_binding_level->chain;
321 }
322 else
323 newlevel
324 = (struct gnat_binding_level *)
325 ggc_alloc (sizeof (struct gnat_binding_level));
326
327 /* Use a free BLOCK, if any; otherwise, allocate one. */
328 if (free_block_chain)
329 {
330 newlevel->block = free_block_chain;
331 free_block_chain = BLOCK_CHAIN (free_block_chain);
332 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
333 }
334 else
335 newlevel->block = make_node (BLOCK);
336
337 /* Point the BLOCK we just made to its parent. */
338 if (current_binding_level)
339 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
340
341 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
342 TREE_USED (newlevel->block) = 1;
343
344 /* Add this level to the front of the chain (stack) of levels that are
345 active. */
346 newlevel->chain = current_binding_level;
347 newlevel->jmpbuf_decl = NULL_TREE;
348 current_binding_level = newlevel;
349}
350
351/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
352 and point FNDECL to this BLOCK. */
353
354void
355set_current_block_context (tree fndecl)
356{
357 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
358 DECL_INITIAL (fndecl) = current_binding_level->block;
359}
360
361/* Set the jmpbuf_decl for the current binding level to DECL. */
362
363void
364set_block_jmpbuf_decl (tree decl)
365{
366 current_binding_level->jmpbuf_decl = decl;
367}
368
369/* Get the jmpbuf_decl, if any, for the current binding level. */
370
371tree
372get_block_jmpbuf_decl ()
373{
374 return current_binding_level->jmpbuf_decl;
375}
376
377/* Exit a binding level. Set any BLOCK into the current code group. */
378
379void
380gnat_poplevel ()
381{
382 struct gnat_binding_level *level = current_binding_level;
383 tree block = level->block;
384
385 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
386 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
387
 388 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
 389 are no variables, free the block and merge its subblocks into those of its
 390 parent block. Otherwise, add it to the list of its parent. */
391 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
392 ;
393 else if (BLOCK_VARS (block) == NULL_TREE)
394 {
395 BLOCK_SUBBLOCKS (level->chain->block)
396 = chainon (BLOCK_SUBBLOCKS (block),
397 BLOCK_SUBBLOCKS (level->chain->block));
398 BLOCK_CHAIN (block) = free_block_chain;
399 free_block_chain = block;
400 }
401 else
402 {
403 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
404 BLOCK_SUBBLOCKS (level->chain->block) = block;
405 TREE_USED (block) = 1;
406 set_block_for_group (block);
407 }
408
409 /* Free this binding structure. */
410 current_binding_level = level->chain;
411 level->chain = free_binding_level;
412 free_binding_level = level;
413}
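
/* A minimal sketch of how the binding level primitives above are meant to
   be paired when translating a subprogram body; GNU_SUBPROG_DECL is a
   hypothetical FUNCTION_DECL and the elided middle part stands for the
   usual gnat_pushdecl calls for locals:

     gnat_pushlevel ();
     set_current_block_context (gnu_subprog_decl);
     ...
     gnat_poplevel ();

   Levels and BLOCK nodes popped here are kept on free lists and reused by
   the next gnat_pushlevel.  */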
414
415\f
416/* Records a ..._DECL node DECL as belonging to the current lexical scope
417 and uses GNAT_NODE for location information and propagating flags. */
418
419void
420gnat_pushdecl (tree decl, Node_Id gnat_node)
421{
 422 /* If this decl is public and external or at top level, there is no context.
 423 But PARM_DECLs always go in the level of their function. */
424 if (TREE_CODE (decl) != PARM_DECL
425 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
426 || global_bindings_p ()))
427 DECL_CONTEXT (decl) = 0;
428 else
429 {
430 DECL_CONTEXT (decl) = current_function_decl;
431
432 /* Functions imported in another function are not really nested. */
433 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
434 DECL_NO_STATIC_CHAIN (decl) = 1;
435 }
436
437 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
438
439 /* Set the location of DECL and emit a declaration for it. */
440 if (Present (gnat_node))
441 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
442 add_decl_expr (decl, gnat_node);
443
444 /* Put the declaration on the list. The list of declarations is in reverse
445 order. The list will be reversed later. Put global variables in the
446 globals list and builtin functions in a dedicated list to speed up
447 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
448 the list, as they will cause trouble with the debugger and aren't needed
449 anyway. */
450 if (TREE_CODE (decl) != TYPE_DECL
451 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
452 {
453 if (global_bindings_p ())
454 {
455 VEC_safe_push (tree, gc, global_decls, decl);
456
457 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
458 VEC_safe_push (tree, gc, builtin_decls, decl);
459 }
460 else
461 {
462 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
463 BLOCK_VARS (current_binding_level->block) = decl;
464 }
465 }
466
467 /* For the declaration of a type, set its name if it either is not already
468 set, was set to an IDENTIFIER_NODE, indicating an internal name,
469 or if the previous type name was not derived from a source name.
470 We'd rather have the type named with a real name and all the pointer
471 types to the same object have the same POINTER_TYPE node. Code in the
472 equivalent function of c-decl.c makes a copy of the type node here, but
473 that may cause us trouble with incomplete types. We make an exception
474 for fat pointer types because the compiler automatically builds them
475 for unconstrained array types and the debugger uses them to represent
476 both these and pointers to these. */
477 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
478 {
479 tree t = TREE_TYPE (decl);
480
481 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
482 ;
483 else if (TYPE_FAT_POINTER_P (t))
484 {
485 tree tt = build_variant_type_copy (t);
486 TYPE_NAME (tt) = decl;
487 TREE_USED (tt) = TREE_USED (t);
488 TREE_TYPE (decl) = tt;
489 DECL_ORIGINAL_TYPE (decl) = t;
490 t = NULL_TREE;
491 }
492 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
493 ;
494 else
495 t = NULL_TREE;
496
497 /* Propagate the name to all the variants. This is needed for
498 the type qualifiers machinery to work properly. */
499 if (t)
500 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
501 TYPE_NAME (t) = decl;
502 }
503}
504\f
505/* Do little here. Set up the standard declarations later after the
506 front end has been run. */
507
508void
509gnat_init_decl_processing (void)
510{
511 /* Make the binding_level structure for global names. */
512 current_function_decl = 0;
513 current_binding_level = 0;
514 free_binding_level = 0;
515 gnat_pushlevel ();
516
517 build_common_tree_nodes (true, true);
518
519 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
520 corresponding to the size of Pmode. In most cases when ptr_mode and
521 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
522 far better code using the width of Pmode. Make this here since we need
523 this before we can expand the GNAT types. */
524 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
525 set_sizetype (size_type_node);
526
527 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
528 boolean_type_node = make_node (BOOLEAN_TYPE);
529 TYPE_PRECISION (boolean_type_node) = 1;
530 fixup_unsigned_type (boolean_type_node);
531 TYPE_RM_SIZE_NUM (boolean_type_node) = bitsize_int (1);
532
533 build_common_tree_nodes_2 (0);
534
535 ptr_void_type_node = build_pointer_type (void_type_node);
536}
537
538/* Create the predefined scalar types such as `integer_type_node' needed
539 in the gcc back-end and initialize the global binding level. */
540
541void
542init_gigi_decls (tree long_long_float_type, tree exception_type)
543{
544 tree endlink, decl;
 545 tree int64_type = gnat_type_for_size (64, 0);
546 unsigned int i;
547
548 /* Set the types that GCC and Gigi use from the front end. We would like
549 to do this for char_type_node, but it needs to correspond to the C
550 char type. */
551 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
552 {
553 /* In this case, the builtin floating point types are VAX float,
554 so make up a type for use. */
555 longest_float_type_node = make_node (REAL_TYPE);
556 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
557 layout_type (longest_float_type_node);
558 create_type_decl (get_identifier ("longest float type"),
559 longest_float_type_node, NULL, false, true, Empty);
560 }
561 else
562 longest_float_type_node = TREE_TYPE (long_long_float_type);
563
564 except_type_node = TREE_TYPE (exception_type);
565
566 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
567 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
568 NULL, false, true, Empty);
569
570 void_type_decl_node = create_type_decl (get_identifier ("void"),
571 void_type_node, NULL, false, true,
572 Empty);
573
574 void_ftype = build_function_type (void_type_node, NULL_TREE);
575 ptr_void_ftype = build_pointer_type (void_ftype);
576
577 /* Build the special descriptor type and its null node if needed. */
578 if (TARGET_VTABLE_USES_DESCRIPTORS)
579 {
580 tree field_list = NULL_TREE, null_list = NULL_TREE;
581 int j;
582
583 fdesc_type_node = make_node (RECORD_TYPE);
584
585 for (j = 0; j < TARGET_VTABLE_USES_DESCRIPTORS; j++)
586 {
587 tree field = create_field_decl (NULL_TREE, ptr_void_ftype,
588 fdesc_type_node, 0, 0, 0, 1);
589 TREE_CHAIN (field) = field_list;
590 field_list = field;
591 null_list = tree_cons (field, null_pointer_node, null_list);
592 }
593
594 finish_record_type (fdesc_type_node, nreverse (field_list), 0, false);
595 null_fdesc_node = gnat_build_constructor (fdesc_type_node, null_list);
596 }
597
598 /* Now declare runtime functions. */
599 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
600
601 /* malloc is a function declaration tree for a function to allocate
602 memory. */
603 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
604 NULL_TREE,
605 build_function_type (ptr_void_type_node,
606 tree_cons (NULL_TREE,
607 sizetype,
608 endlink)),
609 NULL_TREE, false, true, true, NULL,
610 Empty);
611 DECL_IS_MALLOC (malloc_decl) = 1;
612
613 /* malloc32 is a function declaration tree for a function to allocate
 614 32-bit memory on a 64-bit system. Needed only on 64-bit VMS. */
615 malloc32_decl = create_subprog_decl (get_identifier ("__gnat_malloc32"),
616 NULL_TREE,
617 build_function_type (ptr_void_type_node,
618 tree_cons (NULL_TREE,
619 sizetype,
620 endlink)),
621 NULL_TREE, false, true, true, NULL,
622 Empty);
623 DECL_IS_MALLOC (malloc32_decl) = 1;
624
625 /* free is a function declaration tree for a function to free memory. */
626 free_decl
627 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
628 build_function_type (void_type_node,
629 tree_cons (NULL_TREE,
630 ptr_void_type_node,
631 endlink)),
632 NULL_TREE, false, true, true, NULL, Empty);
633
634 /* This is used for 64-bit multiplication with overflow checking. */
635 mulv64_decl
636 = create_subprog_decl (get_identifier ("__gnat_mulv64"), NULL_TREE,
637 build_function_type_list (int64_type, int64_type,
638 int64_type, NULL_TREE),
639 NULL_TREE, false, true, true, NULL, Empty);
640
641 /* Make the types and functions used for exception processing. */
642 jmpbuf_type
643 = build_array_type (gnat_type_for_mode (Pmode, 0),
644 build_index_type (build_int_cst (NULL_TREE, 5)));
645 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
646 true, true, Empty);
647 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
648
649 /* Functions to get and set the jumpbuf pointer for the current thread. */
650 get_jmpbuf_decl
651 = create_subprog_decl
652 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
653 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
654 NULL_TREE, false, true, true, NULL, Empty);
655 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
656 DECL_PURE_P (get_jmpbuf_decl) = 1;
657
658 set_jmpbuf_decl
659 = create_subprog_decl
660 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
661 NULL_TREE,
662 build_function_type (void_type_node,
663 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
664 NULL_TREE, false, true, true, NULL, Empty);
665
666 /* Function to get the current exception. */
667 get_excptr_decl
668 = create_subprog_decl
669 (get_identifier ("system__soft_links__get_gnat_exception"),
670 NULL_TREE,
671 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
672 NULL_TREE, false, true, true, NULL, Empty);
673 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
674 DECL_PURE_P (get_excptr_decl) = 1;
675
676 /* Functions that raise exceptions. */
677 raise_nodefer_decl
678 = create_subprog_decl
679 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
680 build_function_type (void_type_node,
681 tree_cons (NULL_TREE,
682 build_pointer_type (except_type_node),
683 endlink)),
684 NULL_TREE, false, true, true, NULL, Empty);
685
686 /* Dummy objects to materialize "others" and "all others" in the exception
687 tables. These are exported by a-exexpr.adb, so see this unit for the
688 types to use. */
689
690 others_decl
691 = create_var_decl (get_identifier ("OTHERS"),
692 get_identifier ("__gnat_others_value"),
693 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
694
695 all_others_decl
696 = create_var_decl (get_identifier ("ALL_OTHERS"),
697 get_identifier ("__gnat_all_others_value"),
698 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
699
700 /* Hooks to call when entering/leaving an exception handler. */
701 begin_handler_decl
702 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
703 build_function_type (void_type_node,
704 tree_cons (NULL_TREE,
705 ptr_void_type_node,
706 endlink)),
707 NULL_TREE, false, true, true, NULL, Empty);
708
709 end_handler_decl
710 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
711 build_function_type (void_type_node,
712 tree_cons (NULL_TREE,
713 ptr_void_type_node,
714 endlink)),
715 NULL_TREE, false, true, true, NULL, Empty);
716
717 /* If in no exception handlers mode, all raise statements are redirected to
718 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
719 this procedure will never be called in this mode. */
720 if (No_Exception_Handlers_Set ())
721 {
722 decl
723 = create_subprog_decl
724 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
725 build_function_type (void_type_node,
726 tree_cons (NULL_TREE,
727 build_pointer_type (char_type_node),
728 tree_cons (NULL_TREE,
729 integer_type_node,
730 endlink))),
731 NULL_TREE, false, true, true, NULL, Empty);
732
733 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
734 gnat_raise_decls[i] = decl;
735 }
736 else
737 /* Otherwise, make one decl for each exception reason. */
738 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
739 {
740 char name[17];
741
742 sprintf (name, "__gnat_rcheck_%.2d", i);
743 gnat_raise_decls[i]
744 = create_subprog_decl
745 (get_identifier (name), NULL_TREE,
746 build_function_type (void_type_node,
747 tree_cons (NULL_TREE,
748 build_pointer_type
749 (char_type_node),
750 tree_cons (NULL_TREE,
751 integer_type_node,
752 endlink))),
753 NULL_TREE, false, true, true, NULL, Empty);
754 }
755
756 /* Indicate that these never return. */
757 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
758 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
759 TREE_TYPE (raise_nodefer_decl)
760 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
761 TYPE_QUAL_VOLATILE);
762
763 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
764 {
765 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
766 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
767 TREE_TYPE (gnat_raise_decls[i])
768 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
769 TYPE_QUAL_VOLATILE);
770 }
771
772 /* setjmp returns an integer and has one operand, which is a pointer to
773 a jmpbuf. */
774 setjmp_decl
775 = create_subprog_decl
776 (get_identifier ("__builtin_setjmp"), NULL_TREE,
777 build_function_type (integer_type_node,
778 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
779 NULL_TREE, false, true, true, NULL, Empty);
780
781 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
782 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
783
784 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
785 address. */
786 update_setjmp_buf_decl
787 = create_subprog_decl
788 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
789 build_function_type (void_type_node,
790 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
791 NULL_TREE, false, true, true, NULL, Empty);
792
793 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
794 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
795
796 main_identifier_node = get_identifier ("main");
797
798 /* Install the builtins we might need, either internally or as
799 user available facilities for Intrinsic imports. */
800 gnat_install_builtins ();
801}
802\f
803/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
804 finish constructing the record or union type. If REP_LEVEL is zero, this
805 record has no representation clause and so will be entirely laid out here.
806 If REP_LEVEL is one, this record has a representation clause and has been
807 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
808 this record is derived from a parent record and thus inherits its layout;
809 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
810 true, the record type is expected to be modified afterwards so it will
811 not be sent to the back-end for finalization. */
812
813void
814finish_record_type (tree record_type, tree fieldlist, int rep_level,
815 bool do_not_finalize)
816{
817 enum tree_code code = TREE_CODE (record_type);
818 tree name = TYPE_NAME (record_type);
819 tree ada_size = bitsize_zero_node;
820 tree size = bitsize_zero_node;
821 bool had_size = TYPE_SIZE (record_type) != 0;
822 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
823 bool had_align = TYPE_ALIGN (record_type) != 0;
824 tree field;
825
826 if (name && TREE_CODE (name) == TYPE_DECL)
827 name = DECL_NAME (name);
828
829 TYPE_FIELDS (record_type) = fieldlist;
830 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
831
832 /* We don't need both the typedef name and the record name output in
833 the debugging information, since they are the same. */
834 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
835
836 /* Globally initialize the record first. If this is a rep'ed record,
837 that just means some initializations; otherwise, layout the record. */
838 if (rep_level > 0)
839 {
840 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
841 TYPE_MODE (record_type) = BLKmode;
842
843 if (!had_size_unit)
844 TYPE_SIZE_UNIT (record_type) = size_zero_node;
845 if (!had_size)
846 TYPE_SIZE (record_type) = bitsize_zero_node;
847
848 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
849 out just like a UNION_TYPE, since the size will be fixed. */
850 else if (code == QUAL_UNION_TYPE)
851 code = UNION_TYPE;
852 }
853 else
854 {
 855 /* Ensure there isn't a size already set. There can be one in an error
856 case where there is a rep clause but all fields have errors and
857 no longer have a position. */
858 TYPE_SIZE (record_type) = 0;
859 layout_type (record_type);
860 }
861
862 /* At this point, the position and size of each field is known. It was
863 either set before entry by a rep clause, or by laying out the type above.
864
865 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
866 to compute the Ada size; the GCC size and alignment (for rep'ed records
867 that are not padding types); and the mode (for rep'ed records). We also
868 clear the DECL_BIT_FIELD indication for the cases we know have not been
869 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
870
871 if (code == QUAL_UNION_TYPE)
872 fieldlist = nreverse (fieldlist);
873
874 for (field = fieldlist; field; field = TREE_CHAIN (field))
875 {
876 tree type = TREE_TYPE (field);
877 tree pos = bit_position (field);
878 tree this_size = DECL_SIZE (field);
879 tree this_ada_size;
880
881 if ((TREE_CODE (type) == RECORD_TYPE
882 || TREE_CODE (type) == UNION_TYPE
883 || TREE_CODE (type) == QUAL_UNION_TYPE)
884 && !TYPE_IS_FAT_POINTER_P (type)
885 && !TYPE_CONTAINS_TEMPLATE_P (type)
886 && TYPE_ADA_SIZE (type))
887 this_ada_size = TYPE_ADA_SIZE (type);
888 else
889 this_ada_size = this_size;
890
891 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
892 if (DECL_BIT_FIELD (field)
893 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
894 {
895 unsigned int align = TYPE_ALIGN (type);
896
897 /* In the general case, type alignment is required. */
898 if (value_factor_p (pos, align))
899 {
900 /* The enclosing record type must be sufficiently aligned.
901 Otherwise, if no alignment was specified for it and it
902 has been laid out already, bump its alignment to the
903 desired one if this is compatible with its size. */
904 if (TYPE_ALIGN (record_type) >= align)
905 {
906 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
907 DECL_BIT_FIELD (field) = 0;
908 }
909 else if (!had_align
910 && rep_level == 0
911 && value_factor_p (TYPE_SIZE (record_type), align))
912 {
913 TYPE_ALIGN (record_type) = align;
914 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
915 DECL_BIT_FIELD (field) = 0;
916 }
917 }
918
 919 /* In the non-strict alignment case, only byte alignment is required. */
920 if (!STRICT_ALIGNMENT
921 && DECL_BIT_FIELD (field)
922 && value_factor_p (pos, BITS_PER_UNIT))
923 DECL_BIT_FIELD (field) = 0;
924 }
925
926 /* If we still have DECL_BIT_FIELD set at this point, we know the field
927 is technically not addressable. Except that it can actually be
928 addressed if the field is BLKmode and happens to be properly
929 aligned. */
930 DECL_NONADDRESSABLE_P (field)
931 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
932
933 /* A type must be as aligned as its most aligned field that is not
934 a bit-field. But this is already enforced by layout_type. */
935 if (rep_level > 0 && !DECL_BIT_FIELD (field))
936 TYPE_ALIGN (record_type)
937 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
938
939 switch (code)
940 {
941 case UNION_TYPE:
942 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
943 size = size_binop (MAX_EXPR, size, this_size);
944 break;
945
946 case QUAL_UNION_TYPE:
947 ada_size
948 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
949 this_ada_size, ada_size);
950 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
951 this_size, size);
952 break;
953
954 case RECORD_TYPE:
955 /* Since we know here that all fields are sorted in order of
956 increasing bit position, the size of the record is one
957 higher than the ending bit of the last field processed
958 unless we have a rep clause, since in that case we might
959 have a field outside a QUAL_UNION_TYPE that has a higher ending
960 position. So use a MAX in that case. Also, if this field is a
961 QUAL_UNION_TYPE, we need to take into account the previous size in
962 the case of empty variants. */
963 ada_size
964 = merge_sizes (ada_size, pos, this_ada_size,
965 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
966 size
967 = merge_sizes (size, pos, this_size,
968 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
969 break;
970
971 default:
972 gcc_unreachable ();
973 }
974 }
975
976 if (code == QUAL_UNION_TYPE)
977 nreverse (fieldlist);
978
979 if (rep_level < 2)
980 {
981 /* If this is a padding record, we never want to make the size smaller
982 than what was specified in it, if any. */
983 if (TREE_CODE (record_type) == RECORD_TYPE
984 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
985 size = TYPE_SIZE (record_type);
986
987 /* Now set any of the values we've just computed that apply. */
988 if (!TYPE_IS_FAT_POINTER_P (record_type)
989 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
990 SET_TYPE_ADA_SIZE (record_type, ada_size);
991
992 if (rep_level > 0)
993 {
994 tree size_unit = had_size_unit
995 ? TYPE_SIZE_UNIT (record_type)
996 : convert (sizetype,
997 size_binop (CEIL_DIV_EXPR, size,
998 bitsize_unit_node));
999 unsigned int align = TYPE_ALIGN (record_type);
1000
1001 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1002 TYPE_SIZE_UNIT (record_type)
1003 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1004
1005 compute_record_mode (record_type);
1006 }
1007 }
1008
1009 if (!do_not_finalize)
1010 rest_of_record_type_compilation (record_type);
1011}
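
/* A minimal sketch of building a two-field record with the helpers in this
   file, assuming hypothetical field names and no representation clause
   (REP_LEVEL 0) so the record is laid out here:

     tree gnu_rec = make_node (RECORD_TYPE);
     tree gnu_f1 = create_field_decl (get_identifier ("F1"),
                                      integer_type_node, gnu_rec,
                                      0, NULL_TREE, NULL_TREE, 0);
     tree gnu_f2 = create_field_decl (get_identifier ("F2"),
                                      integer_type_node, gnu_rec,
                                      0, NULL_TREE, NULL_TREE, 0);

     TREE_CHAIN (gnu_f2) = gnu_f1;
     finish_record_type (gnu_rec, nreverse (gnu_f2), 0, false);

   Passing true for DO_NOT_FINALIZE would postpone the call to
   rest_of_record_type_compilation until the record is complete.  */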
1012
1013/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
1014 the debug information associated with it. It need not be invoked
1015 directly in most cases since finish_record_type takes care of doing
1016 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
1017
1018void
1019rest_of_record_type_compilation (tree record_type)
1020{
1021 tree fieldlist = TYPE_FIELDS (record_type);
1022 tree field;
1023 enum tree_code code = TREE_CODE (record_type);
1024 bool var_size = false;
1025
1026 for (field = fieldlist; field; field = TREE_CHAIN (field))
1027 {
1028 /* We need to make an XVE/XVU record if any field has variable size,
1029 whether or not the record does. For example, if we have a union,
1030 it may be that all fields, rounded up to the alignment, have the
1031 same size, in which case we'll use that size. But the debug
1032 output routines (except Dwarf2) won't be able to output the fields,
1033 so we need to make the special record. */
1034 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1035 /* If a field has a non-constant qualifier, the record will have
1036 variable size too. */
1037 || (code == QUAL_UNION_TYPE
1038 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1039 {
1040 var_size = true;
1041 break;
1042 }
1043 }
1044
1045 /* If this record is of variable size, rename it so that the
1046 debugger knows it is and make a new, parallel, record
1047 that tells the debugger how the record is laid out. See
1048 exp_dbug.ads. But don't do this for records that are padding
1049 since they confuse GDB. */
1050 if (var_size
1051 && !(TREE_CODE (record_type) == RECORD_TYPE
1052 && TYPE_IS_PADDING_P (record_type)))
1053 {
1054 tree new_record_type
1055 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1056 ? UNION_TYPE : TREE_CODE (record_type));
1057 tree orig_name = TYPE_NAME (record_type);
1058 tree orig_id
1059 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
1060 : orig_name);
1061 tree new_id
1062 = concat_id_with_name (orig_id,
1063 TREE_CODE (record_type) == QUAL_UNION_TYPE
1064 ? "XVU" : "XVE");
1065 tree last_pos = bitsize_zero_node;
1066 tree old_field;
1067 tree prev_old_field = 0;
1068
1069 TYPE_NAME (new_record_type) = new_id;
1070 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1071 TYPE_STUB_DECL (new_record_type)
1072 = build_decl (TYPE_DECL, new_id, new_record_type);
1073 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1074 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1075 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1076 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1077 TYPE_SIZE_UNIT (new_record_type)
1078 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1079
1080 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
1081
1082 /* Now scan all the fields, replacing each field with a new
1083 field corresponding to the new encoding. */
1084 for (old_field = TYPE_FIELDS (record_type); old_field;
1085 old_field = TREE_CHAIN (old_field))
1086 {
1087 tree field_type = TREE_TYPE (old_field);
1088 tree field_name = DECL_NAME (old_field);
1089 tree new_field;
1090 tree curpos = bit_position (old_field);
1091 bool var = false;
1092 unsigned int align = 0;
1093 tree pos;
1094
1095 /* See how the position was modified from the last position.
1096
1097 There are two basic cases we support: a value was added
1098 to the last position or the last position was rounded to
 1099 a boundary and then something was added. Check for the
1100 first case first. If not, see if there is any evidence
1101 of rounding. If so, round the last position and try
1102 again.
1103
1104 If this is a union, the position can be taken as zero. */
1105
1106 /* Some computations depend on the shape of the position expression,
1107 so strip conversions to make sure it's exposed. */
1108 curpos = remove_conversions (curpos, true);
1109
1110 if (TREE_CODE (new_record_type) == UNION_TYPE)
1111 pos = bitsize_zero_node, align = 0;
1112 else
1113 pos = compute_related_constant (curpos, last_pos);
1114
1115 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1116 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1117 {
1118 tree offset = TREE_OPERAND (curpos, 0);
1119 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1120
1121 /* An offset which is a bitwise AND with a negative power of 2
1122 means an alignment corresponding to this power of 2. */
1123 offset = remove_conversions (offset, true);
1124 if (TREE_CODE (offset) == BIT_AND_EXPR
1125 && host_integerp (TREE_OPERAND (offset, 1), 0)
1126 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1127 {
1128 unsigned int pow
1129 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1130 if (exact_log2 (pow) > 0)
1131 align *= pow;
1132 }
1133
1134 pos = compute_related_constant (curpos,
1135 round_up (last_pos, align));
1136 }
1137 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1138 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1139 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1140 && host_integerp (TREE_OPERAND
1141 (TREE_OPERAND (curpos, 0), 1),
1142 1))
1143 {
1144 align
1145 = tree_low_cst
1146 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1147 pos = compute_related_constant (curpos,
1148 round_up (last_pos, align));
1149 }
1150 else if (potential_alignment_gap (prev_old_field, old_field,
1151 pos))
1152 {
1153 align = TYPE_ALIGN (field_type);
1154 pos = compute_related_constant (curpos,
1155 round_up (last_pos, align));
1156 }
1157
1158 /* If we can't compute a position, set it to zero.
1159
1160 ??? We really should abort here, but it's too much work
1161 to get this correct for all cases. */
1162
1163 if (!pos)
1164 pos = bitsize_zero_node;
1165
1166 /* See if this type is variable-sized and make a pointer type
1167 and indicate the indirection if so. Beware that the debug
1168 back-end may adjust the position computed above according
1169 to the alignment of the field type, i.e. the pointer type
1170 in this case, if we don't preventively counter that. */
1171 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1172 {
1173 field_type = build_pointer_type (field_type);
1174 if (align != 0 && TYPE_ALIGN (field_type) > align)
1175 {
1176 field_type = copy_node (field_type);
1177 TYPE_ALIGN (field_type) = align;
1178 }
1179 var = true;
1180 }
1181
1182 /* Make a new field name, if necessary. */
1183 if (var || align != 0)
1184 {
1185 char suffix[16];
1186
1187 if (align != 0)
1188 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1189 align / BITS_PER_UNIT);
1190 else
1191 strcpy (suffix, "XVL");
1192
1193 field_name = concat_id_with_name (field_name, suffix);
1194 }
1195
1196 new_field = create_field_decl (field_name, field_type,
1197 new_record_type, 0,
1198 DECL_SIZE (old_field), pos, 0);
1199 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1200 TYPE_FIELDS (new_record_type) = new_field;
1201
1202 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1203 zero. The only time it's not the last field of the record
1204 is when there are other components at fixed positions after
1205 it (meaning there was a rep clause for every field) and we
1206 want to be able to encode them. */
1207 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1208 (TREE_CODE (TREE_TYPE (old_field))
1209 == QUAL_UNION_TYPE)
1210 ? bitsize_zero_node
1211 : DECL_SIZE (old_field));
1212 prev_old_field = old_field;
1213 }
1214
1215 TYPE_FIELDS (new_record_type)
1216 = nreverse (TYPE_FIELDS (new_record_type));
1217
1218 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1219 }
1220
1221 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1222}
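
/* As an illustration of the encoding above (see exp_dbug.ads for the
   authoritative description): a variable-sized record whose name encodes
   as R is given a parallel record R___XVE (R___XVU for a variant part),
   in which a variable-sized field F is replaced by a pointer-typed field
   named F___XVL, and a field whose position depends on run-time alignment
   gets an XVAnn or XVLnn suffix recording that alignment in bytes.  The
   exact names come from concat_id_with_name, so the "___" separator shown
   here is the usual GNAT encoding convention rather than something
   computed in this function.  */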
1223
 1224/* Append PARALLEL_TYPE on the chain of parallel types of DECL. */
1225
1226void
1227add_parallel_type (tree decl, tree parallel_type)
1228{
1229 tree d = decl;
1230
1231 while (DECL_PARALLEL_TYPE (d))
1232 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
1233
1234 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1235}
1236
1237/* Return the parallel type associated to a type, if any. */
1238
1239tree
1240get_parallel_type (tree type)
1241{
1242 if (TYPE_STUB_DECL (type))
1243 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1244 else
1245 return NULL_TREE;
1246}
1247
1248/* Utility function of above to merge LAST_SIZE, the previous size of a record
1249 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1250 if this represents a QUAL_UNION_TYPE in which case we must look for
1251 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1252 is nonzero, we must take the MAX of the end position of this field
1253 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1254
1255 We return an expression for the size. */
1256
1257static tree
1258merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1259 bool has_rep)
1260{
1261 tree type = TREE_TYPE (last_size);
1262 tree new;
1263
1264 if (!special || TREE_CODE (size) != COND_EXPR)
1265 {
1266 new = size_binop (PLUS_EXPR, first_bit, size);
1267 if (has_rep)
1268 new = size_binop (MAX_EXPR, last_size, new);
1269 }
1270
1271 else
1272 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1273 integer_zerop (TREE_OPERAND (size, 1))
1274 ? last_size : merge_sizes (last_size, first_bit,
1275 TREE_OPERAND (size, 1),
1276 1, has_rep),
1277 integer_zerop (TREE_OPERAND (size, 2))
1278 ? last_size : merge_sizes (last_size, first_bit,
1279 TREE_OPERAND (size, 2),
1280 1, has_rep));
1281
 1282 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1283 when fed through substitute_in_expr) into thinking that a constant
1284 size is not constant. */
1285 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1286 new = TREE_OPERAND (new, 0);
1287
1288 return new;
1289}
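
/* For instance, with LAST_SIZE = 32, FIRST_BIT = 32 and SIZE = 16 (all in
   bits) the result is 48; if HAS_REP is true and a later field placed by a
   rep clause starts at bit 0 with size 8, the MAX_EXPR keeps the running
   size at 48 instead of shrinking it to 8.  */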
1290
1291/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1292 related by the addition of a constant. Return that constant if so. */
1293
1294static tree
1295compute_related_constant (tree op0, tree op1)
1296{
1297 tree op0_var, op1_var;
1298 tree op0_con = split_plus (op0, &op0_var);
1299 tree op1_con = split_plus (op1, &op1_var);
1300 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1301
1302 if (operand_equal_p (op0_var, op1_var, 0))
1303 return result;
1304 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1305 return result;
1306 else
1307 return 0;
1308}
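
/* For instance, if OP0 is N + 40 and OP1 is N + 8 for the same variable
   part N, the result is the constant 32; if the variable parts cannot be
   matched, zero is returned to signal that no related constant exists.  */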
1309
1310/* Utility function of above to split a tree OP which may be a sum, into a
1311 constant part, which is returned, and a variable part, which is stored
1312 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1313 bitsizetype. */
1314
1315static tree
1316split_plus (tree in, tree *pvar)
1317{
1318 /* Strip NOPS in order to ease the tree traversal and maximize the
1319 potential for constant or plus/minus discovery. We need to be careful
1320 to always return and set *pvar to bitsizetype trees, but it's worth
1321 the effort. */
1322 STRIP_NOPS (in);
1323
1324 *pvar = convert (bitsizetype, in);
1325
1326 if (TREE_CODE (in) == INTEGER_CST)
1327 {
1328 *pvar = bitsize_zero_node;
1329 return convert (bitsizetype, in);
1330 }
1331 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1332 {
1333 tree lhs_var, rhs_var;
1334 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1335 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1336
1337 if (lhs_var == TREE_OPERAND (in, 0)
1338 && rhs_var == TREE_OPERAND (in, 1))
1339 return bitsize_zero_node;
1340
1341 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1342 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1343 }
1344 else
1345 return bitsize_zero_node;
1346}
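
/* For instance, splitting the bitsizetype expression N + 8 conceptually
   yields the constant 8 with *PVAR set to the variable part N, while
   splitting the constant 16 yields 16 with *PVAR = bitsize_zero_node.  */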
1347\f
1348/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1349 subprogram. If it is void_type_node, then we are dealing with a procedure,
1350 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1351 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1352 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1353 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1354 object. RETURNS_BY_REF is true if the function returns by reference.
1355 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1356 first parameter) the address of the place to copy its result. */
1357
1358tree
1359create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1360 bool returns_unconstrained, bool returns_by_ref,
1361 bool returns_by_target_ptr)
1362{
1363 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1364 the subprogram formal parameters. This list is generated by traversing the
1365 input list of PARM_DECL nodes. */
1366 tree param_type_list = NULL;
1367 tree param_decl;
1368 tree type;
1369
1370 for (param_decl = param_decl_list; param_decl;
1371 param_decl = TREE_CHAIN (param_decl))
1372 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1373 param_type_list);
1374
1375 /* The list of the function parameter types has to be terminated by the void
1376 type to signal to the back-end that we are not dealing with a variable
1377 parameter subprogram, but that the subprogram has a fixed number of
1378 parameters. */
1379 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1380
1381 /* The list of argument types has been created in reverse
1382 so nreverse it. */
1383 param_type_list = nreverse (param_type_list);
1384
1385 type = build_function_type (return_type, param_type_list);
1386
1387 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1388 or the new type should, make a copy of TYPE. Likewise for
1389 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1390 if (TYPE_CI_CO_LIST (type) || cico_list
1391 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1392 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1393 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1394 type = copy_type (type);
1395
1396 TYPE_CI_CO_LIST (type) = cico_list;
1397 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1398 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1399 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1400 return type;
1401}
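
/* A minimal sketch of building the type of a procedure taking one integer
   parameter, assuming a hypothetical PARM_DECL and no copy-in/copy-out
   list:

     tree gnu_param = build_decl (PARM_DECL, get_identifier ("I"),
                                  integer_type_node);
     tree gnu_type
       = create_subprog_type (void_type_node, gnu_param, NULL_TREE,
                              false, false, false);  */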
1402\f
1403/* Return a copy of TYPE but safe to modify in any way. */
1404
1405tree
1406copy_type (tree type)
1407{
1408 tree new = copy_node (type);
1409
1410 /* copy_node clears this field instead of copying it, because it is
1411 aliased with TREE_CHAIN. */
1412 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1413
1414 TYPE_POINTER_TO (new) = 0;
1415 TYPE_REFERENCE_TO (new) = 0;
1416 TYPE_MAIN_VARIANT (new) = new;
1417 TYPE_NEXT_VARIANT (new) = 0;
1418
1419 return new;
1420}
1421\f
1422/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1423 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1424 the decl. */
1425
1426tree
1427create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1428{
1429 /* First build a type for the desired range. */
1430 tree type = build_index_2_type (min, max);
1431
1432 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1433 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1434 is set, but not to INDEX, make a copy of this type with the requested
1435 index type. Note that we have no way of sharing these types, but that's
1436 only a small hole. */
1437 if (TYPE_INDEX_TYPE (type) == index)
1438 return type;
1439 else if (TYPE_INDEX_TYPE (type))
1440 type = copy_type (type);
1441
1442 SET_TYPE_INDEX_TYPE (type, index);
1443 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1444 return type;
1445}
1446\f
1447/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1448 string) and TYPE is a ..._TYPE node giving its data type.
1449 ARTIFICIAL_P is true if this is a declaration that was generated
1450 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1451 information about this type. GNAT_NODE is used for the position of
1452 the decl. */
1453
1454tree
1455create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1456 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1457{
1458 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1459 enum tree_code code = TREE_CODE (type);
1460
1461 DECL_ARTIFICIAL (type_decl) = artificial_p;
1462
1463 if (!TYPE_IS_DUMMY_P (type))
1464 gnat_pushdecl (type_decl, gnat_node);
1465
1466 process_attributes (type_decl, attr_list);
1467
1468 /* Pass type declaration information to the debugger unless this is an
1469 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1470 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1471 type for which debugging information was not requested. */
1472 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1473 DECL_IGNORED_P (type_decl) = 1;
1474 else if (code != ENUMERAL_TYPE
1475 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1476 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1477 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1478 rest_of_type_decl_compilation (type_decl);
1479
1480 return type_decl;
1481}
1482
1483/* Return a VAR_DECL or CONST_DECL node.
1484
1485 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1486 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1487 the GCC tree for an optional initial expression; NULL_TREE if none.
1488
1489 CONST_FLAG is true if this variable is constant, in which case we might
1490 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1491
1492 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1493 definition to be made visible outside of the current compilation unit, for
1494 instance variable definitions in a package specification.
1495
1496 EXTERN_FLAG is nonzero when processing an external variable declaration (as
1497 opposed to a definition: no storage is to be allocated for the variable).
1498
1499 STATIC_FLAG is only relevant when not at top level. In that case
1500 it indicates whether to always allocate storage to the variable.
1501
1502 GNAT_NODE is used for the position of the decl. */
1503
1504tree
1505create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1506 bool const_flag, bool public_flag, bool extern_flag,
1507 bool static_flag, bool const_decl_allowed_p,
1508 struct attrib *attr_list, Node_Id gnat_node)
1509{
1510 bool init_const
1511 = (var_init != 0
1512 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1513 && (global_bindings_p () || static_flag
1514 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1515 : TREE_CONSTANT (var_init)));
1516
1517 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
 1518 case the initializer may be used in lieu of the DECL node (as done in
1519 Identifier_to_gnu). This is useful to prevent the need of elaboration
1520 code when an identifier for which such a decl is made is in turn used as
1521 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1522 but extra constraints apply to this choice (see below) and are not
1523 relevant to the distinction we wish to make. */
1524 bool constant_p = const_flag && init_const;
1525
1526 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1527 and may be used for scalars in general but not for aggregates. */
1528 tree var_decl
1529 = build_decl ((constant_p && const_decl_allowed_p
1530 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1531 var_name, type);
1532
1533 /* If this is external, throw away any initializations (they will be done
1534 elsewhere) unless this is a constant for which we would like to remain
1535 able to get the initializer. If we are defining a global here, leave a
1536 constant initialization and save any variable elaborations for the
1537 elaboration routine. If we are just annotating types, throw away the
1538 initialization if it isn't a constant. */
1539 if ((extern_flag && !constant_p)
1540 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1541 var_init = NULL_TREE;
1542
1543 /* At the global level, an initializer requiring code to be generated
1544 produces elaboration statements. Check that such statements are allowed,
1545 that is, not violating a No_Elaboration_Code restriction. */
1546 if (global_bindings_p () && var_init != 0 && ! init_const)
1547 Check_Elaboration_Code_Allowed (gnat_node);
1548
1549 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1550 try to fiddle with DECL_COMMON. However, on platforms that don't
1551 support global BSS sections, uninitialized global variables would
1552 go in DATA instead, thus increasing the size of the executable. */
1553 if (!flag_no_common
1554 && TREE_CODE (var_decl) == VAR_DECL
1555 && !have_global_bss_p ())
1556 DECL_COMMON (var_decl) = 1;
1557 DECL_INITIAL (var_decl) = var_init;
1558 TREE_READONLY (var_decl) = const_flag;
1559 DECL_EXTERNAL (var_decl) = extern_flag;
1560 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1561 TREE_CONSTANT (var_decl) = constant_p;
1562 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1563 = TYPE_VOLATILE (type);
1564
1565 /* If it's public and not external, always allocate storage for it.
1566 At the global binding level we need to allocate static storage for the
1567 variable if and only if it's not external. If we are not at the top level
1568 we allocate automatic storage unless requested not to. */
1569 TREE_STATIC (var_decl)
1570 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1571
1572 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1573 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1574
1575 process_attributes (var_decl, attr_list);
1576
1577 /* Add this decl to the current binding level. */
1578 gnat_pushdecl (var_decl, gnat_node);
1579
1580 if (TREE_SIDE_EFFECTS (var_decl))
1581 TREE_ADDRESSABLE (var_decl) = 1;
1582
1583 if (TREE_CODE (var_decl) != CONST_DECL)
1584 {
1585 if (global_bindings_p ())
1586 rest_of_decl_compilation (var_decl, true, 0);
1587 }
1588 else
1589 expand_decl (var_decl);
1590
1591 return var_decl;
1592}
1593\f
1594/* Return true if TYPE, an aggregate type, contains (or is) an array. */
1595
1596static bool
1597aggregate_type_contains_array_p (tree type)
1598{
1599 switch (TREE_CODE (type))
1600 {
1601 case RECORD_TYPE:
1602 case UNION_TYPE:
1603 case QUAL_UNION_TYPE:
1604 {
1605 tree field;
1606 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1607 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1608 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1609 return true;
1610 return false;
1611 }
1612
1613 case ARRAY_TYPE:
1614 return true;
1615
1616 default:
1617 gcc_unreachable ();
1618 }
1619}
1620
1621/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
1622 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1623 this field is in a record type with a "pragma pack". If SIZE is nonzero
1624 it is the specified size for this field. If POS is nonzero, it is the bit
1625 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1626 the address of this field for aliasing purposes. If it is negative, we
1627 should not make a bitfield, which is used by make_aligning_type. */
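/* For illustration, assuming REC is a RECORD_TYPE being laid out under
   pragma Pack, a hypothetical call such as

     create_field_decl (get_identifier ("F"), gnat_type_for_size (10, 1),
			rec, 1, NULL_TREE, NULL_TREE, 0);

   computes the field size from rm_size of the field type and may mark the
   FIELD_DECL as DECL_BIT_FIELD so that it can be packed tightly.  */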
1628
1629tree
1630create_field_decl (tree field_name, tree field_type, tree record_type,
1631 int packed, tree size, tree pos, int addressable)
1632{
1633 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1634
1635 DECL_CONTEXT (field_decl) = record_type;
1636 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1637
1638 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1639 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1640 Likewise for an aggregate without specified position that contains an
1641 array, because in this case slices of variable length of this array
1642 must be handled by GCC and variable-sized objects need to be aligned
1643 to at least a byte boundary. */
1644 if (packed && (TYPE_MODE (field_type) == BLKmode
1645 || (!pos
1646 && AGGREGATE_TYPE_P (field_type)
1647 && aggregate_type_contains_array_p (field_type))))
1648 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1649
1650 /* If a size is specified, use it. Otherwise, if the record type is packed
1651 compute a size to use, which may differ from the object's natural size.
1652 We always set a size in this case to trigger the checks for bitfield
1653 creation below, which is typically required when no position has been
1654 specified. */
1655 if (size)
1656 size = convert (bitsizetype, size);
1657 else if (packed == 1)
1658 {
1659 size = rm_size (field_type);
1660
1661 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1662 byte. */
1663 if (TREE_CODE (size) == INTEGER_CST
1664 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1665 size = round_up (size, BITS_PER_UNIT);
1666 }
1667
1668  /* If ADDRESSABLE allows it, make a bitfield when a size is specified,
1669     for two reasons: first, if the size differs from the natural size;
1670     second, if the alignment is insufficient.  There are a number of
1671     ways the latter can be true.
1672
1673 We never make a bitfield if the type of the field has a nonconstant size,
1674 because no such entity requiring bitfield operations should reach here.
1675
1676 We do *preventively* make a bitfield when there might be the need for it
1677 but we don't have all the necessary information to decide, as is the case
1678 of a field with no specified position in a packed record.
1679
1680 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1681 in layout_decl or finish_record_type to clear the bit_field indication if
1682 it is in fact not needed. */
1683 if (addressable >= 0
1684 && size
1685 && TREE_CODE (size) == INTEGER_CST
1686 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1687 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1688 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1689 || packed
1690 || (TYPE_ALIGN (record_type) != 0
1691 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1692 {
1693 DECL_BIT_FIELD (field_decl) = 1;
1694 DECL_SIZE (field_decl) = size;
1695 if (!packed && !pos)
1696 DECL_ALIGN (field_decl)
1697 = (TYPE_ALIGN (record_type) != 0
1698 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1699 : TYPE_ALIGN (field_type));
1700 }
1701
1702 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1703
1704 /* Bump the alignment if need be, either for bitfield/packing purposes or
1705 to satisfy the type requirements if no such consideration applies. When
1706 we get the alignment from the type, indicate if this is from an explicit
1707 user request, which prevents stor-layout from lowering it later on. */
1708 {
1709 int bit_align
1710 = (DECL_BIT_FIELD (field_decl) ? 1
1711 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1712
1713 if (bit_align > DECL_ALIGN (field_decl))
1714 DECL_ALIGN (field_decl) = bit_align;
1715 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1716 {
1717 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1718 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1719 }
1720 }
1721
1722 if (pos)
1723 {
1724 /* We need to pass in the alignment the DECL is known to have.
1725 This is the lowest-order bit set in POS, but no more than
1726 the alignment of the record, if one is specified. Note
1727 that an alignment of 0 is taken as infinite. */
1728 unsigned int known_align;
1729
1730 if (host_integerp (pos, 1))
1731 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1732 else
1733 known_align = BITS_PER_UNIT;
1734
1735 if (TYPE_ALIGN (record_type)
1736 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1737 known_align = TYPE_ALIGN (record_type);
1738
1739 layout_decl (field_decl, known_align);
1740 SET_DECL_OFFSET_ALIGN (field_decl,
1741 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1742 : BITS_PER_UNIT);
1743 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1744 &DECL_FIELD_BIT_OFFSET (field_decl),
1745 DECL_OFFSET_ALIGN (field_decl), pos);
1746
1747 DECL_HAS_REP_P (field_decl) = 1;
1748 }
1749
1750 /* In addition to what our caller says, claim the field is addressable if we
1751 know that its type is not suitable.
1752
1753 The field may also be "technically" nonaddressable, meaning that even if
1754 we attempt to take the field's address we will actually get the address
1755 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1756 value we have at this point is not accurate enough, so we don't account
1757 for this here and let finish_record_type decide. */
1758  if (!addressable && !type_for_nonaliased_component_p (field_type))
1759 addressable = 1;
1760
1761 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1762
1763 return field_decl;
1764}
1765\f
1766/* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1767 PARAM_TYPE is its type. READONLY is true if the parameter is
1768 readonly (either an In parameter or an address of a pass-by-ref
1769 parameter). */
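/* For illustration, a hypothetical call such as

     create_param_decl (get_identifier ("P"), gnat_type_for_size (8, 0), true);

   builds a read-only PARM_DECL whose DECL_ARG_TYPE is promoted to
   integer_type_node when the target's promote_prototypes hook asks for it,
   mirroring the usual C argument promotions.  */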
1770
1771tree
1772create_param_decl (tree param_name, tree param_type, bool readonly)
1773{
1774 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1775
1776 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1777 lead to various ABI violations. */
1778 if (targetm.calls.promote_prototypes (param_type)
1779 && (TREE_CODE (param_type) == INTEGER_TYPE
1780 || TREE_CODE (param_type) == ENUMERAL_TYPE
1781 || TREE_CODE (param_type) == BOOLEAN_TYPE)
1782 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1783 {
1784 /* We have to be careful about biased types here. Make a subtype
1785 of integer_type_node with the proper biasing. */
1786 if (TREE_CODE (param_type) == INTEGER_TYPE
1787 && TYPE_BIASED_REPRESENTATION_P (param_type))
1788 {
1789 param_type
1790 = copy_type (build_range_type (integer_type_node,
1791 TYPE_MIN_VALUE (param_type),
1792 TYPE_MAX_VALUE (param_type)));
1793
1794 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1795 }
1796 else
1797 param_type = integer_type_node;
1798 }
1799
1800 DECL_ARG_TYPE (param_decl) = param_type;
1801 TREE_READONLY (param_decl) = readonly;
1802 return param_decl;
1803}
1804\f
1805/* Given a DECL and ATTR_LIST, process the listed attributes. */
1806
1807void
1808process_attributes (tree decl, struct attrib *attr_list)
1809{
1810 for (; attr_list; attr_list = attr_list->next)
1811 switch (attr_list->type)
1812 {
1813 case ATTR_MACHINE_ATTRIBUTE:
1814 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1815 NULL_TREE),
1816 ATTR_FLAG_TYPE_IN_PLACE);
1817 break;
1818
1819 case ATTR_LINK_ALIAS:
1820 if (! DECL_EXTERNAL (decl))
1821 {
1822 TREE_STATIC (decl) = 1;
1823 assemble_alias (decl, attr_list->name);
1824 }
1825 break;
1826
1827 case ATTR_WEAK_EXTERNAL:
1828 if (SUPPORTS_WEAK)
1829 declare_weak (decl);
1830 else
1831 post_error ("?weak declarations not supported on this target",
1832 attr_list->error_point);
1833 break;
1834
1835 case ATTR_LINK_SECTION:
1836 if (targetm.have_named_sections)
1837 {
1838 DECL_SECTION_NAME (decl)
1839 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1840 IDENTIFIER_POINTER (attr_list->name));
1841 DECL_COMMON (decl) = 0;
1842 }
1843 else
1844 post_error ("?section attributes are not supported for this target",
1845 attr_list->error_point);
1846 break;
1847
1848 case ATTR_LINK_CONSTRUCTOR:
1849 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1850 TREE_USED (decl) = 1;
1851 break;
1852
1853 case ATTR_LINK_DESTRUCTOR:
1854 DECL_STATIC_DESTRUCTOR (decl) = 1;
1855 TREE_USED (decl) = 1;
1856 break;
1857 }
1858}
1859\f
1860/* Record a global renaming pointer. */
1861
1862void
1863record_global_renaming_pointer (tree decl)
1864{
1865 gcc_assert (DECL_RENAMED_OBJECT (decl));
1866 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1867}
1868
1869/* Invalidate the global renaming pointers. */
1870
1871void
1872invalidate_global_renaming_pointers (void)
1873{
1874 unsigned int i;
1875 tree iter;
1876
1877 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1878 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1879
1880 VEC_free (tree, gc, global_renaming_pointers);
1881}
1882
1883/* Return true if VALUE is known to be a multiple of FACTOR, which must be
1884 a power of 2. */
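/* For instance, value_factor_p (bitsize_int (24), 8) is true; for a
   MULT_EXPR it suffices that either operand be a multiple of FACTOR.  */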
1885
1886bool
1887value_factor_p (tree value, HOST_WIDE_INT factor)
1888{
1889 if (host_integerp (value, 1))
1890 return tree_low_cst (value, 1) % factor == 0;
1891
1892 if (TREE_CODE (value) == MULT_EXPR)
1893 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1894 || value_factor_p (TREE_OPERAND (value, 1), factor));
1895
1896 return false;
1897}
1898
1899/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1900 unless we can prove these 2 fields are laid out in such a way that no gap
1901    exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
1902 is the distance in bits between the end of PREV_FIELD and the starting
1903 position of CURR_FIELD. It is ignored if null. */
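/* Worked example: if PREV_FIELD sits at constant bit position 0 with
   DECL_SIZE 24 and CURR_FIELD has DECL_ALIGN 32, then (0 + 24) % 32 != 0
   and a potential gap is reported; with DECL_SIZE 32 the sum is a multiple
   of the alignment and no gap can exist.  */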
1904
1905static bool
1906potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1907{
1908 /* If this is the first field of the record, there cannot be any gap */
1909 if (!prev_field)
1910 return false;
1911
1912 /* If the previous field is a union type, then return False: The only
1913 time when such a field is not the last field of the record is when
1914 there are other components at fixed positions after it (meaning there
1915 was a rep clause for every field), in which case we don't want the
1916 alignment constraint to override them. */
1917 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1918 return false;
1919
1920 /* If the distance between the end of prev_field and the beginning of
1921 curr_field is constant, then there is a gap if the value of this
1922 constant is not null. */
1923 if (offset && host_integerp (offset, 1))
1924 return !integer_zerop (offset);
1925
1926 /* If the size and position of the previous field are constant,
1927 then check the sum of this size and position. There will be a gap
1928     iff it is not a multiple of the current field alignment.  */
1929 if (host_integerp (DECL_SIZE (prev_field), 1)
1930 && host_integerp (bit_position (prev_field), 1))
1931 return ((tree_low_cst (bit_position (prev_field), 1)
1932 + tree_low_cst (DECL_SIZE (prev_field), 1))
1933 % DECL_ALIGN (curr_field) != 0);
1934
1935 /* If both the position and size of the previous field are multiples
1936 of the current field alignment, there cannot be any gap. */
1937 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1938 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1939 return false;
1940
1941  /* Fallback: return that there may be a potential gap.  */
1942 return true;
1943}
1944
1945/* Returns a LABEL_DECL node for LABEL_NAME. */
1946
1947tree
1948create_label_decl (tree label_name)
1949{
1950 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1951
1952 DECL_CONTEXT (label_decl) = current_function_decl;
1953 DECL_MODE (label_decl) = VOIDmode;
1954 DECL_SOURCE_LOCATION (label_decl) = input_location;
1955
1956 return label_decl;
1957}
1958\f
1959/* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1960 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1961 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1962 PARM_DECL nodes chained through the TREE_CHAIN field).
1963
1964 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1965 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
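/* For illustration, a hypothetical call such as

     create_subprog_decl (get_identifier ("proc"), NULL_TREE,
			  build_function_type (void_type_node, NULL_TREE),
			  NULL_TREE, false, true, false, NULL, Empty);

   builds a public, non-inlined, parameterless FUNCTION_DECL whose body is
   expected to be supplied later via begin_subprog_body/end_subprog_body.  */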
1966
1967tree
1968create_subprog_decl (tree subprog_name, tree asm_name,
1969 tree subprog_type, tree param_decl_list, bool inline_flag,
1970 bool public_flag, bool extern_flag,
1971 struct attrib *attr_list, Node_Id gnat_node)
1972{
1973 tree return_type = TREE_TYPE (subprog_type);
1974 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1975
1976 /* If this is a function nested inside an inlined external function, it
1977 means we aren't going to compile the outer function unless it is
1978 actually inlined, so do the same for us. */
1979 if (current_function_decl && DECL_INLINE (current_function_decl)
1980 && DECL_EXTERNAL (current_function_decl))
1981 extern_flag = true;
1982
1983 DECL_EXTERNAL (subprog_decl) = extern_flag;
1984 TREE_PUBLIC (subprog_decl) = public_flag;
1985 TREE_STATIC (subprog_decl) = 1;
1986 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1987 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1988 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1989 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1990 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1991 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1992 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1993
1994 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1995 target by-reference return mechanism. This is not supported all the
1996 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1997 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1998 the RESULT_DECL instead - see gnat_genericize for more details. */
1999 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
2000 {
2001 tree result_decl = DECL_RESULT (subprog_decl);
2002
2003 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
2004 DECL_BY_REFERENCE (result_decl) = 1;
2005 }
2006
2007 if (inline_flag)
2008 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2009
2010 if (asm_name)
2011 {
2012 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2013
2014 /* The expand_main_function circuitry expects "main_identifier_node" to
2015 designate the DECL_NAME of the 'main' entry point, in turn expected
2016 to be declared as the "main" function literally by default. Ada
2017 program entry points are typically declared with a different name
2018 within the binder generated file, exported as 'main' to satisfy the
2019 system expectations. Redirect main_identifier_node in this case. */
2020 if (asm_name == main_identifier_node)
2021 main_identifier_node = DECL_NAME (subprog_decl);
2022 }
2023
2024 process_attributes (subprog_decl, attr_list);
2025
2026 /* Add this decl to the current binding level. */
2027 gnat_pushdecl (subprog_decl, gnat_node);
2028
2029 /* Output the assembler code and/or RTL for the declaration. */
2030 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2031
2032 return subprog_decl;
2033}
2034\f
2035/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2036 body. This routine needs to be invoked before processing the declarations
2037 appearing in the subprogram. */
2038
2039void
2040begin_subprog_body (tree subprog_decl)
2041{
2042 tree param_decl;
2043
2044 current_function_decl = subprog_decl;
2045 announce_function (subprog_decl);
2046
2047 /* Enter a new binding level and show that all the parameters belong to
2048 this function. */
2049 gnat_pushlevel ();
2050 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2051 param_decl = TREE_CHAIN (param_decl))
2052 DECL_CONTEXT (param_decl) = subprog_decl;
2053
2054 make_decl_rtl (subprog_decl);
2055
2056 /* We handle pending sizes via the elaboration of types, so we don't need to
2057 save them. This causes them to be marked as part of the outer function
2058 and then discarded. */
2059 get_pending_sizes ();
2060}
2061
2062
2063/* Helper for the genericization callback. Return a dereference of VAL
2064 if it is of a reference type. */
2065
2066static tree
2067convert_from_reference (tree val)
2068{
2069 tree value_type, ref;
2070
2071 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
2072 return val;
2073
2074 value_type = TREE_TYPE (TREE_TYPE (val));
2075 ref = build1 (INDIRECT_REF, value_type, val);
2076
2077 /* See if what we reference is CONST or VOLATILE, which requires
2078 looking into array types to get to the component type. */
2079
2080 while (TREE_CODE (value_type) == ARRAY_TYPE)
2081 value_type = TREE_TYPE (value_type);
2082
2083 TREE_READONLY (ref)
2084 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2085 TREE_THIS_VOLATILE (ref)
2086 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
2087
2088 TREE_SIDE_EFFECTS (ref)
2089 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2090
2091 return ref;
2092}
2093
2094/* Helper for the genericization callback. Returns true if T denotes
2095 a RESULT_DECL with DECL_BY_REFERENCE set. */
2096
2097static inline bool
2098is_byref_result (tree t)
2099{
2100 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2101}
2102
2103
2104/* Tree walking callback for gnat_genericize. Currently ...
2105
2106 o Adjust references to the function's DECL_RESULT if it is marked
2107 DECL_BY_REFERENCE and so has had its type turned into a reference
2108 type at the end of the function compilation. */
2109
2110static tree
2111gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2112{
2113  /* This implementation is modeled after what the C++ front-end does,
2114     which forms the basis of the downstream passes' behavior.  */
2115
2116 tree stmt = *stmt_p;
2117 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2118
2119 /* If we have a direct mention of the result decl, dereference. */
2120 if (is_byref_result (stmt))
2121 {
2122 *stmt_p = convert_from_reference (stmt);
2123 *walk_subtrees = 0;
2124 return NULL;
2125 }
2126
2127 /* Otherwise, no need to walk the same tree twice. */
2128 if (pointer_set_contains (p_set, stmt))
2129 {
2130 *walk_subtrees = 0;
2131 return NULL_TREE;
2132 }
2133
2134 /* If we are taking the address of what now is a reference, just get the
2135 reference value. */
2136 if (TREE_CODE (stmt) == ADDR_EXPR
2137 && is_byref_result (TREE_OPERAND (stmt, 0)))
2138 {
2139 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2140 *walk_subtrees = 0;
2141 }
2142
2143  /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR.  */
2144 else if (TREE_CODE (stmt) == RETURN_EXPR
2145 && TREE_OPERAND (stmt, 0)
2146 && is_byref_result (TREE_OPERAND (stmt, 0)))
2147 *walk_subtrees = 0;
2148
2149 /* Don't look inside trees that cannot embed references of interest. */
2150 else if (IS_TYPE_OR_DECL_P (stmt))
2151 *walk_subtrees = 0;
2152
2153 pointer_set_insert (p_set, *stmt_p);
2154
2155 return NULL;
2156}
2157
2158/* Perform lowering of Ada trees to GENERIC. In particular:
2159
2160 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2161 and adjust all the references to this decl accordingly. */
2162
2163static void
2164gnat_genericize (tree fndecl)
2165{
2166 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2167 was handled by simply setting TREE_ADDRESSABLE on the result type.
2168 Everything required to actually pass by invisible ref using the target
2169 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2170
2171 This doesn't work with GCC 4 any more for several reasons. First, the
2172 gimplification process might need the creation of temporaries of this
2173 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2174 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2175 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2176 be explicitly accounted for by the front-end in the function body.
2177
2178 We achieve the complete transformation in two steps:
2179
2180 1/ create_subprog_decl performs early attribute tweaks: it clears
2181 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2182 the result decl. The former ensures that the bit isn't set in the GCC
2183 tree saved for the function, so prevents ICEs on temporary creation.
2184 The latter we use here to trigger the rest of the processing.
2185
2186 2/ This function performs the type transformation on the result decl
2187 and adjusts all the references to this decl from the function body
2188 accordingly.
2189
2190 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2191 strategy, which escapes the gimplifier temporary creation issues by
2192     creating its own temporaries using TARGET_EXPR nodes.  Our way relies
2193 on simple specific support code in aggregate_value_p to look at the
2194 target function result decl explicitly. */
2195
2196 struct pointer_set_t *p_set;
2197 tree decl_result = DECL_RESULT (fndecl);
2198
2199 if (!DECL_BY_REFERENCE (decl_result))
2200 return;
2201
2202 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2203 occurrences in the function body using the common tree-walking facility.
2204 We want to see every occurrence of the result decl to adjust the
2205 referencing tree, so need to use our own pointer set to control which
2206 trees should be visited again or not. */
2207
2208 p_set = pointer_set_create ();
2209
2210 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2211 TREE_ADDRESSABLE (decl_result) = 0;
2212 relayout_decl (decl_result);
2213
2214 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2215
2216 pointer_set_destroy (p_set);
2217}
2218
2219/* Finish the definition of the current subprogram BODY and compile it all the
2220 way to assembler language output. ELAB_P tells if this is called for an
2221 elaboration routine, to be entirely discarded if empty. */
2222
2223void
2224end_subprog_body (tree body, bool elab_p)
2225{
2226 tree fndecl = current_function_decl;
2227
2228 /* Mark the BLOCK for this level as being for this function and pop the
2229 level. Since the vars in it are the parameters, clear them. */
2230 BLOCK_VARS (current_binding_level->block) = 0;
2231 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2232 DECL_INITIAL (fndecl) = current_binding_level->block;
2233 gnat_poplevel ();
2234
2235 /* Deal with inline. If declared inline or we should default to inline,
2236 set the flag in the decl. */
2237 DECL_INLINE (fndecl) = 1;
2238
2239 /* We handle pending sizes via the elaboration of types, so we don't
2240 need to save them. */
2241 get_pending_sizes ();
2242
2243 /* Mark the RESULT_DECL as being in this subprogram. */
2244 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2245
2246 DECL_SAVED_TREE (fndecl) = body;
2247
2248 current_function_decl = DECL_CONTEXT (fndecl);
2249 set_cfun (NULL);
2250
2251 /* We cannot track the location of errors past this point. */
2252 error_gnat_node = Empty;
2253
2254 /* If we're only annotating types, don't actually compile this function. */
2255 if (type_annotate_only)
2256 return;
2257
2258 /* Perform the required pre-gimplification transformations on the tree. */
2259 gnat_genericize (fndecl);
2260
2261 /* We do different things for nested and non-nested functions.
2262 ??? This should be in cgraph. */
2263 if (!DECL_CONTEXT (fndecl))
2264 {
2265 gnat_gimplify_function (fndecl);
2266
2267 /* If this is an empty elaboration proc, just discard the node.
2268 Otherwise, compile further. */
2269 if (elab_p && empty_body_p (gimple_body (fndecl)))
2270 cgraph_remove_node (cgraph_node (fndecl));
2271 else
2272 cgraph_finalize_function (fndecl, false);
2273 }
2274 else
2275 /* Register this function with cgraph just far enough to get it
2276 added to our parent's nested function list. */
2277 (void) cgraph_node (fndecl);
2278}
2279
2280/* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2281
2282static void
2283gnat_gimplify_function (tree fndecl)
2284{
2285 struct cgraph_node *cgn;
2286
2287 dump_function (TDI_original, fndecl);
2288 gimplify_function_tree (fndecl);
2289 dump_function (TDI_generic, fndecl);
2290
2291 /* Convert all nested functions to GIMPLE now. We do things in this order
2292 so that items like VLA sizes are expanded properly in the context of the
2293 correct function. */
2294 cgn = cgraph_node (fndecl);
2295 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2296 gnat_gimplify_function (cgn->decl);
2297}
2298\f
2299
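/* Record DECL as a builtin function: simply add it to the current binding
   level and return it.  */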
2300tree
2301gnat_builtin_function (tree decl)
2302{
2303 gnat_pushdecl (decl, Empty);
2304 return decl;
2305}
2306
2307/* Return an integer type with the number of bits of precision given by
2308 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2309 it is a signed type. */
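/* For instance, gnat_type_for_size (8, 1) returns an 8-bit unsigned type
   typically named "UNSIGNED_8" and gnat_type_for_size (32, 0) a 32-bit
   signed type typically named "SIGNED_32"; results are cached in
   signed_and_unsigned_types for precisions up to 2 * MAX_BITS_PER_WORD.  */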
2310
2311tree
2312gnat_type_for_size (unsigned precision, int unsignedp)
2313{
2314 tree t;
2315 char type_name[20];
2316
2317 if (precision <= 2 * MAX_BITS_PER_WORD
2318 && signed_and_unsigned_types[precision][unsignedp])
2319 return signed_and_unsigned_types[precision][unsignedp];
2320
2321 if (unsignedp)
2322 t = make_unsigned_type (precision);
2323 else
2324 t = make_signed_type (precision);
2325
2326 if (precision <= 2 * MAX_BITS_PER_WORD)
2327 signed_and_unsigned_types[precision][unsignedp] = t;
2328
2329 if (!TYPE_NAME (t))
2330 {
2331 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2332 TYPE_NAME (t) = get_identifier (type_name);
2333 }
2334
2335 return t;
2336}
2337
2338/* Likewise for floating-point types. */
2339
2340static tree
2341float_type_for_precision (int precision, enum machine_mode mode)
2342{
2343 tree t;
2344 char type_name[20];
2345
2346 if (float_types[(int) mode])
2347 return float_types[(int) mode];
2348
2349 float_types[(int) mode] = t = make_node (REAL_TYPE);
2350 TYPE_PRECISION (t) = precision;
2351 layout_type (t);
2352
2353 gcc_assert (TYPE_MODE (t) == mode);
2354 if (!TYPE_NAME (t))
2355 {
2356 sprintf (type_name, "FLOAT_%d", precision);
2357 TYPE_NAME (t) = get_identifier (type_name);
2358 }
2359
2360 return t;
2361}
2362
2363/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2364 an unsigned type; otherwise a signed type is returned. */
2365
2366tree
2367gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2368{
2369 if (mode == BLKmode)
2370 return NULL_TREE;
2371 else if (mode == VOIDmode)
2372 return void_type_node;
2373 else if (COMPLEX_MODE_P (mode))
2374 return NULL_TREE;
2375 else if (SCALAR_FLOAT_MODE_P (mode))
2376 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2377 else if (SCALAR_INT_MODE_P (mode))
2378 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2379 else
2380 return NULL_TREE;
2381}
2382
2383/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2384
2385tree
2386gnat_unsigned_type (tree type_node)
2387{
2388 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2389
2390 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2391 {
2392 type = copy_node (type);
2393 TREE_TYPE (type) = type_node;
2394 }
2395 else if (TREE_TYPE (type_node)
2396 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2397 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2398 {
2399 type = copy_node (type);
2400 TREE_TYPE (type) = TREE_TYPE (type_node);
2401 }
2402
2403 return type;
2404}
2405
2406/* Return the signed version of a TYPE_NODE, a scalar type. */
2407
2408tree
2409gnat_signed_type (tree type_node)
2410{
2411 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2412
2413 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2414 {
2415 type = copy_node (type);
2416 TREE_TYPE (type) = type_node;
2417 }
2418 else if (TREE_TYPE (type_node)
2419 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2420 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2421 {
2422 type = copy_node (type);
2423 TREE_TYPE (type) = TREE_TYPE (type_node);
2424 }
2425
2426 return type;
2427}
2428
2429/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2430 transparently converted to each other. */
2431
2432int
2433gnat_types_compatible_p (tree t1, tree t2)
2434{
2435 enum tree_code code;
2436
2437 /* This is the default criterion. */
2438 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2439 return 1;
2440
2441 /* We only check structural equivalence here. */
2442 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2443 return 0;
2444
2445 /* Array types are also compatible if they are constrained and have
2446 the same component type and the same domain. */
2447 if (code == ARRAY_TYPE
2448 && TREE_TYPE (t1) == TREE_TYPE (t2)
2449 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2450 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2451 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2452 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))
2453 return 1;
2454
2455 /* Padding record types are also compatible if they pad the same
2456 type and have the same constant size. */
2457 if (code == RECORD_TYPE
2458 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2459 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2460 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2461 return 1;
2462
2463 return 0;
2464}
2465\f
2466/* EXP is an expression for the size of an object. If this size contains
2467 discriminant references, replace them with the maximum (if MAX_P) or
2468 minimum (if !MAX_P) possible value of the discriminant. */
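/* Worked example: with MAX_P true, an expression of the form
   (A ? B : C) - D is rewritten below as
   MAX (max_size (B - D, true), max_size (C - D, true)), where each
   subtraction replaces its right operand by its minimum, so the result is
   a safe upper bound on the original size expression.  */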
2469
2470tree
2471max_size (tree exp, bool max_p)
2472{
2473 enum tree_code code = TREE_CODE (exp);
2474 tree type = TREE_TYPE (exp);
2475
2476 switch (TREE_CODE_CLASS (code))
2477 {
2478 case tcc_declaration:
2479 case tcc_constant:
2480 return exp;
2481
2482 case tcc_vl_exp:
2483 if (code == CALL_EXPR)
2484 {
2485 tree *argarray;
2486 int i, n = call_expr_nargs (exp);
2487 gcc_assert (n > 0);
2488
2489 argarray = (tree *) alloca (n * sizeof (tree));
2490 for (i = 0; i < n; i++)
2491 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2492 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2493 }
2494 break;
2495
2496 case tcc_reference:
2497 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2498 modify. Otherwise, we treat it like a variable. */
2499 if (!CONTAINS_PLACEHOLDER_P (exp))
2500 return exp;
2501
2502 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2503 return
2504 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2505
2506 case tcc_comparison:
2507 return max_p ? size_one_node : size_zero_node;
2508
2509 case tcc_unary:
2510 case tcc_binary:
2511 case tcc_expression:
2512 switch (TREE_CODE_LENGTH (code))
2513 {
2514 case 1:
2515 if (code == NON_LVALUE_EXPR)
2516 return max_size (TREE_OPERAND (exp, 0), max_p);
2517 else
2518 return
2519 fold_build1 (code, type,
2520 max_size (TREE_OPERAND (exp, 0),
2521 code == NEGATE_EXPR ? !max_p : max_p));
2522
2523 case 2:
2524 if (code == COMPOUND_EXPR)
2525 return max_size (TREE_OPERAND (exp, 1), max_p);
2526
2527 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2528 may provide a tighter bound on max_size. */
2529 if (code == MINUS_EXPR
2530 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2531 {
2532 tree lhs = fold_build2 (MINUS_EXPR, type,
2533 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2534 TREE_OPERAND (exp, 1));
2535 tree rhs = fold_build2 (MINUS_EXPR, type,
2536 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2537 TREE_OPERAND (exp, 1));
2538 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2539 max_size (lhs, max_p),
2540 max_size (rhs, max_p));
2541 }
2542
2543 {
2544 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2545 tree rhs = max_size (TREE_OPERAND (exp, 1),
2546 code == MINUS_EXPR ? !max_p : max_p);
2547
2548 /* Special-case wanting the maximum value of a MIN_EXPR.
2549 In that case, if one side overflows, return the other.
2550 sizetype is signed, but we know sizes are non-negative.
2551 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2552 overflowing or the maximum possible value and the RHS
2553 a variable. */
2554 if (max_p
2555 && code == MIN_EXPR
2556 && TREE_CODE (rhs) == INTEGER_CST
2557 && TREE_OVERFLOW (rhs))
2558 return lhs;
2559 else if (max_p
2560 && code == MIN_EXPR
2561 && TREE_CODE (lhs) == INTEGER_CST
2562 && TREE_OVERFLOW (lhs))
2563 return rhs;
2564 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2565 && ((TREE_CODE (lhs) == INTEGER_CST
2566 && TREE_OVERFLOW (lhs))
2567 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2568 && !TREE_CONSTANT (rhs))
2569 return lhs;
2570 else
2571 return fold_build2 (code, type, lhs, rhs);
2572 }
2573
2574 case 3:
2575 if (code == SAVE_EXPR)
2576 return exp;
2577 else if (code == COND_EXPR)
2578 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2579 max_size (TREE_OPERAND (exp, 1), max_p),
2580 max_size (TREE_OPERAND (exp, 2), max_p));
2581 }
2582
2583 /* Other tree classes cannot happen. */
2584 default:
2585 break;
2586 }
2587
2588 gcc_unreachable ();
2589}
2590\f
2591/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2592 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2593 Return a constructor for the template. */
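/* For illustration, for a one-dimensional unconstrained array such as
   Ada's String, TEMPLATE_TYPE has one pair of fields holding the low and
   high bounds, so the constructor built here amounts to
   {low => 1, high => N}, with any PLACEHOLDER_EXPR in the bounds
   substituted from EXPR.  */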
2594
2595tree
2596build_template (tree template_type, tree array_type, tree expr)
2597{
2598 tree template_elts = NULL_TREE;
2599 tree bound_list = NULL_TREE;
2600 tree field;
2601
2602 while (TREE_CODE (array_type) == RECORD_TYPE
2603 && (TYPE_IS_PADDING_P (array_type)
2604 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2605 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2606
2607 if (TREE_CODE (array_type) == ARRAY_TYPE
2608 || (TREE_CODE (array_type) == INTEGER_TYPE
2609 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2610 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2611
2612 /* First make the list for a CONSTRUCTOR for the template. Go down the
2613 field list of the template instead of the type chain because this
2614 array might be an Ada array of arrays and we can't tell where the
2615 nested arrays stop being the underlying object. */
2616
2617 for (field = TYPE_FIELDS (template_type); field;
2618 (bound_list
2619 ? (bound_list = TREE_CHAIN (bound_list))
2620 : (array_type = TREE_TYPE (array_type))),
2621 field = TREE_CHAIN (TREE_CHAIN (field)))
2622 {
2623 tree bounds, min, max;
2624
2625 /* If we have a bound list, get the bounds from there. Likewise
2626 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2627 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2628 This will give us a maximum range. */
2629 if (bound_list)
2630 bounds = TREE_VALUE (bound_list);
2631 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2632 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2633 else if (expr && TREE_CODE (expr) == PARM_DECL
2634 && DECL_BY_COMPONENT_PTR_P (expr))
2635 bounds = TREE_TYPE (field);
2636 else
2637 gcc_unreachable ();
2638
2639 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2640 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2641
2642 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2643 substitute it from OBJECT. */
2644 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2645 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2646
2647 template_elts = tree_cons (TREE_CHAIN (field), max,
2648 tree_cons (field, min, template_elts));
2649 }
2650
2651 return gnat_build_constructor (template_type, nreverse (template_elts));
2652}
2653\f
2654/* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2655 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2656 in the type contains in its DECL_INITIAL the expression to use when
2657 a constructor is made for the type. GNAT_ENTITY is an entity used
2658 to print out an error message if the mechanism cannot be applied to
2659 an object of that type and also for the name. */
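/* For illustration, for a constrained one-dimensional array passed
   By_Descriptor_A, the record built below starts with LENGTH (the element
   size in bytes), DTYPE = 14 (array), CLASS = 4 and a 32-bit POINTER to
   the data, followed by the SCALE, DIGITS, AFLAGS, DIMCT, ARSIZE, A0,
   addressing coefficient and bound fields filled in by the
   By_Descriptor_A case.  */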
2660
2661tree
2662build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2663{
2664 tree record_type = make_node (RECORD_TYPE);
2665 tree pointer32_type;
2666 tree field_list = 0;
2667 int class;
2668 int dtype = 0;
2669 tree inner_type;
2670 int ndim;
2671 int i;
2672 tree *idx_arr;
2673 tree tem;
2674
2675 /* If TYPE is an unconstrained array, use the underlying array type. */
2676 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2677 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2678
2679 /* If this is an array, compute the number of dimensions in the array,
2680 get the index types, and point to the inner type. */
2681 if (TREE_CODE (type) != ARRAY_TYPE)
2682 ndim = 0;
2683 else
2684 for (ndim = 1, inner_type = type;
2685 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2686 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2687 ndim++, inner_type = TREE_TYPE (inner_type))
2688 ;
2689
2690 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2691
2692  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2693 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2694 for (i = ndim - 1, inner_type = type;
2695 i >= 0;
2696 i--, inner_type = TREE_TYPE (inner_type))
2697 idx_arr[i] = TYPE_DOMAIN (inner_type);
2698 else
2699 for (i = 0, inner_type = type;
2700 i < ndim;
2701 i++, inner_type = TREE_TYPE (inner_type))
2702 idx_arr[i] = TYPE_DOMAIN (inner_type);
2703
2704 /* Now get the DTYPE value. */
2705 switch (TREE_CODE (type))
2706 {
2707 case INTEGER_TYPE:
2708 case ENUMERAL_TYPE:
2709    case BOOLEAN_TYPE:
2710 if (TYPE_VAX_FLOATING_POINT_P (type))
2711 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2712 {
2713 case 6:
2714 dtype = 10;
2715 break;
2716 case 9:
2717 dtype = 11;
2718 break;
2719 case 15:
2720 dtype = 27;
2721 break;
2722 }
2723 else
2724 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2725 {
2726 case 8:
2727 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2728 break;
2729 case 16:
2730 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2731 break;
2732 case 32:
2733 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2734 break;
2735 case 64:
2736 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2737 break;
2738 case 128:
2739 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2740 break;
2741 }
2742 break;
2743
2744 case REAL_TYPE:
2745 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2746 break;
2747
2748 case COMPLEX_TYPE:
2749 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2750 && TYPE_VAX_FLOATING_POINT_P (type))
2751 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2752 {
2753 case 6:
2754 dtype = 12;
2755 break;
2756 case 9:
2757 dtype = 13;
2758 break;
2759 case 15:
2760 dtype = 29;
2761 }
2762 else
2763 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2764 break;
2765
2766 case ARRAY_TYPE:
2767 dtype = 14;
2768 break;
2769
2770 default:
2771 break;
2772 }
2773
2774 /* Get the CLASS value. */
2775 switch (mech)
2776 {
2777 case By_Descriptor_A:
2778    case By_Short_Descriptor_A:
2779 class = 4;
2780 break;
2781 case By_Descriptor_NCA:
2782    case By_Short_Descriptor_NCA:
2783 class = 10;
2784 break;
2785 case By_Descriptor_SB:
2786    case By_Short_Descriptor_SB:
2787 class = 15;
2788 break;
2789 case By_Descriptor:
2790    case By_Short_Descriptor:
2791    case By_Descriptor_S:
2792    case By_Short_Descriptor_S:
2793 default:
2794 class = 1;
2795 break;
2796 }
2797
2798 /* Make the type for a descriptor for VMS. The first four fields
2799 are the same for all types. */
2800
2801 field_list
2802 = chainon (field_list,
2803 make_descriptor_field
2804 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2805 size_in_bytes ((mech == By_Descriptor_A ||
2806 mech == By_Short_Descriptor_A)
2807 ? inner_type : type)));
2808
2809 field_list = chainon (field_list,
2810 make_descriptor_field ("DTYPE",
2811 gnat_type_for_size (8, 1),
2812 record_type, size_int (dtype)));
2813 field_list = chainon (field_list,
2814 make_descriptor_field ("CLASS",
2815 gnat_type_for_size (8, 1),
2816 record_type, size_int (class)));
2817
2818 /* Of course this will crash at run-time if the address space is not
2819 within the low 32 bits, but there is nothing else we can do. */
2820 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2821
2822 field_list
2823 = chainon (field_list,
2824 make_descriptor_field
2825 ("POINTER", pointer32_type, record_type,
2826 build_unary_op (ADDR_EXPR,
2827 pointer32_type,
2828 build0 (PLACEHOLDER_EXPR, type))));
2829
2830 switch (mech)
2831 {
2832 case By_Descriptor:
2833    case By_Short_Descriptor:
2834    case By_Descriptor_S:
2835    case By_Short_Descriptor_S:
2836 break;
2837
2838 case By_Descriptor_SB:
2839    case By_Short_Descriptor_SB:
2840 field_list
2841 = chainon (field_list,
2842 make_descriptor_field
2843 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2844 TREE_CODE (type) == ARRAY_TYPE
2845 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2846 field_list
2847 = chainon (field_list,
2848 make_descriptor_field
2849 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2850 TREE_CODE (type) == ARRAY_TYPE
2851 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2852 break;
2853
2854 case By_Descriptor_A:
2855    case By_Short_Descriptor_A:
2856    case By_Descriptor_NCA:
2857    case By_Short_Descriptor_NCA:
2858 field_list = chainon (field_list,
2859 make_descriptor_field ("SCALE",
2860 gnat_type_for_size (8, 1),
2861 record_type,
2862 size_zero_node));
2863
2864 field_list = chainon (field_list,
2865 make_descriptor_field ("DIGITS",
2866 gnat_type_for_size (8, 1),
2867 record_type,
2868 size_zero_node));
2869
2870 field_list
2871 = chainon (field_list,
2872 make_descriptor_field
2873 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2874 size_int ((mech == By_Descriptor_NCA ||
2875 mech == By_Short_Descriptor_NCA)
2876 ? 0
2877 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2878 : (TREE_CODE (type) == ARRAY_TYPE
2879 && TYPE_CONVENTION_FORTRAN_P (type)
2880 ? 224 : 192))));
2881
2882 field_list = chainon (field_list,
2883 make_descriptor_field ("DIMCT",
2884 gnat_type_for_size (8, 1),
2885 record_type,
2886 size_int (ndim)));
2887
2888 field_list = chainon (field_list,
2889 make_descriptor_field ("ARSIZE",
2890 gnat_type_for_size (32, 1),
2891 record_type,
2892 size_in_bytes (type)));
2893
2894 /* Now build a pointer to the 0,0,0... element. */
2895 tem = build0 (PLACEHOLDER_EXPR, type);
2896 for (i = 0, inner_type = type; i < ndim;
2897 i++, inner_type = TREE_TYPE (inner_type))
2898 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2899 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2900 NULL_TREE, NULL_TREE);
2901
2902 field_list
2903 = chainon (field_list,
2904 make_descriptor_field
2905 ("A0",
2906 build_pointer_type_for_mode (inner_type, SImode, false),
2907 record_type,
2908 build1 (ADDR_EXPR,
2909 build_pointer_type_for_mode (inner_type, SImode,
2910 false),
2911 tem)));
2912
2913 /* Next come the addressing coefficients. */
2914 tem = size_one_node;
2915 for (i = 0; i < ndim; i++)
2916 {
2917 char fname[3];
2918 tree idx_length
2919 = size_binop (MULT_EXPR, tem,
2920 size_binop (PLUS_EXPR,
2921 size_binop (MINUS_EXPR,
2922 TYPE_MAX_VALUE (idx_arr[i]),
2923 TYPE_MIN_VALUE (idx_arr[i])),
2924 size_int (1)));
2925
2926 fname[0] = ((mech == By_Descriptor_NCA ||
2927 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2928 fname[1] = '0' + i, fname[2] = 0;
2929 field_list
2930 = chainon (field_list,
2931 make_descriptor_field (fname,
2932 gnat_type_for_size (32, 1),
2933 record_type, idx_length));
2934
2935	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2936 tem = idx_length;
2937 }
2938
2939 /* Finally here are the bounds. */
2940 for (i = 0; i < ndim; i++)
2941 {
2942 char fname[3];
2943
2944 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2945 field_list
2946 = chainon (field_list,
2947 make_descriptor_field
2948 (fname, gnat_type_for_size (32, 1), record_type,
2949 TYPE_MIN_VALUE (idx_arr[i])));
2950
2951 fname[0] = 'U';
2952 field_list
2953 = chainon (field_list,
2954 make_descriptor_field
2955 (fname, gnat_type_for_size (32, 1), record_type,
2956 TYPE_MAX_VALUE (idx_arr[i])));
2957 }
2958 break;
2959
2960 default:
2961 post_error ("unsupported descriptor type for &", gnat_entity);
2962 }
2963
2964 finish_record_type (record_type, field_list, 0, true);
2965 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2966 NULL, true, false, gnat_entity);
2967
2968 return record_type;
2969}
2970
2971/* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2972 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2973 in the type contains in its DECL_INITIAL the expression to use when
2974 a constructor is made for the type. GNAT_ENTITY is an entity used
2975 to print out an error message if the mechanism cannot be applied to
2976 an object of that type and also for the name. */
2977
2978tree
2979build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2980{
2981 tree record64_type = make_node (RECORD_TYPE);
2982 tree pointer64_type;
2983 tree field_list64 = 0;
2984 int class;
2985 int dtype = 0;
2986 tree inner_type;
2987 int ndim;
2988 int i;
2989 tree *idx_arr;
2990 tree tem;
2991
2992 /* If TYPE is an unconstrained array, use the underlying array type. */
2993 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2994 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2995
2996 /* If this is an array, compute the number of dimensions in the array,
2997 get the index types, and point to the inner type. */
2998 if (TREE_CODE (type) != ARRAY_TYPE)
2999 ndim = 0;
3000 else
3001 for (ndim = 1, inner_type = type;
3002 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3003 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3004 ndim++, inner_type = TREE_TYPE (inner_type))
3005 ;
3006
3007 idx_arr = (tree *) alloca (ndim * sizeof (tree));
3008
3009 if (mech != By_Descriptor_NCA
3010 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3011 for (i = ndim - 1, inner_type = type;
3012 i >= 0;
3013 i--, inner_type = TREE_TYPE (inner_type))
3014 idx_arr[i] = TYPE_DOMAIN (inner_type);
3015 else
3016 for (i = 0, inner_type = type;
3017 i < ndim;
3018 i++, inner_type = TREE_TYPE (inner_type))
3019 idx_arr[i] = TYPE_DOMAIN (inner_type);
3020
3021 /* Now get the DTYPE value. */
3022 switch (TREE_CODE (type))
3023 {
3024 case INTEGER_TYPE:
3025 case ENUMERAL_TYPE:
3026    case BOOLEAN_TYPE:
3027 if (TYPE_VAX_FLOATING_POINT_P (type))
3028 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3029 {
3030 case 6:
3031 dtype = 10;
3032 break;
3033 case 9:
3034 dtype = 11;
3035 break;
3036 case 15:
3037 dtype = 27;
3038 break;
3039 }
3040 else
3041 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3042 {
3043 case 8:
3044 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3045 break;
3046 case 16:
3047 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3048 break;
3049 case 32:
3050 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3051 break;
3052 case 64:
3053 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3054 break;
3055 case 128:
3056 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3057 break;
3058 }
3059 break;
3060
3061 case REAL_TYPE:
3062 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3063 break;
3064
3065 case COMPLEX_TYPE:
3066 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3067 && TYPE_VAX_FLOATING_POINT_P (type))
3068 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3069 {
3070 case 6:
3071 dtype = 12;
3072 break;
3073 case 9:
3074 dtype = 13;
3075 break;
3076 case 15:
3077 dtype = 29;
3078 }
3079 else
3080 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3081 break;
3082
3083 case ARRAY_TYPE:
3084 dtype = 14;
3085 break;
3086
3087 default:
3088 break;
3089 }
3090
3091 /* Get the CLASS value. */
3092 switch (mech)
3093 {
3094 case By_Descriptor_A:
3095 class = 4;
3096 break;
3097 case By_Descriptor_NCA:
3098 class = 10;
3099 break;
3100 case By_Descriptor_SB:
3101 class = 15;
3102 break;
3103 case By_Descriptor:
3104 case By_Descriptor_S:
3105 default:
3106 class = 1;
3107 break;
3108 }
3109
3110 /* Make the type for a 64bit descriptor for VMS. The first six fields
3111 are the same for all types. */
3112
3113 field_list64 = chainon (field_list64,
3114 make_descriptor_field ("MBO",
3115 gnat_type_for_size (16, 1),
3116 record64_type, size_int (1)));
3117
3118 field_list64 = chainon (field_list64,
3119 make_descriptor_field ("DTYPE",
3120 gnat_type_for_size (8, 1),
3121 record64_type, size_int (dtype)));
3122 field_list64 = chainon (field_list64,
3123 make_descriptor_field ("CLASS",
3124 gnat_type_for_size (8, 1),
3125 record64_type, size_int (class)));
3126
3127 field_list64 = chainon (field_list64,
3128 make_descriptor_field ("MBMO",
3129 gnat_type_for_size (32, 1),
3130 record64_type, ssize_int (-1)));
3131
3132 field_list64
3133 = chainon (field_list64,
3134 make_descriptor_field
3135 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
3136 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
3137
3138 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3139
3140 field_list64
3141 = chainon (field_list64,
3142 make_descriptor_field
3143 ("POINTER", pointer64_type, record64_type,
3144 build_unary_op (ADDR_EXPR,
3145 pointer64_type,
3146 build0 (PLACEHOLDER_EXPR, type))));
3147
3148 switch (mech)
3149 {
3150 case By_Descriptor:
3151 case By_Descriptor_S:
3152 break;
3153
3154 case By_Descriptor_SB:
3155 field_list64
3156 = chainon (field_list64,
3157 make_descriptor_field
3158 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
3159 TREE_CODE (type) == ARRAY_TYPE
3160 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3161 field_list64
3162 = chainon (field_list64,
3163 make_descriptor_field
3164 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3165 TREE_CODE (type) == ARRAY_TYPE
3166 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3167 break;
3168
3169 case By_Descriptor_A:
3170 case By_Descriptor_NCA:
3171 field_list64 = chainon (field_list64,
3172 make_descriptor_field ("SCALE",
3173 gnat_type_for_size (8, 1),
3174 record64_type,
3175 size_zero_node));
3176
3177 field_list64 = chainon (field_list64,
3178 make_descriptor_field ("DIGITS",
3179 gnat_type_for_size (8, 1),
3180 record64_type,
3181 size_zero_node));
3182
3183 field_list64
3184 = chainon (field_list64,
3185 make_descriptor_field
3186 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3187 size_int (mech == By_Descriptor_NCA
3188 ? 0
3189 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3190 : (TREE_CODE (type) == ARRAY_TYPE
3191 && TYPE_CONVENTION_FORTRAN_P (type)
3192 ? 224 : 192))));
3193
3194 field_list64 = chainon (field_list64,
3195 make_descriptor_field ("DIMCT",
3196 gnat_type_for_size (8, 1),
3197 record64_type,
3198 size_int (ndim)));
3199
3200 field_list64 = chainon (field_list64,
3201 make_descriptor_field ("MBZ",
3202 gnat_type_for_size (32, 1),
3203 record64_type,
3204 size_int (0)));
3205 field_list64 = chainon (field_list64,
3206 make_descriptor_field ("ARSIZE",
3207 gnat_type_for_size (64, 1),
3208 record64_type,
3209 size_in_bytes (type)));
3210
3211 /* Now build a pointer to the 0,0,0... element. */
3212 tem = build0 (PLACEHOLDER_EXPR, type);
3213 for (i = 0, inner_type = type; i < ndim;
3214 i++, inner_type = TREE_TYPE (inner_type))
3215 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3216 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3217 NULL_TREE, NULL_TREE);
3218
3219 field_list64
3220 = chainon (field_list64,
3221 make_descriptor_field
3222 ("A0",
3223 build_pointer_type_for_mode (inner_type, DImode, false),
3224 record64_type,
3225 build1 (ADDR_EXPR,
3226 build_pointer_type_for_mode (inner_type, DImode,
3227 false),
3228 tem)));
3229
3230 /* Next come the addressing coefficients. */
3231 tem = size_one_node;
3232 for (i = 0; i < ndim; i++)
3233 {
3234 char fname[3];
3235 tree idx_length
3236 = size_binop (MULT_EXPR, tem,
3237 size_binop (PLUS_EXPR,
3238 size_binop (MINUS_EXPR,
3239 TYPE_MAX_VALUE (idx_arr[i]),
3240 TYPE_MIN_VALUE (idx_arr[i])),
3241 size_int (1)));
3242
3243 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3244 fname[1] = '0' + i, fname[2] = 0;
3245 field_list64
3246 = chainon (field_list64,
3247 make_descriptor_field (fname,
3248 gnat_type_for_size (64, 1),
3249 record64_type, idx_length));
3250
3251 if (mech == By_Descriptor_NCA)
3252 tem = idx_length;
3253 }
3254
3255 /* Finally here are the bounds. */
3256 for (i = 0; i < ndim; i++)
3257 {
3258 char fname[3];
3259
3260 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3261 field_list64
3262 = chainon (field_list64,
3263 make_descriptor_field
3264 (fname, gnat_type_for_size (64, 1), record64_type,
3265 TYPE_MIN_VALUE (idx_arr[i])));
3266
3267 fname[0] = 'U';
3268 field_list64
3269 = chainon (field_list64,
3270 make_descriptor_field
3271 (fname, gnat_type_for_size (64, 1), record64_type,
3272 TYPE_MAX_VALUE (idx_arr[i])));
3273 }
3274 break;
3275
3276 default:
3277 post_error ("unsupported descriptor type for &", gnat_entity);
3278 }
3279
3280 finish_record_type (record64_type, field_list64, 0, true);
3281 create_type_decl (create_concat_name (gnat_entity, "DESC64"), record64_type,
3282 NULL, true, false, gnat_entity);
3283
3284 return record64_type;
3285}
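
/* A rough picture of the 64-bit descriptor record built above, for the
   simplest case of a one-dimensional array passed By_Descriptor_SB (a
   sketch only; field widths follow the gnat_type_for_size calls):

     POINTER : 64-bit address of the array data
     SB_L1   : TYPE_MIN_VALUE (TYPE_DOMAIN (type)) for an array
     SB_U1   : TYPE_MAX_VALUE (TYPE_DOMAIN (type)) for an array

   For classes A and NCA the tail instead carries SCALE, DIGITS, AFLAGS,
   DIMCT, MBZ, ARSIZE, A0, one addressing coefficient per dimension and
   the L<i>/U<i> bound pairs, exactly as chained above.  */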
3286
3287/* Utility routine for above code to make a field. */
3288
3289static tree
3290make_descriptor_field (const char *name, tree type,
3291 tree rec_type, tree initial)
3292{
3293 tree field
3294 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3295
3296 DECL_INITIAL (field) = initial;
3297 return field;
3298}
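
/* Typical use of make_descriptor_field, repeating one of the calls made
   in the descriptor-building code above (illustration only):

     field_list64
       = chainon (field_list64,
                  make_descriptor_field ("MBZ",
                                         gnat_type_for_size (32, 1),
                                         record64_type, size_int (0)));

   The value to be put into the descriptor is recorded in the DECL_INITIAL
   of the returned FIELD_DECL.  */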
3299
3300/* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
3301 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3302 which the VMS descriptor is passed. */
3303
3304static tree
3305convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3306{
3307 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3308 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3309 /* The CLASS field is the 3rd field in the descriptor. */
3310 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3311 /* The POINTER field is the 6th field in the descriptor. */
3312 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3313
3314 /* Retrieve the value of the POINTER field. */
3315 tree gnu_expr64
3316 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3317
3318 if (POINTER_TYPE_P (gnu_type))
3319 return convert (gnu_type, gnu_expr64);
3320
3321 else if (TYPE_FAT_POINTER_P (gnu_type))
3322 {
3323 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3324 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3325 tree template_type = TREE_TYPE (p_bounds_type);
3326 tree min_field = TYPE_FIELDS (template_type);
3327 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3328 tree template, template_addr, aflags, dimct, t, u;
3329 /* See the head comment of build_vms_descriptor. */
3330 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3331 tree lfield, ufield;
3332
3333 /* Convert POINTER to the type of the P_ARRAY field. */
3334 gnu_expr64 = convert (p_array_type, gnu_expr64);
3335
3336 switch (iclass)
3337 {
3338 case 1: /* Class S */
3339 case 15: /* Class SB */
3340 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3341 t = TREE_CHAIN (TREE_CHAIN (class));
3342 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3343 t = tree_cons (min_field,
3344 convert (TREE_TYPE (min_field), integer_one_node),
3345 tree_cons (max_field,
3346 convert (TREE_TYPE (max_field), t),
3347 NULL_TREE));
3348 template = gnat_build_constructor (template_type, t);
3349 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3350
3351 /* For class S, we are done. */
3352 if (iclass == 1)
3353 break;
3354
3355 /* Test that we really have a SB descriptor, like DEC Ada. */
3356 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3357 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3358 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3359 /* If so, there is already a template in the descriptor and
3360 it is located right after the POINTER field. The fields are
3361	       64 bits wide, so they must be repacked. */
3362 t = TREE_CHAIN (pointer64);
3363 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3364 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3365
3366 t = TREE_CHAIN (t);
3367 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3368 ufield = convert
3369 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3370
3371 /* Build the template in the form of a constructor. */
3372 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3373 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3374 ufield, NULL_TREE));
3375 template = gnat_build_constructor (template_type, t);
3376
3377	    /* Otherwise use the {1, LENGTH} template we built above. */
3378 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3379 build_unary_op (ADDR_EXPR, p_bounds_type,
3380 template),
3381 template_addr);
3382 break;
3383
3384 case 4: /* Class A */
3385 /* The AFLAGS field is the 3rd field after the pointer in the
3386 descriptor. */
3387 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3388 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3389 /* The DIMCT field is the next field in the descriptor after
3390 aflags. */
3391 t = TREE_CHAIN (t);
3392 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3393	    /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3394	       or if FL_COEFF or FL_BOUNDS is not set. */
3395 u = build_int_cst (TREE_TYPE (aflags), 192);
3396 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3397 build_binary_op (NE_EXPR, integer_type_node,
3398 dimct,
3399 convert (TREE_TYPE (dimct),
3400 size_one_node)),
3401 build_binary_op (NE_EXPR, integer_type_node,
3402 build2 (BIT_AND_EXPR,
3403 TREE_TYPE (aflags),
3404 aflags, u),
3405 u));
3406 /* There is already a template in the descriptor and it is located
3407	     in block 3. The fields are 64 bits wide, so they must be repacked. */
3408 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3409 (t)))));
3410 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3411 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3412
3413 t = TREE_CHAIN (t);
3414 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3415 ufield = convert
3416 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3417
3418 /* Build the template in the form of a constructor. */
3419 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3420 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3421 ufield, NULL_TREE));
3422 template = gnat_build_constructor (template_type, t);
3423 template = build3 (COND_EXPR, p_bounds_type, u,
3424 build_call_raise (CE_Length_Check_Failed, Empty,
3425 N_Raise_Constraint_Error),
3426 template);
3427 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3428 break;
3429
3430 case 10: /* Class NCA */
3431 default:
3432 post_error ("unsupported descriptor type for &", gnat_subprog);
3433 template_addr = integer_zero_node;
3434 break;
3435 }
3436
3437 /* Build the fat pointer in the form of a constructor. */
3438 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3439 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3440 template_addr, NULL_TREE));
3441 return gnat_build_constructor (gnu_type, t);
3442 }
3443
3444 else
3445 gcc_unreachable ();
3446}
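
/* For a fat pointer target, the net effect of convert_vms_descriptor64 is
   roughly the following aggregate (sketch only):

     { array pointer  => convert (p_array_type, <POINTER field of *desc>),
       bounds pointer => template_addr }

   where template_addr designates either the {1, LENGTH} template built on
   the fly (class S) or the repacked 64-bit bounds already present in the
   descriptor (classes SB and A).  */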
3447
3448/* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3449 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3450 which the VMS descriptor is passed. */
3451
3452static tree
3453convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3454{
3455 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3456 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3457 /* The CLASS field is the 3rd field in the descriptor. */
3458 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3459 /* The POINTER field is the 4th field in the descriptor. */
3460 tree pointer = TREE_CHAIN (class);
3461
3462 /* Retrieve the value of the POINTER field. */
3463  tree gnu_expr32
3464 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3465
3466 if (POINTER_TYPE_P (gnu_type))
3467    return convert (gnu_type, gnu_expr32);
3468
3469 else if (TYPE_FAT_POINTER_P (gnu_type))
3470 {
3471 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3472 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3473 tree template_type = TREE_TYPE (p_bounds_type);
3474 tree min_field = TYPE_FIELDS (template_type);
3475 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3476 tree template, template_addr, aflags, dimct, t, u;
3477 /* See the head comment of build_vms_descriptor. */
3478 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3479
3480 /* Convert POINTER to the type of the P_ARRAY field. */
3481      gnu_expr32 = convert (p_array_type, gnu_expr32);
3482
3483 switch (iclass)
3484 {
3485 case 1: /* Class S */
3486 case 15: /* Class SB */
3487 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3488 t = TYPE_FIELDS (desc_type);
3489 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3490 t = tree_cons (min_field,
3491 convert (TREE_TYPE (min_field), integer_one_node),
3492 tree_cons (max_field,
3493 convert (TREE_TYPE (max_field), t),
3494 NULL_TREE));
3495 template = gnat_build_constructor (template_type, t);
3496 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3497
3498 /* For class S, we are done. */
3499 if (iclass == 1)
3500 break;
3501
3502 /* Test that we really have a SB descriptor, like DEC Ada. */
3503 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3504 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3505 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3506 /* If so, there is already a template in the descriptor and
3507 it is located right after the POINTER field. */
3508 t = TREE_CHAIN (pointer);
3509 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3510	  /* Otherwise use the {1, LENGTH} template we built above. */
3511 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3512 build_unary_op (ADDR_EXPR, p_bounds_type,
3513 template),
3514 template_addr);
3515 break;
3516
3517 case 4: /* Class A */
3518 /* The AFLAGS field is the 7th field in the descriptor. */
3519 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3520 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3521 /* The DIMCT field is the 8th field in the descriptor. */
3522 t = TREE_CHAIN (t);
3523 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3524	  /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3525	     or if FL_COEFF or FL_BOUNDS is not set. */
3526 u = build_int_cst (TREE_TYPE (aflags), 192);
3527 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3528 build_binary_op (NE_EXPR, integer_type_node,
3529 dimct,
3530 convert (TREE_TYPE (dimct),
3531 size_one_node)),
3532 build_binary_op (NE_EXPR, integer_type_node,
3533 build2 (BIT_AND_EXPR,
3534 TREE_TYPE (aflags),
3535 aflags, u),
3536 u));
3537 /* There is already a template in the descriptor and it is
3538 located at the start of block 3 (12th field). */
3539 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3540 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3541 template = build3 (COND_EXPR, p_bounds_type, u,
3542 build_call_raise (CE_Length_Check_Failed, Empty,
3543 N_Raise_Constraint_Error),
3544 template);
3545 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3546 break;
3547
3548 case 10: /* Class NCA */
3549 default:
3550 post_error ("unsupported descriptor type for &", gnat_subprog);
3551 template_addr = integer_zero_node;
3552 break;
3553 }
3554
3555 /* Build the fat pointer in the form of a constructor. */
3556      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3557 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3558 template_addr, NULL_TREE));
3559
3560 return gnat_build_constructor (gnu_type, t);
3561 }
3562
3563 else
3564 gcc_unreachable ();
3565}
3566
3567/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3568 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3569 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3570 VMS descriptor is passed. */
3571
3572static tree
3573convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3574 Entity_Id gnat_subprog)
3575{
3576 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3577 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3578 tree mbo = TYPE_FIELDS (desc_type);
3579 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3580 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3581  tree is64bit, gnu_expr32, gnu_expr64;
3582
3583  /* If the field name is not MBO, the descriptor is 32-bit and there is no
3584     alternate.  Otherwise the primary is 64-bit and the alternate 32-bit. */
3585  if (strcmp (mbostr, "MBO") != 0)
3586 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3587
3588  /* Build the test for 64-bit descriptor. */
3589 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3590 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3591 is64bit
3592 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3593 build_binary_op (EQ_EXPR, integer_type_node,
3594 convert (integer_type_node, mbo),
3595 integer_one_node),
3596 build_binary_op (EQ_EXPR, integer_type_node,
3597 convert (integer_type_node, mbmo),
3598 integer_minus_one_node));
3599
3600 /* Build the 2 possible end results. */
3601 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3602 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3603 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3604
3605 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3606}
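
/* In C-like terms, the dispatch above amounts to (sketch only):

     is64bit = (desc->MBO == 1 && desc->MBMO == -1);
     result  = is64bit
               ? convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog)
               : convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);

   with GNU_EXPR first reinterpreted through GNU_EXPR_ALT_TYPE for the
   32-bit branch.  */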
3607
3608/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3609 and the GNAT node GNAT_SUBPROG. */
3610
3611void
3612build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3613{
3614 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3615 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3616 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3617 tree gnu_body;
3618
3619 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3620 gnu_param_list = NULL_TREE;
3621
3622 begin_subprog_body (gnu_stub_decl);
3623 gnat_pushlevel ();
3624
3625 start_stmt_group ();
3626
3627 /* Loop over the parameters of the stub and translate any of them
3628 passed by descriptor into a by reference one. */
3629 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3630 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3631 gnu_stub_param;
3632 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3633 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3634 {
3635 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3636 gnu_param
3637 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3638 gnu_stub_param,
3639 DECL_PARM_ALT_TYPE (gnu_stub_param),
3640 gnat_subprog);
3641 else
3642 gnu_param = gnu_stub_param;
3643
3644 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3645 }
3646
3647 gnu_body = end_stmt_group ();
3648
3649 /* Invoke the internal subprogram. */
3650 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3651 gnu_subprog);
3652 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3653 gnu_subprog_addr,
3654 nreverse (gnu_param_list));
3655
3656 /* Propagate the return value, if any. */
3657 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3658 append_to_statement_list (gnu_subprog_call, &gnu_body);
3659 else
3660 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3661 gnu_subprog_call),
3662 &gnu_body);
3663
3664 gnat_poplevel ();
3665
3666 allocate_struct_function (gnu_stub_decl, false);
3667 end_subprog_body (gnu_body, false);
3668}
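
/* Conceptually, the stub built above behaves like this pseudo-C (sketch
   only; PARAM stands for any parameter passed by descriptor):

     RESULT stub (..., DESC *PARAM, ...)
     {
       return (*&gnu_subprog) (..., convert_vms_descriptor (PARAM, ...), ...);
     }

   i.e. each by-descriptor argument is first turned into the regular or fat
   pointer expected by the internal subprogram, then the call is made and
   its result, if any, propagated.  */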
3669\f
3670/* Build a type to be used to represent an aliased object whose nominal
3671 type is an unconstrained array. This consists of a RECORD_TYPE containing
3672 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3673 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3674 is used to represent an arbitrary unconstrained object. Use NAME
3675 as the name of the record. */
3676
3677tree
3678build_unc_object_type (tree template_type, tree object_type, tree name)
3679{
3680 tree type = make_node (RECORD_TYPE);
3681 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3682 template_type, type, 0, 0, 0, 1);
3683 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3684 type, 0, 0, 0, 1);
3685
3686 TYPE_NAME (type) = name;
3687 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3688 finish_record_type (type,
3689 chainon (chainon (NULL_TREE, template_field),
3690 array_field),
3691 0, false);
3692
3693 return type;
3694}
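
/* The record returned by build_unc_object_type is laid out as (sketch):

     struct NAME
     {
       TEMPLATE_TYPE BOUNDS;   -- the bounds come first
       OBJECT_TYPE   ARRAY;    -- the array data follows
     };

   shift_unc_components_for_thin_pointers below can then rework the offsets
   when such an object is designated by thin pointers.  */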
3695
3696/* Same, taking a thin or fat pointer type instead of a template type. */
3697
3698tree
3699build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3700 tree name)
3701{
3702 tree template_type;
3703
3704 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3705
3706 template_type
3707 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3708 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3709 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3710 return build_unc_object_type (template_type, object_type, name);
3711}
3712
3713/* Shift the component offsets within an unconstrained object TYPE to make it
3714 suitable for use as a designated type for thin pointers. */
3715
3716void
3717shift_unc_components_for_thin_pointers (tree type)
3718{
3719 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3720 allocated past the BOUNDS template. The designated type is adjusted to
3721 have ARRAY at position zero and the template at a negative offset, so
3722 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3723
3724 tree bounds_field = TYPE_FIELDS (type);
3725 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3726
3727 DECL_FIELD_OFFSET (bounds_field)
3728 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3729
3730 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3731 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3732}
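
/* After the shift, the component offsets of TYPE are (sketch):

     ARRAY   at offset 0
     BOUNDS  at offset -byte_position (ARRAY)

   so a thin pointer designates the array data directly while the template
   stays reachable at a small negative offset.  */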
3733\f
3734/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3735 the normal case this is just two adjustments, but we have more to do
3736   if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3737
3738void
3739update_pointer_to (tree old_type, tree new_type)
3740{
3741 tree ptr = TYPE_POINTER_TO (old_type);
3742 tree ref = TYPE_REFERENCE_TO (old_type);
3743 tree ptr1, ref1;
3744 tree type;
3745
3746 /* If this is the main variant, process all the other variants first. */
3747 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3748 for (type = TYPE_NEXT_VARIANT (old_type); type;
3749 type = TYPE_NEXT_VARIANT (type))
3750 update_pointer_to (type, new_type);
3751
3752 /* If no pointer or reference, we are done. */
3753 if (!ptr && !ref)
3754 return;
3755
3756 /* Merge the old type qualifiers in the new type.
3757
3758 Each old variant has qualifiers for specific reasons, and the new
3759 designated type as well. Each set of qualifiers represents useful
3760 information grabbed at some point, and merging the two simply unifies
3761 these inputs into the final type description.
3762
3763 Consider for instance a volatile type frozen after an access to constant
3764 type designating it. After the designated type freeze, we get here with a
3765 volatile new_type and a dummy old_type with a readonly variant, created
3766 when the access type was processed. We shall make a volatile and readonly
3767 designated type, because that's what it really is.
3768
3769 We might also get here for a non-dummy old_type variant with different
3770 qualifiers than the new_type ones, for instance in some cases of pointers
3771 to private record type elaboration (see the comments around the call to
3772 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3773 qualifiers in those cases too, to avoid accidentally discarding the
3774 initial set, and will often end up with old_type == new_type then. */
3775 new_type = build_qualified_type (new_type,
3776 TYPE_QUALS (old_type)
3777 | TYPE_QUALS (new_type));
3778
3779 /* If the new type and the old one are identical, there is nothing to
3780 update. */
3781 if (old_type == new_type)
3782 return;
3783
3784 /* Otherwise, first handle the simple case. */
3785 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3786 {
3787 TYPE_POINTER_TO (new_type) = ptr;
3788 TYPE_REFERENCE_TO (new_type) = ref;
3789
3790 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3791 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3792 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3793 TREE_TYPE (ptr1) = new_type;
3794
3795 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3796 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3797 ref1 = TYPE_NEXT_VARIANT (ref1))
3798 TREE_TYPE (ref1) = new_type;
3799 }
3800
3801 /* Now deal with the unconstrained array case. In this case the "pointer"
3802 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3803 Turn them into pointers to the correct types using update_pointer_to. */
3804 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3805 gcc_unreachable ();
3806
3807 else
3808 {
3809 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3810 tree array_field = TYPE_FIELDS (ptr);
3811 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3812 tree new_ptr = TYPE_POINTER_TO (new_type);
3813 tree new_ref;
3814 tree var;
3815
3816 /* Make pointers to the dummy template point to the real template. */
3817 update_pointer_to
3818 (TREE_TYPE (TREE_TYPE (bounds_field)),
3819 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3820
3821 /* The references to the template bounds present in the array type
3822 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3823 are updating ptr to make it a full replacement for new_ptr as
3824 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3825 to make it of type ptr. */
3826 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3827 build0 (PLACEHOLDER_EXPR, ptr),
3828 bounds_field, NULL_TREE);
3829
3830 /* Create the new array for the new PLACEHOLDER_EXPR and make
3831 pointers to the dummy array point to it.
3832
3833 ??? This is now the only use of substitute_in_type,
3834 which is a very "heavy" routine to do this, so it
3835 should be replaced at some point. */
3836 update_pointer_to
3837 (TREE_TYPE (TREE_TYPE (array_field)),
3838 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3839 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3840
3841 /* Make ptr the pointer to new_type. */
3842 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3843 = TREE_TYPE (new_type) = ptr;
3844
3845 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3846 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3847
3848 /* Now handle updating the allocation record, what the thin pointer
3849 points to. Update all pointers from the old record into the new
3850 one, update the type of the array field, and recompute the size. */
3851 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3852
3853 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3854 = TREE_TYPE (TREE_TYPE (array_field));
3855
3856 /* The size recomputation needs to account for alignment constraints, so
3857 we let layout_type work it out. This will reset the field offsets to
3858 what they would be in a regular record, so we shift them back to what
3859 we want them to be for a thin pointer designated type afterwards. */
3860 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3861 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3862 TYPE_SIZE (new_obj_rec) = 0;
3863 layout_type (new_obj_rec);
3864
3865 shift_unc_components_for_thin_pointers (new_obj_rec);
3866
3867 /* We are done, at last. */
3868 rest_of_record_type_compilation (ptr);
3869 }
3870}
3871\f
3872/* Convert a pointer to a constrained array into a pointer to a fat
3873 pointer. This involves making or finding a template. */
3874
3875static tree
3876convert_to_fat_pointer (tree type, tree expr)
3877{
3878 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3879 tree template, template_addr;
3880 tree etype = TREE_TYPE (expr);
3881
3882 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3883 pointer to the template and array. */
3884 if (integer_zerop (expr))
3885 return
3886 gnat_build_constructor
3887 (type,
3888 tree_cons (TYPE_FIELDS (type),
3889 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3890 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3891 convert (build_pointer_type (template_type),
3892 expr),
3893 NULL_TREE)));
3894
3895 /* If EXPR is a thin pointer, make the template and data from the record. */
3896
3897 else if (TYPE_THIN_POINTER_P (etype))
3898 {
3899 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3900
3901 expr = save_expr (expr);
3902 if (TREE_CODE (expr) == ADDR_EXPR)
3903 expr = TREE_OPERAND (expr, 0);
3904 else
3905 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3906
3907 template = build_component_ref (expr, NULL_TREE, fields, false);
3908 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3909 build_component_ref (expr, NULL_TREE,
3910 TREE_CHAIN (fields), false));
3911 }
3912 else
3913 /* Otherwise, build the constructor for the template. */
3914 template = build_template (template_type, TREE_TYPE (etype), expr);
3915
3916 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3917
3918 /* The result is a CONSTRUCTOR for the fat pointer.
3919
3920 If expr is an argument of a foreign convention subprogram, the type it
3921 points to is directly the component type. In this case, the expression
3922 type may not match the corresponding FIELD_DECL type at this point, so we
3923 call "convert" here to fix that up if necessary. This type consistency is
3924 required, for instance because it ensures that possible later folding of
3925 component_refs against this constructor always yields something of the
3926 same type as the initial reference.
3927
3928 Note that the call to "build_template" above is still fine, because it
3929 will only refer to the provided template_type in this case. */
3930 return
3931 gnat_build_constructor
3932 (type, tree_cons (TYPE_FIELDS (type),
3933 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3934 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3935 template_addr, NULL_TREE)));
3936}
3937\f
3938/* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3939   is something that is a fat pointer, so convert to it first if EXPR is
3940   not already a fat pointer. */
3941
3942static tree
3943convert_to_thin_pointer (tree type, tree expr)
3944{
3945 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3946 expr
3947 = convert_to_fat_pointer
3948 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3949
3950 /* We get the pointer to the data and use a NOP_EXPR to make it the
3951 proper GCC type. */
3952 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3953 false);
3954 expr = build1 (NOP_EXPR, type, expr);
3955
3956 return expr;
3957}
3958\f
3959/* Create an expression whose value is that of EXPR,
3960 converted to type TYPE. The TREE_TYPE of the value
3961 is always TYPE. This function implements all reasonable
3962 conversions; callers should filter out those that are
3963 not permitted by the language being compiled. */
3964
3965tree
3966convert (tree type, tree expr)
3967{
3968 enum tree_code code = TREE_CODE (type);
3969 tree etype = TREE_TYPE (expr);
3970 enum tree_code ecode = TREE_CODE (etype);
3971
3972 /* If EXPR is already the right type, we are done. */
3973 if (type == etype)
3974 return expr;
3975
3976 /* If both input and output have padding and are of variable size, do this
3977 as an unchecked conversion. Likewise if one is a mere variant of the
3978 other, so we avoid a pointless unpad/repad sequence. */
3979 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3980 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3981 && (!TREE_CONSTANT (TYPE_SIZE (type))
3982 || !TREE_CONSTANT (TYPE_SIZE (etype))
3983 || gnat_types_compatible_p (type, etype)
3984 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3985 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3986 ;
3987
3988 /* If the output type has padding, convert to the inner type and
3989 make a constructor to build the record. */
3990 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3991 {
3992 /* If we previously converted from another type and our type is
3993 of variable size, remove the conversion to avoid the need for
3994 variable-size temporaries. Likewise for a conversion between
3995 original and packable version. */
3996 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3997 && (!TREE_CONSTANT (TYPE_SIZE (type))
3998 || (ecode == RECORD_TYPE
3999 && TYPE_NAME (etype)
4000 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4001 expr = TREE_OPERAND (expr, 0);
4002
4003 /* If we are just removing the padding from expr, convert the original
4004 object if we have variable size in order to avoid the need for some
4005 variable-size temporaries. Likewise if the padding is a mere variant
4006 of the other, so we avoid a pointless unpad/repad sequence. */
4007 if (TREE_CODE (expr) == COMPONENT_REF
4008 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
4009 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4010 && (!TREE_CONSTANT (TYPE_SIZE (type))
4011 || gnat_types_compatible_p (type,
4012 TREE_TYPE (TREE_OPERAND (expr, 0)))
4013 || (ecode == RECORD_TYPE
4014 && TYPE_NAME (etype)
4015 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4016 return convert (type, TREE_OPERAND (expr, 0));
4017
4018 /* If the result type is a padded type with a self-referentially-sized
4019 field and the expression type is a record, do this as an
4020 unchecked conversion. */
4021 else if (TREE_CODE (etype) == RECORD_TYPE
4022 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4023 return unchecked_convert (type, expr, false);
4024
4025 else
4026 return
4027 gnat_build_constructor (type,
4028 tree_cons (TYPE_FIELDS (type),
4029 convert (TREE_TYPE
4030 (TYPE_FIELDS (type)),
4031 expr),
4032 NULL_TREE));
4033 }
4034
4035 /* If the input type has padding, remove it and convert to the output type.
4036     The ordering of the conditions ensures that the output type is not a
4037     padding type here, as it is not clear whether the conversion would
4038     always be correct if this were to happen. */
4039 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
4040 {
4041 tree unpadded;
4042
4043 /* If we have just converted to this padded type, just get the
4044 inner expression. */
4045 if (TREE_CODE (expr) == CONSTRUCTOR
4046 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
4047 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
4048 == TYPE_FIELDS (etype))
4049 unpadded
4050 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
4051
4052 /* Otherwise, build an explicit component reference. */
4053 else
4054 unpadded
4055 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4056
4057 return convert (type, unpadded);
4058 }
4059
4060 /* If the input is a biased type, adjust first. */
4061 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4062 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4063 fold_convert (TREE_TYPE (etype),
4064 expr),
4065 TYPE_MIN_VALUE (etype)));
4066
4067 /* If the input is a justified modular type, we need to extract the actual
4068 object before converting it to any other type with the exceptions of an
4069 unconstrained array or of a mere type variant. It is useful to avoid the
4070 extraction and conversion in the type variant case because it could end
4071   up replacing a VAR_DECL expr by a constructor and we might be about to
4072 take the address of the result. */
4073 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4074 && code != UNCONSTRAINED_ARRAY_TYPE
4075 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4076 return convert (type, build_component_ref (expr, NULL_TREE,
4077 TYPE_FIELDS (etype), false));
4078
4079 /* If converting to a type that contains a template, convert to the data
4080 type and then build the template. */
4081 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4082 {
4083 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
4084
4085 /* If the source already has a template, get a reference to the
4086 associated array only, as we are going to rebuild a template
4087 for the target type anyway. */
4088 expr = maybe_unconstrained_array (expr);
4089
4090 return
4091 gnat_build_constructor
4092 (type,
4093 tree_cons (TYPE_FIELDS (type),
4094 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4095 obj_type, NULL_TREE),
4096 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
4097 convert (obj_type, expr), NULL_TREE)));
4098 }
4099
4100 /* There are some special cases of expressions that we process
4101 specially. */
4102 switch (TREE_CODE (expr))
4103 {
4104 case ERROR_MARK:
4105 return expr;
4106
4107 case NULL_EXPR:
4108 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4109 conversion in gnat_expand_expr. NULL_EXPR does not represent
4110	 an actual value, so no conversion is needed. */
4111 expr = copy_node (expr);
4112 TREE_TYPE (expr) = type;
4113 return expr;
4114
4115 case STRING_CST:
4116 /* If we are converting a STRING_CST to another constrained array type,
4117 just make a new one in the proper type. */
4118 if (code == ecode && AGGREGATE_TYPE_P (etype)
4119 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4120 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4121 {
4122 expr = copy_node (expr);
4123 TREE_TYPE (expr) = type;
4124 return expr;
4125 }
4126 break;
4127
4128 case CONSTRUCTOR:
4129 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4130 a new one in the proper type. */
4131 if (code == ecode && gnat_types_compatible_p (type, etype))
4132 {
4133 expr = copy_node (expr);
4134 TREE_TYPE (expr) = type;
4135 return expr;
4136 }
4137
4138 /* Likewise for a conversion between original and packable version, but
4139 we have to work harder in order to preserve type consistency. */
4140 if (code == ecode
4141 && code == RECORD_TYPE
4142 && TYPE_NAME (type) == TYPE_NAME (etype))
4143 {
4144 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4145 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4146 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4147 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4148 unsigned HOST_WIDE_INT idx;
4149 tree index, value;
4150
4151 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4152 {
4153 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4154 /* We expect only simple constructors. Otherwise, punt. */
4155 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4156 break;
4157 elt->index = field;
4158 elt->value = convert (TREE_TYPE (field), value);
4159 efield = TREE_CHAIN (efield);
4160 field = TREE_CHAIN (field);
4161 }
4162
4163 if (idx == len)
4164 {
4165 expr = copy_node (expr);
4166 TREE_TYPE (expr) = type;
4167 CONSTRUCTOR_ELTS (expr) = v;
4168 return expr;
4169 }
4170 }
4171 break;
4172
4173 case UNCONSTRAINED_ARRAY_REF:
4174 /* Convert this to the type of the inner array by getting the address of
4175 the array from the template. */
4176 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4177 build_component_ref (TREE_OPERAND (expr, 0),
4178 get_identifier ("P_ARRAY"),
4179 NULL_TREE, false));
4180 etype = TREE_TYPE (expr);
4181 ecode = TREE_CODE (etype);
4182 break;
4183
4184 case VIEW_CONVERT_EXPR:
4185 {
4186 /* GCC 4.x is very sensitive to type consistency overall, and view
4187 conversions thus are very frequent. Even though just "convert"ing
4188 the inner operand to the output type is fine in most cases, it
4189 might expose unexpected input/output type mismatches in special
4190 circumstances so we avoid such recursive calls when we can. */
4191 tree op0 = TREE_OPERAND (expr, 0);
4192
4193 /* If we are converting back to the original type, we can just
4194 lift the input conversion. This is a common occurrence with
4195 switches back-and-forth amongst type variants. */
4196 if (type == TREE_TYPE (op0))
4197 return op0;
4198
4199 /* Otherwise, if we're converting between two aggregate types, we
4200 might be allowed to substitute the VIEW_CONVERT_EXPR target type
4201 in place or to just convert the inner expression. */
4202 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4203 {
4204 /* If we are converting between mere variants, we can just
4205 substitute the VIEW_CONVERT_EXPR in place. */
4206 if (gnat_types_compatible_p (type, etype))
4207 return build1 (VIEW_CONVERT_EXPR, type, op0);
4208
4209 /* Otherwise, we may just bypass the input view conversion unless
4210 one of the types is a fat pointer, which is handled by
4211 specialized code below which relies on exact type matching. */
4212 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4213 return convert (type, op0);
4214 }
4215 }
4216 break;
4217
4218 case INDIRECT_REF:
4219 /* If both types are record types, just convert the pointer and
4220 make a new INDIRECT_REF.
4221
4222 ??? Disable this for now since it causes problems with the
4223 code in build_binary_op for MODIFY_EXPR which wants to
4224 strip off conversions. But that code really is a mess and
4225 we need to do this a much better way some time. */
4226 if (0
4227 && (TREE_CODE (type) == RECORD_TYPE
4228 || TREE_CODE (type) == UNION_TYPE)
4229 && (TREE_CODE (etype) == RECORD_TYPE
4230 || TREE_CODE (etype) == UNION_TYPE)
4231 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4232 return build_unary_op (INDIRECT_REF, NULL_TREE,
4233 convert (build_pointer_type (type),
4234 TREE_OPERAND (expr, 0)));
4235 break;
4236
4237 default:
4238 break;
4239 }
4240
4241 /* Check for converting to a pointer to an unconstrained array. */
4242 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4243 return convert_to_fat_pointer (type, expr);
4244
4245 /* If we are converting between two aggregate types that are mere
4246 variants, just make a VIEW_CONVERT_EXPR. */
4247 else if (code == ecode
4248 && AGGREGATE_TYPE_P (type)
4249 && gnat_types_compatible_p (type, etype))
4250 return build1 (VIEW_CONVERT_EXPR, type, expr);
4251
4252 /* In all other cases of related types, make a NOP_EXPR. */
4253 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4254 || (code == INTEGER_CST && ecode == INTEGER_CST
4255 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4256 return fold_convert (type, expr);
4257
4258 switch (code)
4259 {
4260 case VOID_TYPE:
4261 return fold_build1 (CONVERT_EXPR, type, expr);
4262
4263 case INTEGER_TYPE:
4264 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4265 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4266 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4267 return unchecked_convert (type, expr, false);
4268 else if (TYPE_BIASED_REPRESENTATION_P (type))
4269 return fold_convert (type,
4270 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4271 convert (TREE_TYPE (type), expr),
4272 TYPE_MIN_VALUE (type)));
4273
4274 /* ... fall through ... */
4275
4276 case ENUMERAL_TYPE:
4277    case BOOLEAN_TYPE:
4278 /* If we are converting an additive expression to an integer type
4279 with lower precision, be wary of the optimization that can be
4280 applied by convert_to_integer. There are 2 problematic cases:
4281 - if the first operand was originally of a biased type,
4282 because we could be recursively called to convert it
4283 to an intermediate type and thus rematerialize the
4284 additive operator endlessly,
4285 - if the expression contains a placeholder, because an
4286 intermediate conversion that changes the sign could
4287 be inserted and thus introduce an artificial overflow
4288 at compile time when the placeholder is substituted. */
4289 if (code == INTEGER_TYPE
4290 && ecode == INTEGER_TYPE
4291 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4292 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4293 {
4294 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4295
4296 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4297 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4298 || CONTAINS_PLACEHOLDER_P (expr))
4299 return build1 (NOP_EXPR, type, expr);
4300 }
4301
4302 return fold (convert_to_integer (type, expr));
4303
4304 case POINTER_TYPE:
4305 case REFERENCE_TYPE:
4306 /* If converting between two pointers to records denoting
4307 both a template and type, adjust if needed to account
4308 for any differing offsets, since one might be negative. */
4309 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4310 {
4311 tree bit_diff
4312 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4313 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4314 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4315 sbitsize_int (BITS_PER_UNIT));
4316
4317 expr = build1 (NOP_EXPR, type, expr);
4318 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4319 if (integer_zerop (byte_diff))
4320 return expr;
4321
4322 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4323 fold (convert (sizetype, byte_diff)));
4324 }
4325
4326 /* If converting to a thin pointer, handle specially. */
4327 if (TYPE_THIN_POINTER_P (type)
4328 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4329 return convert_to_thin_pointer (type, expr);
4330
4331 /* If converting fat pointer to normal pointer, get the pointer to the
4332 array and then convert it. */
4333 else if (TYPE_FAT_POINTER_P (etype))
4334 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4335 NULL_TREE, false);
4336
4337 return fold (convert_to_pointer (type, expr));
4338
4339 case REAL_TYPE:
4340 return fold (convert_to_real (type, expr));
4341
4342 case RECORD_TYPE:
4343 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4344 return
4345 gnat_build_constructor
4346 (type, tree_cons (TYPE_FIELDS (type),
4347 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4348 NULL_TREE));
4349
4350 /* ... fall through ... */
4351
4352 case ARRAY_TYPE:
4353 /* In these cases, assume the front-end has validated the conversion.
4354 If the conversion is valid, it will be a bit-wise conversion, so
4355 it can be viewed as an unchecked conversion. */
4356 return unchecked_convert (type, expr, false);
4357
4358 case UNION_TYPE:
4359      /* This is either a conversion between a tagged type and some
4360	 subtype, which we have to mark as a UNION_TYPE because of
4361	 overlapping fields, or a conversion of an Unchecked_Union. */
4362 return unchecked_convert (type, expr, false);
4363
4364 case UNCONSTRAINED_ARRAY_TYPE:
4365 /* If EXPR is a constrained array, take its address, convert it to a
4366 fat pointer, and then dereference it. Likewise if EXPR is a
4367 record containing both a template and a constrained array.
4368 Note that a record representing a justified modular type
4369 always represents a packed constrained array. */
4370 if (ecode == ARRAY_TYPE
4371 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4372 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4373 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4374 return
4375 build_unary_op
4376 (INDIRECT_REF, NULL_TREE,
4377 convert_to_fat_pointer (TREE_TYPE (type),
4378 build_unary_op (ADDR_EXPR,
4379 NULL_TREE, expr)));
4380
4381 /* Do something very similar for converting one unconstrained
4382 array to another. */
4383 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4384 return
4385 build_unary_op (INDIRECT_REF, NULL_TREE,
4386 convert (TREE_TYPE (type),
4387 build_unary_op (ADDR_EXPR,
4388 NULL_TREE, expr)));
4389 else
4390 gcc_unreachable ();
4391
4392 case COMPLEX_TYPE:
4393 return fold (convert_to_complex (type, expr));
4394
4395 default:
4396 gcc_unreachable ();
4397 }
4398}
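
/* A worked example of the biased-representation cases handled above
   (purely illustrative; the range and bias are made up): for a subtype
   with range 100 .. 115 stored with a bias of 100, converting a value X
   of that subtype to Integer computes

     (Integer) ((base type) X + 100)

   while converting an Integer Y back to the biased subtype computes

     (biased subtype) (Y - 100)

   which is what the PLUS_EXPR / MINUS_EXPR adjustments around
   TYPE_MIN_VALUE implement.  */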
4399\f
4400/* Remove all conversions that are done in EXP. This includes converting
4401 from a padded type or to a justified modular type. If TRUE_ADDRESS
4402 is true, always return the address of the containing object even if
4403 the address is not bit-aligned. */
4404
4405tree
4406remove_conversions (tree exp, bool true_address)
4407{
4408 switch (TREE_CODE (exp))
4409 {
4410 case CONSTRUCTOR:
4411 if (true_address
4412 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4413 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4414 return
4415 remove_conversions (VEC_index (constructor_elt,
4416 CONSTRUCTOR_ELTS (exp), 0)->value,
4417 true);
4418 break;
4419
4420 case COMPONENT_REF:
4421 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4422 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4423 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4424 break;
4425
4426 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4427 CASE_CONVERT:
4428 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4429
4430 default:
4431 break;
4432 }
4433
4434 return exp;
4435}
4436\f
4437/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4438 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4439 likewise return an expression pointing to the underlying array. */
4440
4441tree
4442maybe_unconstrained_array (tree exp)
4443{
4444 enum tree_code code = TREE_CODE (exp);
4445 tree new;
4446
4447 switch (TREE_CODE (TREE_TYPE (exp)))
4448 {
4449 case UNCONSTRAINED_ARRAY_TYPE:
4450 if (code == UNCONSTRAINED_ARRAY_REF)
4451 {
4452 new
4453 = build_unary_op (INDIRECT_REF, NULL_TREE,
4454 build_component_ref (TREE_OPERAND (exp, 0),
4455 get_identifier ("P_ARRAY"),
4456 NULL_TREE, false));
4457 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4458 return new;
4459 }
4460
4461 else if (code == NULL_EXPR)
4462 return build1 (NULL_EXPR,
4463 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4464 (TREE_TYPE (TREE_TYPE (exp))))),
4465 TREE_OPERAND (exp, 0));
4466
4467 case RECORD_TYPE:
4468 /* If this is a padded type, convert to the unpadded type and see if
4469 it contains a template. */
4470 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4471 {
4472 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4473 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4474 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4475 return
4476 build_component_ref (new, NULL_TREE,
4477 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4478 0);
4479 }
4480 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4481 return
4482 build_component_ref (exp, NULL_TREE,
4483 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4484 break;
4485
4486 default:
4487 break;
4488 }
4489
4490 return exp;
4491}
4492\f
4493/* Return an expression that does an unchecked conversion of EXPR to TYPE.
4494 If NOTRUNC_P is true, truncation operations should be suppressed. */
4495
4496tree
4497unchecked_convert (tree type, tree expr, bool notrunc_p)
4498{
4499 tree etype = TREE_TYPE (expr);
4500
4501 /* If the expression is already the right type, we are done. */
4502 if (etype == type)
4503 return expr;
4504
4505  /* If both types are integral, just do a normal conversion.
4506 Likewise for a conversion to an unconstrained array. */
4507 if ((((INTEGRAL_TYPE_P (type)
4508 && !(TREE_CODE (type) == INTEGER_TYPE
4509 && TYPE_VAX_FLOATING_POINT_P (type)))
4510 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4511 || (TREE_CODE (type) == RECORD_TYPE
4512 && TYPE_JUSTIFIED_MODULAR_P (type)))
4513 && ((INTEGRAL_TYPE_P (etype)
4514 && !(TREE_CODE (etype) == INTEGER_TYPE
4515 && TYPE_VAX_FLOATING_POINT_P (etype)))
4516 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4517 || (TREE_CODE (etype) == RECORD_TYPE
4518 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4519 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4520 {
4521 tree rtype = type;
4522 bool final_unchecked = false;
4523
4524 if (TREE_CODE (etype) == INTEGER_TYPE
4525 && TYPE_BIASED_REPRESENTATION_P (etype))
4526 {
4527 tree ntype = copy_type (etype);
4528
4529 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4530 TYPE_MAIN_VARIANT (ntype) = ntype;
4531 expr = build1 (NOP_EXPR, ntype, expr);
4532 }
4533
4534 if (TREE_CODE (type) == INTEGER_TYPE
4535 && TYPE_BIASED_REPRESENTATION_P (type))
4536 {
4537 rtype = copy_type (type);
4538 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4539 TYPE_MAIN_VARIANT (rtype) = rtype;
4540 }
4541
4542 /* We have another special case: if we are unchecked converting subtype
4543 into a base type, we need to ensure that VRP doesn't propagate range
4544 information since this conversion may be done precisely to validate
4545 that the object is within the range it is supposed to have. */
4546 else if (TREE_CODE (expr) != INTEGER_CST
4547 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4548 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4549 || TREE_CODE (etype) == ENUMERAL_TYPE
4550 || TREE_CODE (etype) == BOOLEAN_TYPE))
4551 {
4552 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4553	     in order not to be deemed a useless type conversion, it must
4554 be from subtype to base type.
4555
4556 ??? This may raise addressability and/or aliasing issues because
4557 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4558 address of its operand to be taken if it is deemed addressable
4559 and not already in GIMPLE form. */
4560 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4561 rtype = copy_type (rtype);
4562 TYPE_MAIN_VARIANT (rtype) = rtype;
4563 TREE_TYPE (rtype) = type;
4564 final_unchecked = true;
4565 }
4566
4567 expr = convert (rtype, expr);
4568 if (type != rtype)
4569 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
4570 type, expr);
4571 }
4572
4573 /* If we are converting TO an integral type whose precision is not the
4574 same as its size, first unchecked convert to a record that contains
4575 an object of the output type. Then extract the field. */
4576 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4577 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4578 GET_MODE_BITSIZE (TYPE_MODE (type))))
4579 {
4580 tree rec_type = make_node (RECORD_TYPE);
4581 tree field = create_field_decl (get_identifier ("OBJ"), type,
4582 rec_type, 1, 0, 0, 0);
4583
4584 TYPE_FIELDS (rec_type) = field;
4585 layout_type (rec_type);
4586
4587 expr = unchecked_convert (rec_type, expr, notrunc_p);
4588 expr = build_component_ref (expr, NULL_TREE, field, 0);
4589 }
4590
4591 /* Similarly for integral input type whose precision is not equal to its
4592 size. */
4593 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4594 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4595 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4596 {
4597 tree rec_type = make_node (RECORD_TYPE);
4598 tree field
4599 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4600 1, 0, 0, 0);
4601
4602 TYPE_FIELDS (rec_type) = field;
4603 layout_type (rec_type);
4604
4605 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4606 expr = unchecked_convert (type, expr, notrunc_p);
4607 }
4608
4609 /* We have a special case when we are converting between two
4610 unconstrained array types. In that case, take the address,
4611 convert the fat pointer types, and dereference. */
4612 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4613 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4614 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4615 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4616 build_unary_op (ADDR_EXPR, NULL_TREE,
4617 expr)));
4618 else
4619 {
4620 expr = maybe_unconstrained_array (expr);
4621 etype = TREE_TYPE (expr);
4622 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4623 }
4624
4625 /* If the result is an integral type whose size is not equal to
4626 the size of the underlying machine type, sign- or zero-extend
4627 the result. We need not do this in the case where the input is
4628 an integral type of the same precision and signedness or if the output
4629 is a biased type or if both the input and output are unsigned. */
4630 if (!notrunc_p
4631 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4632 && !(TREE_CODE (type) == INTEGER_TYPE
4633 && TYPE_BIASED_REPRESENTATION_P (type))
4634 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4635 GET_MODE_BITSIZE (TYPE_MODE (type)))
4636 && !(INTEGRAL_TYPE_P (etype)
4637 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4638 && operand_equal_p (TYPE_RM_SIZE (type),
4639 (TYPE_RM_SIZE (etype) != 0
4640 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4641 0))
4642 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4643 {
4644 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4645 TYPE_UNSIGNED (type));
4646 tree shift_expr
4647 = convert (base_type,
4648 size_binop (MINUS_EXPR,
4649 bitsize_int
4650 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4651 TYPE_RM_SIZE (type)));
4652 expr
4653 = convert (type,
4654 build_binary_op (RSHIFT_EXPR, base_type,
4655 build_binary_op (LSHIFT_EXPR, base_type,
4656 convert (base_type, expr),
4657 shift_expr),
4658 shift_expr));
4659 }
4660
4661 /* An unchecked conversion should never raise Constraint_Error. The code
4662 below assumes that GCC's conversion routines overflow the same way that
4663 the underlying hardware does. This is probably true. In the rare case
4664 when it is false, we can rely on the fact that such conversions are
4665 erroneous anyway. */
4666 if (TREE_CODE (expr) == INTEGER_CST)
4667 TREE_OVERFLOW (expr) = 0;
4668
4669  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4670     mark the expression as no longer constant. */
4671 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4672 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4673 OEP_ONLY_CONST))
4674 TREE_CONSTANT (expr) = 0;
4675
4676 return expr;
4677}
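
/* The final extension step above can be pictured as follows (sketch only):
   for a target type whose RM size is 5 bits inside an 8-bit machine mode,
   the shift count is 8 - 5 = 3 and the result is

     (type) (((base_type) expr << 3) >> 3)

   which propagates the value's top bit (or zero, for an unsigned target)
   through the unused high bits of the mode, matching the signedness of
   the target type.  */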
4678\f
4679/* Return the appropriate GCC tree code for the specified GNAT type,
4680 the latter being a record type as predicated by Is_Record_Type. */
4681
4682enum tree_code
4683tree_code_for_record_type (Entity_Id gnat_type)
4684{
4685 Node_Id component_list
4686 = Component_List (Type_Definition
4687 (Declaration_Node
4688 (Implementation_Base_Type (gnat_type))));
4689 Node_Id component;
4690
4691 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4692 we have a non-discriminant field outside a variant. In either case,
4693 it's a RECORD_TYPE. */
4694
4695 if (!Is_Unchecked_Union (gnat_type))
4696 return RECORD_TYPE;
4697
4698 for (component = First_Non_Pragma (Component_Items (component_list));
4699 Present (component);
4700 component = Next_Non_Pragma (component))
4701 if (Ekind (Defining_Entity (component)) == E_Component)
4702 return RECORD_TYPE;
4703
4704 return UNION_TYPE;
4705}
4706
4707/* Return true if GNU_TYPE is suitable as the type of a non-aliased
4708 component of an aggregate type. */
4709
4710bool
4711type_for_nonaliased_component_p (tree gnu_type)
4712{
4713 /* If the type is passed by reference, we may have pointers to the
4714 component so it cannot be made non-aliased. */
4715 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4716 return false;
4717
4718 /* We used to say that any component of aggregate type is aliased
4719 because the front-end may take 'Reference of it. The front-end
4720 has been enhanced in the meantime so as to use a renaming instead
4721 in most cases, but the back-end can probably take the address of
4722 such a component too so we go for the conservative stance.
4723
4724 For instance, we might need the address of any array type, even
4725 if normally passed by copy, to construct a fat pointer if the
4726 component is used as an actual for an unconstrained formal.
4727
4728 Likewise for record types: even if a specific record subtype is
4729 passed by copy, the parent type might be passed by ref (e.g. if
4730 it's of variable size) and we might take the address of a child
4731 component to pass to a parent formal. We have no way to check
4732 for such conditions here. */
4733 if (AGGREGATE_TYPE_P (gnu_type))
4734 return false;
4735
4736 return true;
4737}
4738
4739/* Perform final processing on global variables. */
4740
4741void
4742gnat_write_global_declarations (void)
4743{
4744 /* Proceed to optimize and emit assembly.
4745 FIXME: shouldn't be the front end's responsibility to call this. */
4746 cgraph_optimize ();
4747
4748 /* Emit debug info for all global declarations. */
4749 emit_debug_global_declarations (VEC_address (tree, global_decls),
4750 VEC_length (tree, global_decls));
4751}
4752
4753/* ************************************************************************
4754 * * GCC builtins support *
4755 * ************************************************************************ */
4756
4757/* The general scheme is fairly simple:
4758
4759   For each builtin function/type to be declared, gnat_install_builtins calls
4760   internal facilities which eventually get to gnat_pushdecl, which in turn
4761   records the builtin function decls so declared in the 'builtin_decls'
4762   global data structure.  When an Intrinsic subprogram declaration is
4763   processed, we search this global data structure to retrieve the associated
4764   BUILT_IN DECL node.  */
4765
4766/* Search the chain of currently available builtin declarations for a node
4767 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4768 found, if any, or NULL_TREE otherwise. */
4769tree
4770builtin_decl_for (tree name)
4771{
4772 unsigned i;
4773 tree decl;
4774
4775 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4776 if (DECL_NAME (decl) == name)
4777 return decl;
4778
4779 return NULL_TREE;
4780}
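
/* As an illustrative sketch (the builtin name below is only an example), a
   caller processing an Intrinsic subprogram could query the table like so:

     tree gnu_builtin = builtin_decl_for (get_identifier ("__builtin_sqrt"));
     if (gnu_builtin != NULL_TREE)
       ... use the BUILT_IN decl for the intrinsic ...

   The lookup is a plain linear scan of builtin_decls, which is adequate
   given how few Intrinsic declarations a unit typically contains.  */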
4781
4782/* The code below eventually exposes gnat_install_builtins, which declares
4783   the builtin types and functions we might need, either internally or as
4784   user-accessible facilities.
4785
4786   ??? This is a first implementation shot, still in rough shape.  It is
4787   heavily inspired by the "C" family implementation, with chunks copied
4788   verbatim from there.
4789
4790 Two obvious TODO candidates are
4791 o Use a more efficient name/decl mapping scheme
4792 o Devise a middle-end infrastructure to avoid having to copy
4793 pieces between front-ends. */
4794
4795/* ----------------------------------------------------------------------- *
4796 * BUILTIN ELEMENTARY TYPES *
4797 * ----------------------------------------------------------------------- */
4798
4799/* Standard data types to be used in builtin argument declarations. */
4800
4801enum c_tree_index
4802{
4803 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4804 CTI_STRING_TYPE,
4805 CTI_CONST_STRING_TYPE,
4806
4807 CTI_MAX
4808};
4809
4810static tree c_global_trees[CTI_MAX];
4811
4812#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4813#define string_type_node c_global_trees[CTI_STRING_TYPE]
4814#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4815
4816/* ??? In addition to some attribute handlers, we currently don't support a
4817   (small) number of builtin types, which in turn inhibits support for a
4818   number of builtin functions.  */
4819#define wint_type_node void_type_node
4820#define intmax_type_node void_type_node
4821#define uintmax_type_node void_type_node
4822
4823/* Build the void_list_node (void_type_node having been created). */
4824
4825static tree
4826build_void_list_node (void)
4827{
4828 tree t = build_tree_list (NULL_TREE, void_type_node);
4829 return t;
4830}
4831
4832/* Used to help initialize the builtin-types.def table.  When a type of
4833   the correct size doesn't exist, use error_mark_node instead of NULL.
4834   The latter results in segfaults even when a decl using the type doesn't
4835   get invoked.  */
4836
4837static tree
4838builtin_type_for_size (int size, bool unsignedp)
4839{
4840 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4841 return type ? type : error_mark_node;
4842}
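
/* For instance, builtin_type_for_size (32, false) normally yields a 32-bit
   signed integer type, whereas asking for a size the target cannot provide
   (say, 128 bits on a 32-bit target) yields error_mark_node, which
   def_fn_type below then propagates to the enclosing function type.  */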
4843
4844/* Build/push the elementary type decls that builtin functions/types
4845 will need. */
4846
4847static void
4848install_builtin_elementary_types (void)
4849{
4850 signed_size_type_node = size_type_node;
4851 pid_type_node = integer_type_node;
4852 void_list_node = build_void_list_node ();
4853
4854 string_type_node = build_pointer_type (char_type_node);
4855 const_string_type_node
4856 = build_pointer_type (build_qualified_type
4857 (char_type_node, TYPE_QUAL_CONST));
4858}
4859
4860/* ----------------------------------------------------------------------- *
4861 * BUILTIN FUNCTION TYPES *
4862 * ----------------------------------------------------------------------- */
4863
4864/* Now, builtin function types per se. */
4865
4866enum c_builtin_type
4867{
4868#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4869#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4870#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4871#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4872#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4873#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4874#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4875#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4876#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4877#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4878#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4879#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4880#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4881#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4882#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4883 NAME,
4884#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4885#include "builtin-types.def"
4886#undef DEF_PRIMITIVE_TYPE
4887#undef DEF_FUNCTION_TYPE_0
4888#undef DEF_FUNCTION_TYPE_1
4889#undef DEF_FUNCTION_TYPE_2
4890#undef DEF_FUNCTION_TYPE_3
4891#undef DEF_FUNCTION_TYPE_4
4892#undef DEF_FUNCTION_TYPE_5
4893#undef DEF_FUNCTION_TYPE_6
4894#undef DEF_FUNCTION_TYPE_7
4895#undef DEF_FUNCTION_TYPE_VAR_0
4896#undef DEF_FUNCTION_TYPE_VAR_1
4897#undef DEF_FUNCTION_TYPE_VAR_2
4898#undef DEF_FUNCTION_TYPE_VAR_3
4899#undef DEF_FUNCTION_TYPE_VAR_4
4900#undef DEF_FUNCTION_TYPE_VAR_5
4901#undef DEF_POINTER_TYPE
4902 BT_LAST
4903};
4904
4905typedef enum c_builtin_type builtin_type;
4906
4907/* A temporary array used in communication with def_fn_type. */
4908static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4909
4910/* A helper for install_builtin_function_types below.  Build the function
4911   type for DEF with return type RET and N arguments.  If VAR is true, the
4912   function should be variadic after those N arguments.
4913
4914   Takes special care not to ICE if any of the types involved are
4915   error_mark_node, which indicates that said type is not in fact available
4916   (see builtin_type_for_size); in that case the function type as a whole is
4917   set to error_mark_node.  */
4918
4919static void
4920def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4921{
4922 tree args = NULL, t;
4923 va_list list;
4924 int i;
4925
4926 va_start (list, n);
4927 for (i = 0; i < n; ++i)
4928 {
4929 builtin_type a = va_arg (list, builtin_type);
4930 t = builtin_types[a];
4931 if (t == error_mark_node)
4932 goto egress;
4933 args = tree_cons (NULL_TREE, t, args);
4934 }
4935 va_end (list);
4936
4937 args = nreverse (args);
4938 if (!var)
4939 args = chainon (args, void_list_node);
4940
4941 t = builtin_types[ret];
4942 if (t == error_mark_node)
4943 goto egress;
4944 t = build_function_type (t, args);
4945
4946 egress:
4947 builtin_types[def] = t;
4948}
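
/* As an illustration, a builtin-types.def entry along the lines of

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   is expanded by install_builtin_function_types below into

     def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

   which records the GCC type of "int f (int)" in builtin_types.  */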
4949
4950/* Build the builtin function types and install them in the builtin_types
4951 array for later use in builtin function decls. */
4952
4953static void
4954install_builtin_function_types (void)
4955{
4956 tree va_list_ref_type_node;
4957 tree va_list_arg_type_node;
4958
4959 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4960 {
4961 va_list_arg_type_node = va_list_ref_type_node =
4962 build_pointer_type (TREE_TYPE (va_list_type_node));
4963 }
4964 else
4965 {
4966 va_list_arg_type_node = va_list_type_node;
4967 va_list_ref_type_node = build_reference_type (va_list_type_node);
4968 }
4969
4970#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4971 builtin_types[ENUM] = VALUE;
4972#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4973 def_fn_type (ENUM, RETURN, 0, 0);
4974#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4975 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4976#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4977 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4978#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4979 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4980#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4981 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4982#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4983 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4984#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4985 ARG6) \
4986 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4987#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4988 ARG6, ARG7) \
4989 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4990#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4991 def_fn_type (ENUM, RETURN, 1, 0);
4992#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4993 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4994#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4995 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4996#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4997 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4998#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4999 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5000#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5001 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5002#define DEF_POINTER_TYPE(ENUM, TYPE) \
5003 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5004
5005#include "builtin-types.def"
5006
5007#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
5008#undef DEF_FUNCTION_TYPE_1
5009#undef DEF_FUNCTION_TYPE_2
5010#undef DEF_FUNCTION_TYPE_3
5011#undef DEF_FUNCTION_TYPE_4
5012#undef DEF_FUNCTION_TYPE_5
5013#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
5014#undef DEF_FUNCTION_TYPE_VAR_0
5015#undef DEF_FUNCTION_TYPE_VAR_1
5016#undef DEF_FUNCTION_TYPE_VAR_2
5017#undef DEF_FUNCTION_TYPE_VAR_3
5018#undef DEF_FUNCTION_TYPE_VAR_4
5019#undef DEF_FUNCTION_TYPE_VAR_5
5020#undef DEF_POINTER_TYPE
5021 builtin_types[(int) BT_LAST] = NULL_TREE;
5022}
5023
5024/* ----------------------------------------------------------------------- *
5025 * BUILTIN ATTRIBUTES *
5026 * ----------------------------------------------------------------------- */
5027
5028enum built_in_attribute
5029{
5030#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5031#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5032#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5033#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5034#include "builtin-attrs.def"
5035#undef DEF_ATTR_NULL_TREE
5036#undef DEF_ATTR_INT
5037#undef DEF_ATTR_IDENT
5038#undef DEF_ATTR_TREE_LIST
5039 ATTR_LAST
5040};
5041
5042static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5043
5044static void
5045install_builtin_attributes (void)
5046{
5047 /* Fill in the built_in_attributes array. */
5048#define DEF_ATTR_NULL_TREE(ENUM) \
5049 built_in_attributes[(int) ENUM] = NULL_TREE;
5050#define DEF_ATTR_INT(ENUM, VALUE) \
5051 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5052#define DEF_ATTR_IDENT(ENUM, STRING) \
5053 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5054#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5055 built_in_attributes[(int) ENUM] \
5056 = tree_cons (built_in_attributes[(int) PURPOSE], \
5057 built_in_attributes[(int) VALUE], \
5058 built_in_attributes[(int) CHAIN]);
5059#include "builtin-attrs.def"
5060#undef DEF_ATTR_NULL_TREE
5061#undef DEF_ATTR_INT
5062#undef DEF_ATTR_IDENT
5063#undef DEF_ATTR_TREE_LIST
5064}
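
/* As an illustration, builtin-attrs.def contains entries along the lines of

     DEF_ATTR_IDENT (ATTR_NOTHROW, "nothrow")
     DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)

   so built_in_attributes[ATTR_NOTHROW_LIST] ends up as a one-element
   attribute list suitable for passing to add_builtin_function below.  */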
5065
5066/* Handle a "const" attribute; arguments as in
5067 struct attribute_spec.handler. */
5068
5069static tree
5070handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5071 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5072 bool *no_add_attrs)
5073{
5074 if (TREE_CODE (*node) == FUNCTION_DECL)
5075 TREE_READONLY (*node) = 1;
5076 else
5077 *no_add_attrs = true;
5078
5079 return NULL_TREE;
5080}
5081
5082/* Handle a "nothrow" attribute; arguments as in
5083 struct attribute_spec.handler. */
5084
5085static tree
5086handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5087 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5088 bool *no_add_attrs)
5089{
5090 if (TREE_CODE (*node) == FUNCTION_DECL)
5091 TREE_NOTHROW (*node) = 1;
5092 else
5093 *no_add_attrs = true;
5094
5095 return NULL_TREE;
5096}
5097
5098/* Handle a "pure" attribute; arguments as in
5099 struct attribute_spec.handler. */
5100
5101static tree
5102handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5103 int ARG_UNUSED (flags), bool *no_add_attrs)
5104{
5105 if (TREE_CODE (*node) == FUNCTION_DECL)
5106 DECL_PURE_P (*node) = 1;
5107 /* ??? TODO: Support types. */
5108 else
5109 {
5110 warning (OPT_Wattributes, "%qE attribute ignored", name);
5111 *no_add_attrs = true;
5112 }
5113
5114 return NULL_TREE;
5115}
5116
5117/* Handle a "no vops" attribute; arguments as in
5118 struct attribute_spec.handler. */
5119
5120static tree
5121handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5122 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5123 bool *ARG_UNUSED (no_add_attrs))
5124{
5125 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5126 DECL_IS_NOVOPS (*node) = 1;
5127 return NULL_TREE;
5128}
5129
5130/* Helper for nonnull attribute handling; fetch the operand number
5131 from the attribute argument list. */
5132
5133static bool
5134get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5135{
5136 /* Verify the arg number is a constant. */
5137 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5138 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5139 return false;
5140
5141 *valp = TREE_INT_CST_LOW (arg_num_expr);
5142 return true;
5143}
5144
5145/* Handle the "nonnull" attribute. */
5146static tree
5147handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5148 tree args, int ARG_UNUSED (flags),
5149 bool *no_add_attrs)
5150{
5151 tree type = *node;
5152 unsigned HOST_WIDE_INT attr_arg_num;
5153
5154 /* If no arguments are specified, all pointer arguments should be
5155 non-null. Verify a full prototype is given so that the arguments
5156 will have the correct types when we actually check them later. */
5157 if (!args)
5158 {
5159 if (!TYPE_ARG_TYPES (type))
5160 {
5161 error ("nonnull attribute without arguments on a non-prototype");
5162 *no_add_attrs = true;
5163 }
5164 return NULL_TREE;
5165 }
5166
5167 /* Argument list specified. Verify that each argument number references
5168 a pointer argument. */
5169 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5170 {
5171 tree argument;
5172 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5173
5174 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5175 {
5176 error ("nonnull argument has invalid operand number (argument %lu)",
5177 (unsigned long) attr_arg_num);
5178 *no_add_attrs = true;
5179 return NULL_TREE;
5180 }
5181
5182 argument = TYPE_ARG_TYPES (type);
5183 if (argument)
5184 {
5185 for (ck_num = 1; ; ck_num++)
5186 {
5187 if (!argument || ck_num == arg_num)
5188 break;
5189 argument = TREE_CHAIN (argument);
5190 }
5191
5192 if (!argument
5193 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5194 {
5195 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5196 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5197 *no_add_attrs = true;
5198 return NULL_TREE;
5199 }
5200
5201 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5202 {
5203 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5204 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5205 *no_add_attrs = true;
5206 return NULL_TREE;
5207 }
5208 }
5209 }
5210
5211 return NULL_TREE;
5212}
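
/* For example, a subprogram imported with a C profile such as

     extern void copy (void *dst, const void *src)
       __attribute__ ((nonnull (1, 2)));

   passes the checks above, whereas nonnull (3) on the same prototype would
   be diagnosed as having an out-of-range operand number.  */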
5213
5214/* Handle a "sentinel" attribute. */
5215
5216static tree
5217handle_sentinel_attribute (tree *node, tree name, tree args,
5218 int ARG_UNUSED (flags), bool *no_add_attrs)
5219{
5220 tree params = TYPE_ARG_TYPES (*node);
5221
5222 if (!params)
5223 {
5224 warning (OPT_Wattributes,
5225 "%qE attribute requires prototypes with named arguments", name);
5226 *no_add_attrs = true;
5227 }
5228 else
5229 {
5230 while (TREE_CHAIN (params))
5231 params = TREE_CHAIN (params);
5232
5233 if (VOID_TYPE_P (TREE_VALUE (params)))
5234 {
5235 warning (OPT_Wattributes,
5236 "%qE attribute only applies to variadic functions", name);
5237 *no_add_attrs = true;
5238 }
5239 }
5240
5241 if (args)
5242 {
5243 tree position = TREE_VALUE (args);
5244
5245 if (TREE_CODE (position) != INTEGER_CST)
5246 {
5247 warning (0, "requested position is not an integer constant");
5248 *no_add_attrs = true;
5249 }
5250 else
5251 {
5252 if (tree_int_cst_lt (position, integer_zero_node))
5253 {
5254 warning (0, "requested position is less than zero");
5255 *no_add_attrs = true;
5256 }
5257 }
5258 }
5259
5260 return NULL_TREE;
5261}
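
/* For example, the attribute is meaningful on a variadic prototype such as

     extern int my_execl (const char *path, const char *arg, ...)
       __attribute__ ((sentinel));

   (the function name here is purely illustrative); on a non-variadic
   function it is ignored with a warning, as implemented above.  */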
5262
5263/* Handle a "noreturn" attribute; arguments as in
5264 struct attribute_spec.handler. */
5265
5266static tree
5267handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5268 int ARG_UNUSED (flags), bool *no_add_attrs)
5269{
5270 tree type = TREE_TYPE (*node);
5271
5272 /* See FIXME comment in c_common_attribute_table. */
5273 if (TREE_CODE (*node) == FUNCTION_DECL)
5274 TREE_THIS_VOLATILE (*node) = 1;
5275 else if (TREE_CODE (type) == POINTER_TYPE
5276 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5277 TREE_TYPE (*node)
5278 = build_pointer_type
5279 (build_type_variant (TREE_TYPE (type),
5280 TYPE_READONLY (TREE_TYPE (type)), 1));
5281 else
5282 {
5283 warning (OPT_Wattributes, "%qE attribute ignored", name);
5284 *no_add_attrs = true;
5285 }
5286
5287 return NULL_TREE;
5288}
5289
5290/* Handle a "malloc" attribute; arguments as in
5291 struct attribute_spec.handler. */
5292
5293static tree
5294handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5295 int ARG_UNUSED (flags), bool *no_add_attrs)
5296{
5297 if (TREE_CODE (*node) == FUNCTION_DECL
5298 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5299 DECL_IS_MALLOC (*node) = 1;
5300 else
5301 {
5302 warning (OPT_Wattributes, "%qE attribute ignored", name);
5303 *no_add_attrs = true;
5304 }
5305
5306 return NULL_TREE;
5307}
5308
5309/* Fake handler for attributes we don't properly support. */
5310
5311tree
5312fake_attribute_handler (tree * ARG_UNUSED (node),
5313 tree ARG_UNUSED (name),
5314 tree ARG_UNUSED (args),
5315 int ARG_UNUSED (flags),
5316 bool * ARG_UNUSED (no_add_attrs))
5317{
5318 return NULL_TREE;
5319}
5320
5321/* Handle a "type_generic" attribute. */
5322
5323static tree
5324handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5325 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5326 bool * ARG_UNUSED (no_add_attrs))
5327{
5328 tree params;
5329
5330 /* Ensure we have a function type. */
5331 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5332
5333 params = TYPE_ARG_TYPES (*node);
5334 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5335 params = TREE_CHAIN (params);
5336
5337 /* Ensure we have a variadic function. */
5338 gcc_assert (!params);
5339
5340 return NULL_TREE;
5341}
5342
5343/* ----------------------------------------------------------------------- *
5344 * BUILTIN FUNCTIONS *
5345 * ----------------------------------------------------------------------- */
5346
5347/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5348 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5349 if nonansi_p and flag_no_nonansi_builtin. */
5350
5351static void
5352def_builtin_1 (enum built_in_function fncode,
5353 const char *name,
5354 enum built_in_class fnclass,
5355 tree fntype, tree libtype,
5356 bool both_p, bool fallback_p,
5357 bool nonansi_p ATTRIBUTE_UNUSED,
5358 tree fnattrs, bool implicit_p)
5359{
5360 tree decl;
5361 const char *libname;
5362
5363  /* Preserve an already installed decl.  It was most likely set up in
5364     advance (e.g. as part of the internal builtins) for specific reasons.  */
5365 if (built_in_decls[(int) fncode] != NULL_TREE)
5366 return;
5367
5368 gcc_assert ((!both_p && !fallback_p)
5369 || !strncmp (name, "__builtin_",
5370 strlen ("__builtin_")));
5371
5372 libname = name + strlen ("__builtin_");
5373 decl = add_builtin_function (name, fntype, fncode, fnclass,
5374 (fallback_p ? libname : NULL),
5375 fnattrs);
5376 if (both_p)
5377 /* ??? This is normally further controlled by command-line options
5378 like -fno-builtin, but we don't have them for Ada. */
5379 add_builtin_function (libname, libtype, fncode, fnclass,
5380 NULL, fnattrs);
5381
5382 built_in_decls[(int) fncode] = decl;
5383 if (implicit_p)
5384 implicit_built_in_decls[(int) fncode] = decl;
5385}
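
/* As an illustration, for a library builtin such as memcpy the table below
   typically declares two decls sharing the same function code: one named
   "__builtin_memcpy" and, BOTH_P being set, one under the plain "memcpy"
   name; the __builtin_ form falls back to the library name when it cannot
   be expanded inline.  */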
5386
5387static int flag_isoc94 = 0;
5388static int flag_isoc99 = 0;
5389
5390/* Install what the common builtins.def offers. */
5391
5392static void
5393install_builtin_functions (void)
5394{
5395#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5396 NONANSI_P, ATTRS, IMPLICIT, COND) \
5397 if (NAME && COND) \
5398 def_builtin_1 (ENUM, NAME, CLASS, \
5399 builtin_types[(int) TYPE], \
5400 builtin_types[(int) LIBTYPE], \
5401 BOTH_P, FALLBACK_P, NONANSI_P, \
5402 built_in_attributes[(int) ATTRS], IMPLICIT);
5403#include "builtins.def"
5404#undef DEF_BUILTIN
5405}
5406
5407/* ----------------------------------------------------------------------- *
5408 * BUILTIN FUNCTIONS *
5409 * ----------------------------------------------------------------------- */
5410
5411/* Install the builtin functions we might need. */
5412
5413void
5414gnat_install_builtins (void)
5415{
5416 install_builtin_elementary_types ();
5417 install_builtin_function_types ();
5418 install_builtin_attributes ();
5419
5420 /* Install builtins used by generic middle-end pieces first. Some of these
5421 know about internal specificities and control attributes accordingly, for
5422 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5423 the generic definition from builtins.def. */
5424 build_common_builtin_nodes ();
5425
5426 /* Now, install the target specific builtins, such as the AltiVec family on
5427 ppc, and the common set as exposed by builtins.def. */
5428 targetm.init_builtins ();
5429 install_builtin_functions ();
5430}
5431
5432#include "gt-ada-utils.h"
5433#include "gtype-ada.h"