]> gcc.gnu.org Git - gcc.git/blob - gcc/ada/gcc-interface/utils.c
utils.c (gnat_poplevel): Use block_chainon.
[gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2011, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "diagnostic-core.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "tree-dump.h"
42 #include "tree-inline.h"
43 #include "tree-iterator.h"
44
45 #include "ada.h"
46 #include "types.h"
47 #include "atree.h"
48 #include "elists.h"
49 #include "namet.h"
50 #include "nlists.h"
51 #include "stringt.h"
52 #include "uintp.h"
53 #include "fe.h"
54 #include "sinfo.h"
55 #include "einfo.h"
56 #include "ada-tree.h"
57 #include "gigi.h"
58
59 #ifndef MAX_BITS_PER_WORD
60 #define MAX_BITS_PER_WORD BITS_PER_WORD
61 #endif
62
63 /* If nonzero, pretend we are allocating at global level. */
64 int force_global;
65
66 /* The default alignment of "double" floating-point types, i.e. floating
67 point types whose size is equal to 64 bits, or 0 if this alignment is
68 not specifically capped. */
69 int double_float_alignment;
70
71 /* The default alignment of "double" or larger scalar types, i.e. scalar
72 types whose size is greater or equal to 64 bits, or 0 if this alignment
73 is not specifically capped. */
74 int double_scalar_alignment;
75
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
78
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
81
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
84
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98
99 /* Fake handler for attributes we don't properly support, typically because
100 they'd require dragging a lot of the common-c front-end circuitry. */
101 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102
103 /* Table of machine-independent internal attributes for Ada. We support
104 this minimal set of attributes to accommodate the needs of builtins. */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  /* No handler needed: the attribute is only used as a marker.  */
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler, false },

  /* Table terminator: a null name ends the scan.  */
  { NULL,           0, 0,  false, false, false, NULL, false }
};
144
145 /* Associates a GNAT tree node to a GCC tree node. It is used in
146 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
147 of `save_gnu_tree' for more info. */
148 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
149
150 #define GET_GNU_TREE(GNAT_ENTITY) \
151 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
152
153 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
154 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
155
156 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
157 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
158
159 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
160 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
161
162 #define GET_DUMMY_NODE(GNAT_ENTITY) \
163 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
164
165 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
166 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
167
168 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
169 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
170
171 /* This variable keeps a table for types for each precision so that we only
172 allocate each of them once. Signed and unsigned types are kept separate.
173
174 Note that these types are only used when fold-const requests something
175 special. Perhaps we should NOT share these types; we'll see how it
176 goes later. */
177 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
178
179 /* Likewise for float types, but record these by mode. */
180 static GTY(()) tree float_types[NUM_MACHINE_MODES];
181
182 /* For each binding contour we allocate a binding_level structure to indicate
183 the binding depth. */
184
struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).
     Also used to thread structures onto the free_binding_level list when
     they are popped, so they can be recycled by gnat_pushlevel.  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};
194
195 /* The binding level currently in effect. */
196 static GTY(()) struct gnat_binding_level *current_binding_level;
197
198 /* A chain of gnat_binding_level structures awaiting reuse. */
199 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
200
201 /* An array of global declarations. */
202 static GTY(()) VEC(tree,gc) *global_decls;
203
204 /* An array of builtin function declarations. */
205 static GTY(()) VEC(tree,gc) *builtin_decls;
206
207 /* An array of global renaming pointers. */
208 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
209
210 /* A chain of unused BLOCK nodes. */
211 static GTY((deletable)) tree free_block_chain;
212
213 static tree merge_sizes (tree, tree, tree, bool, bool);
214 static tree compute_related_constant (tree, tree);
215 static tree split_plus (tree, tree *);
216 static tree float_type_for_precision (int, enum machine_mode);
217 static tree convert_to_fat_pointer (tree, tree);
218 static tree convert_to_thin_pointer (tree, tree);
219 static bool potential_alignment_gap (tree, tree, tree);
220 static void process_attributes (tree, struct attrib *);
221 \f
222 /* Initialize the association of GNAT nodes to GCC trees. */
223
224 void
225 init_gnat_to_gnu (void)
226 {
227 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
228 }
229
230 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
231 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
232 If NO_CHECK is true, the latter check is suppressed.
233
234 If GNU_DECL is zero, reset a previous association. */
235
void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  /* Record (or, if GNU_DECL is zero, clear) the association.  */
  SET_GNU_TREE (gnat_entity, gnu_decl);
}
249
250 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
251 that was associated with it. If there is no such tree node, abort.
252
253 In some cases, such as delayed elaboration or expressions that need to
254 be elaborated only once, GNAT_ENTITY is really not an entity. */
255
tree
get_gnu_tree (Entity_Id gnat_entity)
{
  /* Abort if no GCC tree has been associated with GNAT_ENTITY.  */
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}
262
263 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
264
265 bool
266 present_gnu_tree (Entity_Id gnat_entity)
267 {
268 return PRESENT_GNU_TREE (gnat_entity);
269 }
270 \f
271 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
272
273 void
274 init_dummy_type (void)
275 {
276 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
277 }
278
279 /* Make a dummy type corresponding to GNAT_TYPE. */
280
tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_type))
    TREE_ADDRESSABLE (gnu_type) = 1;

  /* Record the dummy type so subsequent calls return the same node.  */
  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
316
317 /* Return the dummy type that was made for GNAT_TYPE, if any. */
318
319 tree
320 get_dummy_type (Entity_Id gnat_type)
321 {
322 return GET_DUMMY_NODE (gnat_type);
323 }
324
325 /* Build dummy fat and thin pointer types whose designated type is specified
326 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
327
void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  /* The dummy template (bounds) type and a pointer to it ("XUB" suffix).  */
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  /* The dummy array type and a pointer to it ("XUA" suffix).  */
  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  /* The fat pointer type itself: a record with the array pointer and the
     bounds pointer as its two fields ("XUP" suffix).  */
  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  /* The dummy object record type ("XUT" suffix).  */
  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  /* Attach both dummy types to the designated type.  */
  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
367 \f
368 /* Return nonzero if we are currently in the global binding level. */
369
370 int
371 global_bindings_p (void)
372 {
373 return ((force_global || !current_function_decl) ? -1 : 0);
374 }
375
376 /* Enter a new binding level. */
377
378 void
379 gnat_pushlevel (void)
380 {
381 struct gnat_binding_level *newlevel = NULL;
382
383 /* Reuse a struct for this binding level, if there is one. */
384 if (free_binding_level)
385 {
386 newlevel = free_binding_level;
387 free_binding_level = free_binding_level->chain;
388 }
389 else
390 newlevel = ggc_alloc_gnat_binding_level ();
391
392 /* Use a free BLOCK, if any; otherwise, allocate one. */
393 if (free_block_chain)
394 {
395 newlevel->block = free_block_chain;
396 free_block_chain = BLOCK_CHAIN (free_block_chain);
397 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
398 }
399 else
400 newlevel->block = make_node (BLOCK);
401
402 /* Point the BLOCK we just made to its parent. */
403 if (current_binding_level)
404 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
405
406 BLOCK_VARS (newlevel->block) = NULL_TREE;
407 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
408 TREE_USED (newlevel->block) = 1;
409
410 /* Add this level to the front of the chain (stack) of active levels. */
411 newlevel->chain = current_binding_level;
412 newlevel->jmpbuf_decl = NULL_TREE;
413 current_binding_level = newlevel;
414 }
415
416 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
417 and point FNDECL to this BLOCK. */
418
419 void
420 set_current_block_context (tree fndecl)
421 {
422 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
423 DECL_INITIAL (fndecl) = current_binding_level->block;
424 set_block_for_group (current_binding_level->block);
425 }
426
427 /* Set the jmpbuf_decl for the current binding level to DECL. */
428
void
set_block_jmpbuf_decl (tree decl)
{
  /* Remember the setjmp buffer to update for variable-sized definitions
     made in this binding contour.  */
  current_binding_level->jmpbuf_decl = decl;
}
434
435 /* Get the jmpbuf_decl, if any, for the current binding level. */
436
tree
get_block_jmpbuf_decl (void)
{
  /* NULL_TREE unless set_block_jmpbuf_decl was called for this level.  */
  return current_binding_level->jmpbuf_decl;
}
442
443 /* Exit a binding level. Set any BLOCK into the current code group. */
444
void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* The variable and subblock lists were accumulated in reverse order;
     restore declaration order before exposing the BLOCK.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      /* Splice our subblocks in front of the parent's subblock list.  */
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      /* Then recycle the now-unused BLOCK node.  */
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      /* Chain this BLOCK onto the parent's subblock list.  */
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
480
481 /* Exit a binding level and discard the associated BLOCK. */
482
void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* Discard the BLOCK by putting it on the free chain for reuse
     by gnat_pushlevel.  */
  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
497 \f
498 /* Records a ..._DECL node DECL as belonging to the current lexical scope
499 and uses GNAT_NODE for location information and propagating flags. */
500
void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If this decl is public external or at toplevel, there is no context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  /* Suppress warnings for the decl if the GNAT node is absent or has
     warnings turned off.  */
  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (global_bindings_p ())
	{
	  VEC_safe_push (tree, gc, global_decls, decl);

	  /* Built-in functions are additionally recorded separately.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else if (!DECL_EXTERNAL (decl))
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Below, T is left non-null exactly when the name should be
	 propagated to the anonymous variants at the end.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	/* An artificial name is superseded by a source name: fall through
	   with T non-null so the new name is propagated below.  */
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    TYPE_NAME (t) = decl;
    }
}
605 \f
606 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
607 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
608
609 void
610 record_builtin_type (const char *name, tree type, bool artificial_p)
611 {
612 tree type_decl = build_decl (input_location,
613 TYPE_DECL, get_identifier (name), type);
614 DECL_ARTIFICIAL (type_decl) = artificial_p;
615 gnat_pushdecl (type_decl, Empty);
616
617 if (debug_hooks->type_decl)
618 debug_hooks->type_decl (type_decl, false);
619 }
620 \f
621 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
622 finish constructing the record type as a fat pointer type. */
623
void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register: a fat pointer is two
     pointers wide.  */
  TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
642
643 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
644 finish constructing the record or union type. If REP_LEVEL is zero, this
645 record has no representation clause and so will be entirely laid out here.
646 If REP_LEVEL is one, this record has a representation clause and has been
647 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
648 this record is derived from a parent record and thus inherits its layout;
649 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
650 we need to write debug information about this type. */
651
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout properties were already set on entry; they
     control what we are allowed to (re)compute below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* For an untagged record/union field with a known Ada size, use it;
	 otherwise the Ada size is just the GCC size.  */
      if ((TREE_CODE (type) == RECORD_TYPE
	   || TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      /* Fold this field into the running size computations.  */
      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Undo the reversal done above for the scan.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
849
850 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
851 associated with it. It need not be invoked directly in most cases since
852 finish_record_type takes care of doing so, but this can be necessary if
853 a parallel type is to be attached to the record type. */
854
void
rest_of_record_type_compilation (tree record_type)
{
  tree field_list = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  /* Set if at least one field (or qualifier) has non-constant size, which
     forces the parallel XVE/XVU encoding below.  */
  bool var_size = false;

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (code == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size && !TYPE_IS_PADDING_P (record_type))
    {
      /* A variable-sized QUAL_UNION_TYPE is encoded as a plain UNION_TYPE
	 in the parallel type; other codes are kept as-is.  */
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
	orig_name = DECL_NAME (orig_name);

      /* "XVU" for a variable-sized union, "XVE" otherwise; these suffixes
	 are the encodings documented in exp_dbug.ads.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);

      /* Now scan all the fields, replacing each field with a new
	 field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree new_field;
	  tree curpos = bit_position (old_field);
	  bool var = false;
	  unsigned int align = 0;
	  tree pos;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and try
	     again.

	     If this is a union, the position can be taken as zero.  */

	  /* Some computations depend on the shape of the position expression,
	     so strip conversions to make sure it's exposed.  */
	  curpos = remove_conversions (curpos, true);

	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node, align = 0;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* A MULT_EXPR position looks like rounding: the multiplier is
	     taken as the alignment in bits.  */
	  if (!pos && TREE_CODE (curpos) == MULT_EXPR
	      && host_integerp (TREE_OPERAND (curpos, 1), 1))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

	      /* An offset which is a bitwise AND with a negative power of 2
		 means an alignment corresponding to this power of 2.  Note
		 that, as sizetype is sign-extended but nonetheless unsigned,
		 we don't directly use tree_int_cst_sgn.  */
	      offset = remove_conversions (offset, true);
	      if (TREE_CODE (offset) == BIT_AND_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 0)
		  && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
		{
		  unsigned int pow
		    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
		  if (exact_log2 (pow) > 0)
		    align *= pow;
		}

	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  /* Likewise for (x * align) + cst: rounding followed by an
	     addition.  */
	  else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && host_integerp (TREE_OPERAND
				     (TREE_OPERAND (curpos, 0), 1),
				     1))
	    {
	      align
		= tree_low_cst
		  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field,
					    pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */

	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  The "XVL"/"XVA<n>"/"XVL<n>"
	     suffixes encode indirection and alignment per exp_dbug.ads.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Fields are prepended here and the chain is reversed after the
	     loop to restore source order.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type)
	= nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}
1057
1058 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
1059
1060 void
1061 add_parallel_type (tree decl, tree parallel_type)
1062 {
1063 tree d = decl;
1064
1065 while (DECL_PARALLEL_TYPE (d))
1066 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
1067
1068 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1069 }
1070
1071 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1072 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1073 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1074 replace a value of zero with the old size. If HAS_REP is true, we take the
1075 MAX of the end position of this field with LAST_SIZE. In all other cases,
1076 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1077
1078 static tree
1079 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1080 bool has_rep)
1081 {
1082 tree type = TREE_TYPE (last_size);
1083 tree new_size;
1084
1085 if (!special || TREE_CODE (size) != COND_EXPR)
1086 {
1087 new_size = size_binop (PLUS_EXPR, first_bit, size);
1088 if (has_rep)
1089 new_size = size_binop (MAX_EXPR, last_size, new_size);
1090 }
1091
1092 else
1093 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1094 integer_zerop (TREE_OPERAND (size, 1))
1095 ? last_size : merge_sizes (last_size, first_bit,
1096 TREE_OPERAND (size, 1),
1097 1, has_rep),
1098 integer_zerop (TREE_OPERAND (size, 2))
1099 ? last_size : merge_sizes (last_size, first_bit,
1100 TREE_OPERAND (size, 2),
1101 1, has_rep));
1102
1103 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1104 when fed through substitute_in_expr) into thinking that a constant
1105 size is not constant. */
1106 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1107 new_size = TREE_OPERAND (new_size, 0);
1108
1109 return new_size;
1110 }
1111
1112 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1113 related by the addition of a constant. Return that constant if so. */
1114
1115 static tree
1116 compute_related_constant (tree op0, tree op1)
1117 {
1118 tree op0_var, op1_var;
1119 tree op0_con = split_plus (op0, &op0_var);
1120 tree op1_con = split_plus (op1, &op1_var);
1121 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1122
1123 if (operand_equal_p (op0_var, op1_var, 0))
1124 return result;
1125 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1126 return result;
1127 else
1128 return 0;
1129 }
1130
1131 /* Utility function of above to split a tree OP which may be a sum, into a
1132 constant part, which is returned, and a variable part, which is stored
1133 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1134 bitsizetype. */
1135
1136 static tree
1137 split_plus (tree in, tree *pvar)
1138 {
1139 /* Strip NOPS in order to ease the tree traversal and maximize the
1140 potential for constant or plus/minus discovery. We need to be careful
1141 to always return and set *pvar to bitsizetype trees, but it's worth
1142 the effort. */
1143 STRIP_NOPS (in);
1144
1145 *pvar = convert (bitsizetype, in);
1146
1147 if (TREE_CODE (in) == INTEGER_CST)
1148 {
1149 *pvar = bitsize_zero_node;
1150 return convert (bitsizetype, in);
1151 }
1152 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1153 {
1154 tree lhs_var, rhs_var;
1155 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1156 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1157
1158 if (lhs_var == TREE_OPERAND (in, 0)
1159 && rhs_var == TREE_OPERAND (in, 1))
1160 return bitsize_zero_node;
1161
1162 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1163 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1164 }
1165 else
1166 return bitsize_zero_node;
1167 }
1168 \f
1169 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1170 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1171 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1172 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1173 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1174 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1175 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1176 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1177 invisible reference. */
1178
1179 tree
1180 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1181 bool return_unconstrained_p, bool return_by_direct_ref_p,
1182 bool return_by_invisi_ref_p)
1183 {
1184 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1185 the subprogram formal parameters. This list is generated by traversing
1186 the input list of PARM_DECL nodes. */
1187 tree param_type_list = NULL_TREE;
1188 tree t, type;
1189
1190 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1191 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);
1192
1193 /* The list of the function parameter types has to be terminated by the void
1194 type to signal to the back-end that we are not dealing with a variable
1195 parameter subprogram, but that it has a fixed number of parameters. */
1196 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1197
1198 /* The list of argument types has been created in reverse so reverse it. */
1199 param_type_list = nreverse (param_type_list);
1200
1201 type = build_function_type (return_type, param_type_list);
1202
1203 /* TYPE may have been shared since GCC hashes types. If it has a different
1204 CICO_LIST, make a copy. Likewise for the various flags. */
1205 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1206 return_by_direct_ref_p, return_by_invisi_ref_p))
1207 {
1208 type = copy_type (type);
1209 TYPE_CI_CO_LIST (type) = cico_list;
1210 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1211 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1212 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1213 }
1214
1215 return type;
1216 }
1217 \f
1218 /* Return a copy of TYPE but safe to modify in any way. */
1219
1220 tree
1221 copy_type (tree type)
1222 {
1223 tree new_type = copy_node (type);
1224
1225 /* Unshare the language-specific data. */
1226 if (TYPE_LANG_SPECIFIC (type))
1227 {
1228 TYPE_LANG_SPECIFIC (new_type) = NULL;
1229 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1230 }
1231
1232 /* And the contents of the language-specific slot if needed. */
1233 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1234 && TYPE_RM_VALUES (type))
1235 {
1236 TYPE_RM_VALUES (new_type) = NULL_TREE;
1237 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1238 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1239 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1240 }
1241
1242 /* copy_node clears this field instead of copying it, because it is
1243 aliased with TREE_CHAIN. */
1244 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1245
1246 TYPE_POINTER_TO (new_type) = 0;
1247 TYPE_REFERENCE_TO (new_type) = 0;
1248 TYPE_MAIN_VARIANT (new_type) = new_type;
1249 TYPE_NEXT_VARIANT (new_type) = 0;
1250
1251 return new_type;
1252 }
1253 \f
1254 /* Return a subtype of sizetype with range MIN to MAX and whose
1255 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1256 of the associated TYPE_DECL. */
1257
1258 tree
1259 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1260 {
1261 /* First build a type for the desired range. */
1262 tree type = build_nonshared_range_type (sizetype, min, max);
1263
1264 /* Then set the index type. */
1265 SET_TYPE_INDEX_TYPE (type, index);
1266 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1267
1268 return type;
1269 }
1270
1271 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1272 sizetype is used. */
1273
1274 tree
1275 create_range_type (tree type, tree min, tree max)
1276 {
1277 tree range_type;
1278
1279 if (type == NULL_TREE)
1280 type = sizetype;
1281
1282 /* First build a type with the base range. */
1283 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
1284 TYPE_MAX_VALUE (type));
1285
1286 /* Then set the actual range. */
1287 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
1288 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
1289
1290 return range_type;
1291 }
1292 \f
1293 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1294 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1295 its data type. */
1296
1297 tree
1298 create_type_stub_decl (tree type_name, tree type)
1299 {
1300 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1301 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1302 emitted in DWARF. */
1303 tree type_decl = build_decl (input_location,
1304 TYPE_DECL, type_name, type);
1305 DECL_ARTIFICIAL (type_decl) = 1;
1306 return type_decl;
1307 }
1308
1309 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1310 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1311 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1312 true if we need to write debug information about this type. GNAT_NODE
1313 is used for the position of the decl. */
1314
tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
		  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE already carries a named TYPE_DECL, i.e. has been named
     by a previous call.  */
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
			    TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Pass the type declaration to the debug back-end unless this is an
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
     type for which debugging information was not requested, or else an
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
     handled separately.  And do not pass dummy types either.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
	   && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
	   && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
		&& TYPE_IS_DUMMY_P (TREE_TYPE (type)))
	   /* NOTE(review): the last clause appears to target thin pointers
	      to dummy types via their first field -- confirm against the
	      fat/thin pointer layout used elsewhere in this file.  */
	   && !(code == RECORD_TYPE
		&& TYPE_IS_DUMMY_P
		   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}
1369 \f
1370 /* Return a VAR_DECL or CONST_DECL node.
1371
1372 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1373 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1374 the GCC tree for an optional initial expression; NULL_TREE if none.
1375
1376 CONST_FLAG is true if this variable is constant, in which case we might
1377 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1378
1379 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1380 definition to be made visible outside of the current compilation unit, for
1381 instance variable definitions in a package specification.
1382
1383 EXTERN_FLAG is true when processing an external variable declaration (as
1384 opposed to a definition: no storage is to be allocated for the variable).
1385
1386 STATIC_FLAG is only relevant when not at top level. In that case
1387 it indicates whether to always allocate storage to the variable.
1388
1389 GNAT_NODE is used for the position of the decl. */
1390
tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether VAR_INIT is a type-compatible initializer that is constant in
     the sense required for this binding level: a valid static initializer
     for globals/statics, merely TREE_CONSTANT otherwise.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  /* A volatile type makes the object both volatile and side-effecting.  */
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.
     Note that CONSTANT_P implies VAR_INIT was nonzero on entry, so the call
     below never sees a null initializer.  */
  if (extern_flag
      && constant_p
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	   != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Only VAR_DECLs take assembler names, attributes and back-end
     compilation; a CONST_DECL is merely expanded.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    expand_decl (var_decl);

  return var_decl;
}
1489 \f
1490 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1491
1492 static bool
1493 aggregate_type_contains_array_p (tree type)
1494 {
1495 switch (TREE_CODE (type))
1496 {
1497 case RECORD_TYPE:
1498 case UNION_TYPE:
1499 case QUAL_UNION_TYPE:
1500 {
1501 tree field;
1502 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1503 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1504 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1505 return true;
1506 return false;
1507 }
1508
1509 case ARRAY_TYPE:
1510 return true;
1511
1512 default:
1513 gcc_unreachable ();
1514 }
1515 }
1516
1517 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1518 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
1519 nonzero, it is the specified size of the field. If POS is nonzero, it is
1520 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
1521 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
1522 means we are allowed to take the address of the field; if it is negative,
1523 we should not make a bitfield, which is used by make_aligning_type. */
1524
tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      /* PACKED == 1 means the enclosing record is packed; PACKED == -1
	 means Component_Alignment of Storage_Unit (no size forced).  */
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* Cap the alignment at the record's when the record is less
	     aligned than the field type; otherwise use the type's.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      /* Split POS into the byte offset and the bit offset within it.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1659 \f
1660 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1661 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1662 (either an In parameter or an address of a pass-by-ref parameter). */
1663
1664 tree
1665 create_param_decl (tree param_name, tree param_type, bool readonly)
1666 {
1667 tree param_decl = build_decl (input_location,
1668 PARM_DECL, param_name, param_type);
1669
1670 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1671 can lead to various ABI violations. */
1672 if (targetm.calls.promote_prototypes (NULL_TREE)
1673 && INTEGRAL_TYPE_P (param_type)
1674 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1675 {
1676 /* We have to be careful about biased types here. Make a subtype
1677 of integer_type_node with the proper biasing. */
1678 if (TREE_CODE (param_type) == INTEGER_TYPE
1679 && TYPE_BIASED_REPRESENTATION_P (param_type))
1680 {
1681 tree subtype
1682 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1683 TREE_TYPE (subtype) = integer_type_node;
1684 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1685 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1686 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1687 param_type = subtype;
1688 }
1689 else
1690 param_type = integer_type_node;
1691 }
1692
1693 DECL_ARG_TYPE (param_decl) = param_type;
1694 TREE_READONLY (param_decl) = readonly;
1695 return param_decl;
1696 }
1697 \f
1698 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1699
1700 static void
1701 process_attributes (tree decl, struct attrib *attr_list)
1702 {
1703 for (; attr_list; attr_list = attr_list->next)
1704 switch (attr_list->type)
1705 {
1706 case ATTR_MACHINE_ATTRIBUTE:
1707 input_location = DECL_SOURCE_LOCATION (decl);
1708 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1709 NULL_TREE),
1710 ATTR_FLAG_TYPE_IN_PLACE);
1711 break;
1712
1713 case ATTR_LINK_ALIAS:
1714 if (! DECL_EXTERNAL (decl))
1715 {
1716 TREE_STATIC (decl) = 1;
1717 assemble_alias (decl, attr_list->name);
1718 }
1719 break;
1720
1721 case ATTR_WEAK_EXTERNAL:
1722 if (SUPPORTS_WEAK)
1723 declare_weak (decl);
1724 else
1725 post_error ("?weak declarations not supported on this target",
1726 attr_list->error_point);
1727 break;
1728
1729 case ATTR_LINK_SECTION:
1730 if (targetm.have_named_sections)
1731 {
1732 DECL_SECTION_NAME (decl)
1733 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1734 IDENTIFIER_POINTER (attr_list->name));
1735 DECL_COMMON (decl) = 0;
1736 }
1737 else
1738 post_error ("?section attributes are not supported for this target",
1739 attr_list->error_point);
1740 break;
1741
1742 case ATTR_LINK_CONSTRUCTOR:
1743 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1744 TREE_USED (decl) = 1;
1745 break;
1746
1747 case ATTR_LINK_DESTRUCTOR:
1748 DECL_STATIC_DESTRUCTOR (decl) = 1;
1749 TREE_USED (decl) = 1;
1750 break;
1751
1752 case ATTR_THREAD_LOCAL_STORAGE:
1753 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1754 DECL_COMMON (decl) = 0;
1755 break;
1756 }
1757 }
1758 \f
1759 /* Record DECL as a global renaming pointer. */
1760
1761 void
1762 record_global_renaming_pointer (tree decl)
1763 {
1764 gcc_assert (DECL_RENAMED_OBJECT (decl));
1765 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1766 }
1767
1768 /* Invalidate the global renaming pointers. */
1769
1770 void
1771 invalidate_global_renaming_pointers (void)
1772 {
1773 unsigned int i;
1774 tree iter;
1775
1776 FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
1777 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1778
1779 VEC_free (tree, gc, global_renaming_pointers);
1780 }
1781
1782 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1783 a power of 2. */
1784
1785 bool
1786 value_factor_p (tree value, HOST_WIDE_INT factor)
1787 {
1788 if (host_integerp (value, 1))
1789 return tree_low_cst (value, 1) % factor == 0;
1790
1791 if (TREE_CODE (value) == MULT_EXPR)
1792 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1793 || value_factor_p (TREE_OPERAND (value, 1), factor));
1794
1795 return false;
1796 }
1797
1798 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1799 unless we can prove these 2 fields are laid out in such a way that no gap
1800 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1801 is the distance in bits between the end of PREV_FIELD and the starting
1802 position of CURR_FIELD. It is ignored if null. */
1803
1804 static bool
1805 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1806 {
1807 /* If this is the first field of the record, there cannot be any gap */
1808 if (!prev_field)
1809 return false;
1810
1811 /* If the previous field is a union type, then return False: The only
1812 time when such a field is not the last field of the record is when
1813 there are other components at fixed positions after it (meaning there
1814 was a rep clause for every field), in which case we don't want the
1815 alignment constraint to override them. */
1816 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1817 return false;
1818
1819 /* If the distance between the end of prev_field and the beginning of
1820 curr_field is constant, then there is a gap if the value of this
1821 constant is not null. */
1822 if (offset && host_integerp (offset, 1))
1823 return !integer_zerop (offset);
1824
1825 /* If the size and position of the previous field are constant,
1826 then check the sum of this size and position. There will be a gap
1827 iff it is not multiple of the current field alignment. */
1828 if (host_integerp (DECL_SIZE (prev_field), 1)
1829 && host_integerp (bit_position (prev_field), 1))
1830 return ((tree_low_cst (bit_position (prev_field), 1)
1831 + tree_low_cst (DECL_SIZE (prev_field), 1))
1832 % DECL_ALIGN (curr_field) != 0);
1833
1834 /* If both the position and size of the previous field are multiples
1835 of the current field alignment, there cannot be any gap. */
1836 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1837 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1838 return false;
1839
1840 /* Fallback, return that there may be a potential gap */
1841 return true;
1842 }
1843
1844 /* Returns a LABEL_DECL node for LABEL_NAME. */
1845
1846 tree
1847 create_label_decl (tree label_name)
1848 {
1849 tree label_decl = build_decl (input_location,
1850 LABEL_DECL, label_name, void_type_node);
1851
1852 DECL_CONTEXT (label_decl) = current_function_decl;
1853 DECL_MODE (label_decl) = VOIDmode;
1854 DECL_SOURCE_LOCATION (label_decl) = input_location;
1855
1856 return label_decl;
1857 }
1858 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.

   The decl is pushed on the current binding level, its attributes are
   processed, and it is handed to rest_of_decl_compilation, in that order.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
		     tree subprog_type, tree param_decl_list, bool inline_flag,
		     bool public_flag, bool extern_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  /* The RESULT_DECL is created up front from the function type's return
     type and attached below.  */
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && !public_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  /* Transfer the caller's flags onto the decl.  A volatile function type
     makes the decl both volatile and considered to have side effects.  */
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* The result decl is compiler-generated and invisible to the debugger;
     an ADDRESSABLE function type means the result is returned by
     invisible reference.  */
  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  process_attributes (subprog_decl, attr_list);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
1927 \f
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* Make it the function being compiled from now on.  */
  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
1959
/* Finish the definition of the current subprogram BODY and finalize it.
   BODY becomes the DECL_SAVED_TREE of the function set up by a matching
   begin_subprog_body call; the function is then handed to the callgraph
   unless we are only annotating types.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Pop back to the enclosing function (or NULL at top level).  */
  current_function_decl = DECL_CONTEXT (fndecl);

  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, fndecl);

  /* ??? This special handling of nested functions is probably obsolete.  */
  if (!DECL_CONTEXT (fndecl))
    cgraph_finalize_function (fndecl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_get_create_node (fndecl);
}
2008
2009 tree
2010 gnat_builtin_function (tree decl)
2011 {
2012 gnat_pushdecl (decl, Empty);
2013 return decl;
2014 }
2015
2016 /* Return an integer type with the number of bits of precision given by
2017 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2018 it is a signed type. */
2019
2020 tree
2021 gnat_type_for_size (unsigned precision, int unsignedp)
2022 {
2023 tree t;
2024 char type_name[20];
2025
2026 if (precision <= 2 * MAX_BITS_PER_WORD
2027 && signed_and_unsigned_types[precision][unsignedp])
2028 return signed_and_unsigned_types[precision][unsignedp];
2029
2030 if (unsignedp)
2031 t = make_unsigned_type (precision);
2032 else
2033 t = make_signed_type (precision);
2034
2035 if (precision <= 2 * MAX_BITS_PER_WORD)
2036 signed_and_unsigned_types[precision][unsignedp] = t;
2037
2038 if (!TYPE_NAME (t))
2039 {
2040 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2041 TYPE_NAME (t) = get_identifier (type_name);
2042 }
2043
2044 return t;
2045 }
2046
2047 /* Likewise for floating-point types. */
2048
2049 static tree
2050 float_type_for_precision (int precision, enum machine_mode mode)
2051 {
2052 tree t;
2053 char type_name[20];
2054
2055 if (float_types[(int) mode])
2056 return float_types[(int) mode];
2057
2058 float_types[(int) mode] = t = make_node (REAL_TYPE);
2059 TYPE_PRECISION (t) = precision;
2060 layout_type (t);
2061
2062 gcc_assert (TYPE_MODE (t) == mode);
2063 if (!TYPE_NAME (t))
2064 {
2065 sprintf (type_name, "FLOAT_%d", precision);
2066 TYPE_NAME (t) = get_identifier (type_name);
2067 }
2068
2069 return t;
2070 }
2071
2072 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2073 an unsigned type; otherwise a signed type is returned. */
2074
2075 tree
2076 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2077 {
2078 if (mode == BLKmode)
2079 return NULL_TREE;
2080
2081 if (mode == VOIDmode)
2082 return void_type_node;
2083
2084 if (COMPLEX_MODE_P (mode))
2085 return NULL_TREE;
2086
2087 if (SCALAR_FLOAT_MODE_P (mode))
2088 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2089
2090 if (SCALAR_INT_MODE_P (mode))
2091 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2092
2093 if (VECTOR_MODE_P (mode))
2094 {
2095 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2096 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2097 if (inner_type)
2098 return build_vector_type_for_mode (inner_type, mode);
2099 }
2100
2101 return NULL_TREE;
2102 }
2103
2104 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2105
2106 tree
2107 gnat_unsigned_type (tree type_node)
2108 {
2109 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2110
2111 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2112 {
2113 type = copy_node (type);
2114 TREE_TYPE (type) = type_node;
2115 }
2116 else if (TREE_TYPE (type_node)
2117 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2118 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2119 {
2120 type = copy_node (type);
2121 TREE_TYPE (type) = TREE_TYPE (type_node);
2122 }
2123
2124 return type;
2125 }
2126
2127 /* Return the signed version of a TYPE_NODE, a scalar type. */
2128
2129 tree
2130 gnat_signed_type (tree type_node)
2131 {
2132 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2133
2134 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2135 {
2136 type = copy_node (type);
2137 TREE_TYPE (type) = type_node;
2138 }
2139 else if (TREE_TYPE (type_node)
2140 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2141 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2142 {
2143 type = copy_node (type);
2144 TREE_TYPE (type) = TREE_TYPE (type_node);
2145 }
2146
2147 return type;
2148 }
2149
2150 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2151 transparently converted to each other. */
2152
2153 int
2154 gnat_types_compatible_p (tree t1, tree t2)
2155 {
2156 enum tree_code code;
2157
2158 /* This is the default criterion. */
2159 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2160 return 1;
2161
2162 /* We only check structural equivalence here. */
2163 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2164 return 0;
2165
2166 /* Vector types are also compatible if they have the same number of subparts
2167 and the same form of (scalar) element type. */
2168 if (code == VECTOR_TYPE
2169 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2170 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2171 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2172 return 1;
2173
2174 /* Array types are also compatible if they are constrained and have the same
2175 domain(s) and the same component type. */
2176 if (code == ARRAY_TYPE
2177 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2178 || (TYPE_DOMAIN (t1)
2179 && TYPE_DOMAIN (t2)
2180 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2181 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2182 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2183 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2184 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2185 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2186 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2187 return 1;
2188
2189 /* Padding record types are also compatible if they pad the same
2190 type and have the same constant size. */
2191 if (code == RECORD_TYPE
2192 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2193 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2194 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2195 return 1;
2196
2197 return 0;
2198 }
2199
2200 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2201
2202 bool
2203 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2204 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2205 {
2206 return TYPE_CI_CO_LIST (t) == cico_list
2207 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2208 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2209 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2210 }
2211 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   The walk is recursive over the expression tree; MAX_P flips across
   operations that reverse ordering (NEGATE_EXPR, the subtrahend of a
   MINUS_EXPR).  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Declarations and constants have no discriminant references.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call first, then recurse on the result.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument processed.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* A comparison evaluates to 0 or 1, so those are its min and max.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation reverses ordering, so flip MAX_P for it.  */
	    return
	      fold_build1 (code, type,
			   max_size (TREE_OPERAND (exp, 0),
				     code == NEGATE_EXPR ? !max_p : max_p));

	case 2:
	  /* The value of a COMPOUND_EXPR is that of its second operand.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    /* Subtraction reverses ordering for its second operand.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing and the RHS a variable.  */
	    if (max_p
		&& code == MIN_EXPR
		&& TREE_CODE (rhs) == INTEGER_CST
		&& TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p
		     && code == MIN_EXPR
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs)
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold_build2 (code, type, lhs, rhs);
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Ignore the condition: take the max (or min) of the arms.  */
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
2323 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.

   TEMPLATE_TYPE holds two FIELD_DECLs (low and high bound) per dimension,
   which is why the loop below advances FIELD two decls at a time.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  VEC(constructor_elt,gc) *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to reach the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      /* FIELD is the low-bound field, DECL_CHAIN (FIELD) the high bound.  */
      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
2386 \f
2387 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
2388 being built; the new decl is chained on to the front of the list. */
2389
2390 static tree
2391 make_descriptor_field (const char *name, tree type, tree rec_type,
2392 tree initial, tree field_list)
2393 {
2394 tree field
2395 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
2396 NULL_TREE, 0, 0);
2397
2398 DECL_INITIAL (field) = initial;
2399 DECL_CHAIN (field) = field_list;
2400 return field;
2401 }
2402
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.

   The numeric DTYPE and CLASS codes below appear to be the VMS descriptor
   DSC$K_DTYPE_* / DSC$K_CLASS_* encodings -- NOTE(review): confirm against
   the OpenVMS <descrip.h> definitions.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type, pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* Collect the index types; Fortran-convention arrays (except for the
     NCA mechanisms) store their dimensions in reverse order.  */
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* VAX floating-point types are encoded by their decimal digits;
	 other discrete types by their size and signedness.  */
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields are the
     same for all types.  */
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
			     size_in_bytes ((mech == By_Descriptor_A
					     || mech == By_Short_Descriptor_A)
					    ? inner_type : type),
			     field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
			     size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
			     size_int (klass), field_list);

  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
     that we cannot build a template call to the CE routine as it would get a
     wrong source location; instead we use a second placeholder for it.  */
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
			build0 (PLACEHOLDER_EXPR, type));
  tem = build3 (COND_EXPR, pointer32_type,
		build_binary_op (GE_EXPR, boolean_type_node, tem,
				 build_int_cstu (pointer64_type, 0x80000000)),
		build0 (PLACEHOLDER_EXPR, void_type_node),
		convert (pointer32_type, tem));

  field_list
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
			     field_list);

  /* Now add the mechanism-specific trailing fields.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      /* String with bounds: one pair of 32-bit bounds.  */
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type,
				 size_int ((mech == By_Descriptor_NCA
					    || mech == By_Short_Descriptor_NCA)
					   ? 0
					   /* Set FL_COLUMN, FL_COEFF, and
					      FL_BOUNDS.  */
					   : (TREE_CODE (type) == ARRAY_TYPE
					      && TYPE_CONVENTION_FORTRAN_P
						 (type)
					      ? 224 : 192)),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer32_type, record_type,
				 build1 (ADDR_EXPR, pointer32_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I: max - min + 1, scaled by the product of
	     the preceding lengths for the NCA stride fields.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = ((mech == By_Descriptor_NCA ||
		       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MIN_VALUE (idx_arr[i]),
				     field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MAX_VALUE (idx_arr[i]),
				     field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  /* Fields were chained in reverse order, so flip the list before
     finishing the record.  */
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
2701
2702 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2703 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2704 type contains in its DECL_INITIAL the expression to use when a constructor
2705 is made for the type. GNAT_ENTITY is an entity used to print out an error
2706 message if the mechanism cannot be applied to an object of that type and
2707 also for the name. */
2708
2709 tree
2710 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2711 {
2712 tree record_type = make_node (RECORD_TYPE);
2713 tree pointer64_type;
2714 tree field_list = NULL_TREE;
2715 int klass, ndim, i, dtype = 0;
2716 tree inner_type, tem;
2717 tree *idx_arr;
2718
2719 /* If TYPE is an unconstrained array, use the underlying array type. */
2720 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2721 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2722
2723 /* If this is an array, compute the number of dimensions in the array,
2724 get the index types, and point to the inner type. */
2725 if (TREE_CODE (type) != ARRAY_TYPE)
2726 ndim = 0;
2727 else
2728 for (ndim = 1, inner_type = type;
2729 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2730 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2731 ndim++, inner_type = TREE_TYPE (inner_type))
2732 ;
2733
2734 idx_arr = XALLOCAVEC (tree, ndim);
2735
2736 if (mech != By_Descriptor_NCA
2737 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2738 for (i = ndim - 1, inner_type = type;
2739 i >= 0;
2740 i--, inner_type = TREE_TYPE (inner_type))
2741 idx_arr[i] = TYPE_DOMAIN (inner_type);
2742 else
2743 for (i = 0, inner_type = type;
2744 i < ndim;
2745 i++, inner_type = TREE_TYPE (inner_type))
2746 idx_arr[i] = TYPE_DOMAIN (inner_type);
2747
2748 /* Now get the DTYPE value. */
2749 switch (TREE_CODE (type))
2750 {
2751 case INTEGER_TYPE:
2752 case ENUMERAL_TYPE:
2753 case BOOLEAN_TYPE:
2754 if (TYPE_VAX_FLOATING_POINT_P (type))
2755 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2756 {
2757 case 6:
2758 dtype = 10;
2759 break;
2760 case 9:
2761 dtype = 11;
2762 break;
2763 case 15:
2764 dtype = 27;
2765 break;
2766 }
2767 else
2768 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2769 {
2770 case 8:
2771 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2772 break;
2773 case 16:
2774 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2775 break;
2776 case 32:
2777 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2778 break;
2779 case 64:
2780 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2781 break;
2782 case 128:
2783 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2784 break;
2785 }
2786 break;
2787
2788 case REAL_TYPE:
2789 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2790 break;
2791
2792 case COMPLEX_TYPE:
2793 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2794 && TYPE_VAX_FLOATING_POINT_P (type))
2795 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2796 {
2797 case 6:
2798 dtype = 12;
2799 break;
2800 case 9:
2801 dtype = 13;
2802 break;
2803 case 15:
2804 dtype = 29;
2805 }
2806 else
2807 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2808 break;
2809
2810 case ARRAY_TYPE:
2811 dtype = 14;
2812 break;
2813
2814 default:
2815 break;
2816 }
2817
2818 /* Get the CLASS value. */
2819 switch (mech)
2820 {
2821 case By_Descriptor_A:
2822 klass = 4;
2823 break;
2824 case By_Descriptor_NCA:
2825 klass = 10;
2826 break;
2827 case By_Descriptor_SB:
2828 klass = 15;
2829 break;
2830 case By_Descriptor:
2831 case By_Descriptor_S:
2832 default:
2833 klass = 1;
2834 break;
2835 }
2836
2837 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2838 are the same for all types. */
2839 field_list
2840 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2841 record_type, size_int (1), field_list);
2842 field_list
2843 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2844 record_type, size_int (dtype), field_list);
2845 field_list
2846 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2847 record_type, size_int (klass), field_list);
2848 field_list
2849 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2850 record_type, ssize_int (-1), field_list);
2851 field_list
2852 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2853 record_type,
2854 size_in_bytes (mech == By_Descriptor_A
2855 ? inner_type : type),
2856 field_list);
2857
2858 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2859
2860 field_list
2861 = make_descriptor_field ("POINTER", pointer64_type, record_type,
2862 build_unary_op (ADDR_EXPR, pointer64_type,
2863 build0 (PLACEHOLDER_EXPR, type)),
2864 field_list);
2865
2866 switch (mech)
2867 {
2868 case By_Descriptor:
2869 case By_Descriptor_S:
2870 break;
2871
2872 case By_Descriptor_SB:
2873 field_list
2874 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2875 record_type,
2876 (TREE_CODE (type) == ARRAY_TYPE
2877 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2878 : size_zero_node),
2879 field_list);
2880 field_list
2881 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2882 record_type,
2883 (TREE_CODE (type) == ARRAY_TYPE
2884 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2885 : size_zero_node),
2886 field_list);
2887 break;
2888
2889 case By_Descriptor_A:
2890 case By_Descriptor_NCA:
2891 field_list
2892 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2893 record_type, size_zero_node, field_list);
2894
2895 field_list
2896 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2897 record_type, size_zero_node, field_list);
2898
2899 dtype = (mech == By_Descriptor_NCA
2900 ? 0
2901 /* Set FL_COLUMN, FL_COEFF, and
2902 FL_BOUNDS. */
2903 : (TREE_CODE (type) == ARRAY_TYPE
2904 && TYPE_CONVENTION_FORTRAN_P (type)
2905 ? 224 : 192));
2906 field_list
2907 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2908 record_type, size_int (dtype),
2909 field_list);
2910
2911 field_list
2912 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2913 record_type, size_int (ndim), field_list);
2914
2915 field_list
2916 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2917 record_type, size_int (0), field_list);
2918 field_list
2919 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2920 record_type, size_in_bytes (type),
2921 field_list);
2922
2923 /* Now build a pointer to the 0,0,0... element. */
2924 tem = build0 (PLACEHOLDER_EXPR, type);
2925 for (i = 0, inner_type = type; i < ndim;
2926 i++, inner_type = TREE_TYPE (inner_type))
2927 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2928 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2929 NULL_TREE, NULL_TREE);
2930
2931 field_list
2932 = make_descriptor_field ("A0", pointer64_type, record_type,
2933 build1 (ADDR_EXPR, pointer64_type, tem),
2934 field_list);
2935
2936 /* Next come the addressing coefficients. */
2937 tem = size_one_node;
2938 for (i = 0; i < ndim; i++)
2939 {
2940 char fname[3];
2941 tree idx_length
2942 = size_binop (MULT_EXPR, tem,
2943 size_binop (PLUS_EXPR,
2944 size_binop (MINUS_EXPR,
2945 TYPE_MAX_VALUE (idx_arr[i]),
2946 TYPE_MIN_VALUE (idx_arr[i])),
2947 size_int (1)));
2948
2949 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2950 fname[1] = '0' + i, fname[2] = 0;
2951 field_list
2952 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2953 record_type, idx_length, field_list);
2954
2955 if (mech == By_Descriptor_NCA)
2956 tem = idx_length;
2957 }
2958
2959 /* Finally here are the bounds. */
2960 for (i = 0; i < ndim; i++)
2961 {
2962 char fname[3];
2963
2964 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2965 field_list
2966 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2967 record_type,
2968 TYPE_MIN_VALUE (idx_arr[i]), field_list);
2969
2970 fname[0] = 'U';
2971 field_list
2972 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2973 record_type,
2974 TYPE_MAX_VALUE (idx_arr[i]), field_list);
2975 }
2976 break;
2977
2978 default:
2979 post_error ("unsupported descriptor type for &", gnat_entity);
2980 }
2981
2982 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
2983 finish_record_type (record_type, nreverse (field_list), 0, false);
2984 return record_type;
2985 }
2986
2987 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
2988 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
2989
2990 tree
2991 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
2992 {
2993 VEC(constructor_elt,gc) *v = NULL;
2994 tree field;
2995
2996 gnu_expr = maybe_unconstrained_array (gnu_expr);
2997 gnu_expr = gnat_protect_expr (gnu_expr);
2998 gnat_mark_addressable (gnu_expr);
2999
3000 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3001 routine in case we have a 32-bit descriptor. */
3002 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3003 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3004 N_Raise_Constraint_Error),
3005 gnu_expr);
3006
3007 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3008 {
3009 tree value
3010 = convert (TREE_TYPE (field),
3011 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3012 gnu_expr));
3013 CONSTRUCTOR_APPEND_ELT (v, field, value);
3014 }
3015
3016 return gnat_build_constructor (gnu_type, v);
3017 }
3018
3019 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3020 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3021 which the VMS descriptor is passed. */
3022
3023 static tree
3024 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3025 {
3026 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3027 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3028 /* The CLASS field is the 3rd field in the descriptor. */
3029 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3030 /* The POINTER field is the 6th field in the descriptor. */
3031 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
3032
3033 /* Retrieve the value of the POINTER field. */
3034 tree gnu_expr64
3035 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3036
3037 if (POINTER_TYPE_P (gnu_type))
3038 return convert (gnu_type, gnu_expr64);
3039
3040 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3041 {
3042 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3043 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3044 tree template_type = TREE_TYPE (p_bounds_type);
3045 tree min_field = TYPE_FIELDS (template_type);
3046 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3047 tree template_tree, template_addr, aflags, dimct, t, u;
3048 /* See the head comment of build_vms_descriptor. */
3049 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3050 tree lfield, ufield;
3051 VEC(constructor_elt,gc) *v;
3052
3053 /* Convert POINTER to the pointer-to-array type. */
3054 gnu_expr64 = convert (p_array_type, gnu_expr64);
3055
3056 switch (iklass)
3057 {
3058 case 1: /* Class S */
3059 case 15: /* Class SB */
3060 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3061 v = VEC_alloc (constructor_elt, gc, 2);
3062 t = DECL_CHAIN (DECL_CHAIN (klass));
3063 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3064 CONSTRUCTOR_APPEND_ELT (v, min_field,
3065 convert (TREE_TYPE (min_field),
3066 integer_one_node));
3067 CONSTRUCTOR_APPEND_ELT (v, max_field,
3068 convert (TREE_TYPE (max_field), t));
3069 template_tree = gnat_build_constructor (template_type, v);
3070 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3071
3072 /* For class S, we are done. */
3073 if (iklass == 1)
3074 break;
3075
3076 /* Test that we really have a SB descriptor, like DEC Ada. */
3077 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3078 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3079 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3080 /* If so, there is already a template in the descriptor and
3081 it is located right after the POINTER field. The fields are
3082 64bits so they must be repacked. */
3083 t = TREE_CHAIN (pointer);
3084 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3085 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3086
3087 t = TREE_CHAIN (t);
3088 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3089 ufield = convert
3090 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3091
3092 /* Build the template in the form of a constructor. */
3093 v = VEC_alloc (constructor_elt, gc, 2);
3094 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3095 CONSTRUCTOR_APPEND_ELT (v, TREE_CHAIN (TYPE_FIELDS (template_type)),
3096 ufield);
3097 template_tree = gnat_build_constructor (template_type, v);
3098
3099 /* Otherwise use the {1, LENGTH} template we build above. */
3100 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3101 build_unary_op (ADDR_EXPR, p_bounds_type,
3102 template_tree),
3103 template_addr);
3104 break;
3105
3106 case 4: /* Class A */
3107 /* The AFLAGS field is the 3rd field after the pointer in the
3108 descriptor. */
3109 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3110 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3111 /* The DIMCT field is the next field in the descriptor after
3112 aflags. */
3113 t = TREE_CHAIN (t);
3114 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3115 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3116 or FL_COEFF or FL_BOUNDS not set. */
3117 u = build_int_cst (TREE_TYPE (aflags), 192);
3118 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3119 build_binary_op (NE_EXPR, boolean_type_node,
3120 dimct,
3121 convert (TREE_TYPE (dimct),
3122 size_one_node)),
3123 build_binary_op (NE_EXPR, boolean_type_node,
3124 build2 (BIT_AND_EXPR,
3125 TREE_TYPE (aflags),
3126 aflags, u),
3127 u));
3128 /* There is already a template in the descriptor and it is located
3129 in block 3. The fields are 64bits so they must be repacked. */
3130 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3131 (t)))));
3132 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3133 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3134
3135 t = TREE_CHAIN (t);
3136 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3137 ufield = convert
3138 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3139
3140 /* Build the template in the form of a constructor. */
3141 v = VEC_alloc (constructor_elt, gc, 2);
3142 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3143 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3144 ufield);
3145 template_tree = gnat_build_constructor (template_type, v);
3146 template_tree = build3 (COND_EXPR, template_type, u,
3147 build_call_raise (CE_Length_Check_Failed, Empty,
3148 N_Raise_Constraint_Error),
3149 template_tree);
3150 template_addr
3151 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3152 break;
3153
3154 case 10: /* Class NCA */
3155 default:
3156 post_error ("unsupported descriptor type for &", gnat_subprog);
3157 template_addr = integer_zero_node;
3158 break;
3159 }
3160
3161 /* Build the fat pointer in the form of a constructor. */
3162 v = VEC_alloc (constructor_elt, gc, 2);
3163 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
3164 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3165 template_addr);
3166 return gnat_build_constructor (gnu_type, v);
3167 }
3168
3169 else
3170 gcc_unreachable ();
3171 }
3172
3173 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3174 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3175 which the VMS descriptor is passed. */
3176
3177 static tree
3178 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3179 {
3180 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3181 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3182 /* The CLASS field is the 3rd field in the descriptor. */
3183 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3184 /* The POINTER field is the 4th field in the descriptor. */
3185 tree pointer = DECL_CHAIN (klass);
3186
3187 /* Retrieve the value of the POINTER field. */
3188 tree gnu_expr32
3189 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3190
3191 if (POINTER_TYPE_P (gnu_type))
3192 return convert (gnu_type, gnu_expr32);
3193
3194 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3195 {
3196 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3197 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3198 tree template_type = TREE_TYPE (p_bounds_type);
3199 tree min_field = TYPE_FIELDS (template_type);
3200 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3201 tree template_tree, template_addr, aflags, dimct, t, u;
3202 /* See the head comment of build_vms_descriptor. */
3203 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3204 VEC(constructor_elt,gc) *v;
3205
3206 /* Convert POINTER to the pointer-to-array type. */
3207 gnu_expr32 = convert (p_array_type, gnu_expr32);
3208
3209 switch (iklass)
3210 {
3211 case 1: /* Class S */
3212 case 15: /* Class SB */
3213 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3214 v = VEC_alloc (constructor_elt, gc, 2);
3215 t = TYPE_FIELDS (desc_type);
3216 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3217 CONSTRUCTOR_APPEND_ELT (v, min_field,
3218 convert (TREE_TYPE (min_field),
3219 integer_one_node));
3220 CONSTRUCTOR_APPEND_ELT (v, max_field,
3221 convert (TREE_TYPE (max_field), t));
3222 template_tree = gnat_build_constructor (template_type, v);
3223 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3224
3225 /* For class S, we are done. */
3226 if (iklass == 1)
3227 break;
3228
3229 /* Test that we really have a SB descriptor, like DEC Ada. */
3230 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3231 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3232 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3233 /* If so, there is already a template in the descriptor and
3234 it is located right after the POINTER field. */
3235 t = TREE_CHAIN (pointer);
3236 template_tree
3237 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3238 /* Otherwise use the {1, LENGTH} template we build above. */
3239 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3240 build_unary_op (ADDR_EXPR, p_bounds_type,
3241 template_tree),
3242 template_addr);
3243 break;
3244
3245 case 4: /* Class A */
3246 /* The AFLAGS field is the 7th field in the descriptor. */
3247 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3248 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3249 /* The DIMCT field is the 8th field in the descriptor. */
3250 t = TREE_CHAIN (t);
3251 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3252 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3253 or FL_COEFF or FL_BOUNDS not set. */
3254 u = build_int_cst (TREE_TYPE (aflags), 192);
3255 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3256 build_binary_op (NE_EXPR, boolean_type_node,
3257 dimct,
3258 convert (TREE_TYPE (dimct),
3259 size_one_node)),
3260 build_binary_op (NE_EXPR, boolean_type_node,
3261 build2 (BIT_AND_EXPR,
3262 TREE_TYPE (aflags),
3263 aflags, u),
3264 u));
3265 /* There is already a template in the descriptor and it is
3266 located at the start of block 3 (12th field). */
3267 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
3268 template_tree
3269 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3270 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3271 build_call_raise (CE_Length_Check_Failed, Empty,
3272 N_Raise_Constraint_Error),
3273 template_tree);
3274 template_addr
3275 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3276 break;
3277
3278 case 10: /* Class NCA */
3279 default:
3280 post_error ("unsupported descriptor type for &", gnat_subprog);
3281 template_addr = integer_zero_node;
3282 break;
3283 }
3284
3285 /* Build the fat pointer in the form of a constructor. */
3286 v = VEC_alloc (constructor_elt, gc, 2);
3287 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
3288 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3289 template_addr);
3290
3291 return gnat_build_constructor (gnu_type, v);
3292 }
3293
3294 else
3295 gcc_unreachable ();
3296 }
3297
3298 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3299 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3300 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3301 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3302 passed. */
3303
3304 static tree
3305 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3306 bool by_ref, Entity_Id gnat_subprog)
3307 {
3308 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3309 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3310 tree mbo = TYPE_FIELDS (desc_type);
3311 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3312 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
3313 tree real_type, is64bit, gnu_expr32, gnu_expr64;
3314
3315 if (by_ref)
3316 real_type = TREE_TYPE (gnu_type);
3317 else
3318 real_type = gnu_type;
3319
3320 /* If the field name is not MBO, it must be 32-bit and no alternate.
3321 Otherwise primary must be 64-bit and alternate 32-bit. */
3322 if (strcmp (mbostr, "MBO") != 0)
3323 {
3324 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3325 if (by_ref)
3326 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
3327 return ret;
3328 }
3329
3330 /* Build the test for 64-bit descriptor. */
3331 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3332 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3333 is64bit
3334 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3335 build_binary_op (EQ_EXPR, boolean_type_node,
3336 convert (integer_type_node, mbo),
3337 integer_one_node),
3338 build_binary_op (EQ_EXPR, boolean_type_node,
3339 convert (integer_type_node, mbmo),
3340 integer_minus_one_node));
3341
3342 /* Build the 2 possible end results. */
3343 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
3344 if (by_ref)
3345 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
3346 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3347 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3348 if (by_ref)
3349 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
3350
3351 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3352 }
3353
3354 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3355 and the GNAT node GNAT_SUBPROG. */
3356
3357 void
3358 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3359 {
3360 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3361 tree gnu_subprog_param, gnu_stub_param, gnu_param;
3362 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3363 VEC(tree,gc) *gnu_param_vec = NULL;
3364
3365 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3366
3367 /* Initialize the information structure for the function. */
3368 allocate_struct_function (gnu_stub_decl, false);
3369 set_cfun (NULL);
3370
3371 begin_subprog_body (gnu_stub_decl);
3372
3373 start_stmt_group ();
3374 gnat_pushlevel ();
3375
3376 /* Loop over the parameters of the stub and translate any of them
3377 passed by descriptor into a by reference one. */
3378 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3379 gnu_subprog_param = DECL_ARGUMENTS (gnu_subprog);
3380 gnu_stub_param;
3381 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3382 gnu_subprog_param = TREE_CHAIN (gnu_subprog_param))
3383 {
3384 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3385 {
3386 gcc_assert (DECL_BY_REF_P (gnu_subprog_param));
3387 gnu_param
3388 = convert_vms_descriptor (TREE_TYPE (gnu_subprog_param),
3389 gnu_stub_param,
3390 DECL_PARM_ALT_TYPE (gnu_stub_param),
3391 DECL_BY_DOUBLE_REF_P (gnu_subprog_param),
3392 gnat_subprog);
3393 }
3394 else
3395 gnu_param = gnu_stub_param;
3396
3397 VEC_safe_push (tree, gc, gnu_param_vec, gnu_param);
3398 }
3399
3400 /* Invoke the internal subprogram. */
3401 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3402 gnu_subprog);
3403 gnu_subprog_call = build_call_vec (TREE_TYPE (gnu_subprog_type),
3404 gnu_subprog_addr, gnu_param_vec);
3405
3406 /* Propagate the return value, if any. */
3407 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3408 add_stmt (gnu_subprog_call);
3409 else
3410 add_stmt (build_return_expr (DECL_RESULT (gnu_stub_decl),
3411 gnu_subprog_call));
3412
3413 gnat_poplevel ();
3414 end_subprog_body (end_stmt_group ());
3415 }
3416 \f
3417 /* Build a type to be used to represent an aliased object whose nominal type
3418 is an unconstrained array. This consists of a RECORD_TYPE containing a
3419 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3420 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3421 an arbitrary unconstrained object. Use NAME as the name of the record.
3422 DEBUG_INFO_P is true if we need to write debug information for the type. */
3423
3424 tree
3425 build_unc_object_type (tree template_type, tree object_type, tree name,
3426 bool debug_info_p)
3427 {
3428 tree type = make_node (RECORD_TYPE);
3429 tree template_field
3430 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3431 NULL_TREE, NULL_TREE, 0, 1);
3432 tree array_field
3433 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3434 NULL_TREE, NULL_TREE, 0, 1);
3435
3436 TYPE_NAME (type) = name;
3437 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3438 DECL_CHAIN (template_field) = array_field;
3439 finish_record_type (type, template_field, 0, true);
3440
3441 /* Declare it now since it will never be declared otherwise. This is
3442 necessary to ensure that its subtrees are properly marked. */
3443 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
3444
3445 return type;
3446 }
3447
3448 /* Same, taking a thin or fat pointer type instead of a template type. */
3449
3450 tree
3451 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3452 tree name, bool debug_info_p)
3453 {
3454 tree template_type;
3455
3456 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3457
3458 template_type
3459 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3460 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3461 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3462
3463 return
3464 build_unc_object_type (template_type, object_type, name, debug_info_p);
3465 }
3466
3467 /* Shift the component offsets within an unconstrained object TYPE to make it
3468 suitable for use as a designated type for thin pointers. */
3469
3470 void
3471 shift_unc_components_for_thin_pointers (tree type)
3472 {
3473 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3474 allocated past the BOUNDS template. The designated type is adjusted to
3475 have ARRAY at position zero and the template at a negative offset, so
3476 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3477
3478 tree bounds_field = TYPE_FIELDS (type);
3479 tree array_field = DECL_CHAIN (TYPE_FIELDS (type));
3480
3481 DECL_FIELD_OFFSET (bounds_field)
3482 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3483
3484 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3485 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3486 }
3487 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  /* Walk to the tail of NEW_TYPE's pointer chain and append.  */
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Every variant of every appended pointer type is
	 retargeted at NEW_TYPE.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* If we have adjusted named types, finalize them.  This is necessary
	 since we had forced a DWARF typedef for them in gnat_pushdecl.
	 Note the re-walk starts from TYPE_POINTER_TO (old_type) because
	 PTR itself has been advanced to NULL by the loop above.  */
      for (ptr = TYPE_POINTER_TO (old_type); ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	if (TYPE_NAME (ptr) && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL)
	  rest_of_type_decl_compilation (TYPE_NAME (ptr));

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE no longer owns these chains; they now hang off NEW_TYPE.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* Recurse on the designated type of the first field of the fat
	 pointer (the pointer to the array data).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Likewise on the designated type of the second field (the pointer
	 to the bounds template).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* And on the object record type used for thin pointers.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
3619 \f
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.

   A fat pointer here is a 2-field record: the first field (TYPE_FIELDS)
   points to the array data and the second field (its DECL_CHAIN) points
   to the bounds template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* Type of the template, taken from the second field of the fat pointer.  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  /* Type of the pointer-to-array, taken from the first field.  */
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_tree;
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

  /* If EXPR is null, make a fat pointer that contains null pointers to the
     template and array.  */
  if (integer_zerop (expr))
    {
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (p_array_type, expr));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (build_pointer_type (template_type),
				       expr));
      return gnat_build_constructor (type, v);
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  else if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Protect EXPR against multiple evaluation: it is dereferenced twice
	 below (once for the template, once for the array).  */
      expr = gnat_protect_expr (expr);
      /* Peel off an ADDR_EXPR to reach the record directly; otherwise
	 dereference the thin pointer to get at the record.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* First field of the record is the template, second is the array.  */
      template_tree = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  DECL_CHAIN (fields), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			  convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			  build_unary_op (ADDR_EXPR, NULL_TREE,
					  template_tree));
  return gnat_build_constructor (type, v);
}
3684 \f
3685 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3686 is something that is a fat pointer, so convert to it first if it EXPR
3687 is not already a fat pointer. */
3688
3689 static tree
3690 convert_to_thin_pointer (tree type, tree expr)
3691 {
3692 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3693 expr
3694 = convert_to_fat_pointer
3695 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3696
3697 /* We get the pointer to the data and use a NOP_EXPR to make it the
3698 proper GCC type. */
3699 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3700 false);
3701 expr = build1 (NOP_EXPR, type, expr);
3702
3703 return expr;
3704 }
3705 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || gnat_types_compatible_p (type, etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      VEC(constructor_elt,gc) *v;

      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || gnat_types_compatible_p (type,
					  TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion.  But first pad the
	 expression if possible to have the same size on both sides.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	{
	  if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
	    expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					    false, false, false, true),
			    expr);
	  return unchecked_convert (type, expr, false);
	}

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      /* General case: wrap the converted inner object in a one-field
	 constructor of the padding record type.  */
      v = VEC_alloc (constructor_elt, gc, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
	  && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
	     == TYPE_FIELDS (etype))
	unpadded
	  = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype),
						     expr),
				       TYPE_MIN_VALUE (etype)));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template. */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* ??? NOTE(review): no break here, so a VECTOR_CST that is not a
	 mere variant falls through into the CONSTRUCTOR handling below.
	 The guards there cannot match a vector source (they require a
	 RECORD_TYPE source or an ARRAY_TYPE source), so this looks
	 harmless, but confirm whether a break was intended.  */
    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))

	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
	    {
	      constructor_elt *elt;
	      /* We expect only simple constructors.  */
	      if (!SAME_FIELD_P (index, efield))
		break;
	      /* The field must be the same.  */
	      if (!SAME_FIELD_P (efield, field))
		break;
	      elt = VEC_quick_push (constructor_elt, v, NULL);
	      elt->index = field;
	      elt->value = convert (TREE_TYPE (field), value);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fallback to a
	     view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
         compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  v = VEC_alloc (constructor_elt, gc, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
	      elt->index = NULL_TREE;
	      elt->value = value;
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  */
      expr = TREE_OPERAND (expr, 0);
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TYPE_FIELDS
						  (TREE_TYPE (expr)),
						  false));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer,  which is handled by
	       specialized code below which relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }
      }
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     Walk the chain of _parent fields looking for the target type.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, NULL_TREE, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
	  && ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return build1 (NOP_EXPR, type, expr);
	}

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff
	    = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold (convert (sizetype, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_IS_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_IS_FAT_POINTER_P (etype))
	expr
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
4268 \f
4269 /* Remove all conversions that are done in EXP. This includes converting
4270 from a padded type or to a justified modular type. If TRUE_ADDRESS
4271 is true, always return the address of the containing object even if
4272 the address is not bit-aligned. */
4273
4274 tree
4275 remove_conversions (tree exp, bool true_address)
4276 {
4277 switch (TREE_CODE (exp))
4278 {
4279 case CONSTRUCTOR:
4280 if (true_address
4281 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4282 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4283 return
4284 remove_conversions (VEC_index (constructor_elt,
4285 CONSTRUCTOR_ELTS (exp), 0)->value,
4286 true);
4287 break;
4288
4289 case COMPONENT_REF:
4290 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4291 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4292 break;
4293
4294 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4295 CASE_CONVERT:
4296 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4297
4298 default:
4299 break;
4300 }
4301
4302 return exp;
4303 }
4304 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.
   Otherwise EXP is returned unchanged.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new_exp;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the pointer to the array, which is the first
	     field of the fat pointer the reference is built on.  */
	  new_exp = TREE_OPERAND (exp, 0);
	  new_exp
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (new_exp, NULL_TREE,
						   TYPE_FIELDS
						   (TREE_TYPE (new_exp)),
						   false));
	  TREE_READONLY (new_exp) = TREE_READONLY (exp);
	  return new_exp;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the type of the underlying array,
	   extracted from the first field of the fat pointer type.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* ??? NOTE(review): there is no break here, so an expression of
	 unconstrained array type that is neither an UNCONSTRAINED_ARRAY_REF
	 nor a NULL_EXPR falls through into the RECORD_TYPE handling below.
	 Presumably no such expression reaches this point — confirm whether
	 a break was intended.  */
    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_PADDING_P (TREE_TYPE (exp)))
	{
	  new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
	    /* The array is the second field, after the template.  */
	    return
	      build_component_ref (new_exp, NULL_TREE,
				   DECL_CHAIN
				   (TYPE_FIELDS (TREE_TYPE (new_exp))),
				   false);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	return
	  build_component_ref (exp, NULL_TREE,
			       DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))),
			       false);
      break;

    default:
      break;
    }

  return exp;
}
4364
4365 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4366 TYPE_REPRESENTATIVE_ARRAY. */
4367
4368 tree
4369 maybe_vector_array (tree exp)
4370 {
4371 tree etype = TREE_TYPE (exp);
4372
4373 if (VECTOR_TYPE_P (etype))
4374 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4375
4376 return exp;
4377 }
4378 \f
4379 /* Return true if EXPR is an expression that can be folded as an operand
4380 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4381
4382 static bool
4383 can_fold_for_view_convert_p (tree expr)
4384 {
4385 tree t1, t2;
4386
4387 /* The folder will fold NOP_EXPRs between integral types with the same
4388 precision (in the middle-end's sense). We cannot allow it if the
4389 types don't have the same precision in the Ada sense as well. */
4390 if (TREE_CODE (expr) != NOP_EXPR)
4391 return true;
4392
4393 t1 = TREE_TYPE (expr);
4394 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4395
4396 /* Defer to the folder for non-integral conversions. */
4397 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4398 return true;
4399
4400 /* Only fold conversions that preserve both precisions. */
4401 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4402 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4403 return true;
4404
4405 return false;
4406 }
4407
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* A biased source must first be converted to an unbiased variant of
	 its type, so that the regular conversion machinery sees the real
	 (offset) value rather than the biased encoding.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Symmetrically, to produce a biased target value, convert to an
	 unbiased copy of the target type and then reinterpret the bits.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record that contains an
     object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type, rec_type,
				      NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);
    }

  /* Similarly if we are converting from an integral type whose precision
     is not equal to its size, wrap the input in a record first.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
				      NULL_TREE, NULL_TREE, 1, 0);
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  /* Source smaller than target: pad the source up first.  */
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  /* Source larger than target: convert to a padded version of the
	     target, then strip the padding by extracting the field.  */
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
				      false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* Fallback: reinterpret the bits with a VIEW_CONVERT_EXPR.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Shift left then right by (mode size - RM size) bits: the right
	 shift in BASE_TYPE performs the sign or zero extension.  */
      tree base_type
	= gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     the result is no longer considered constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
4624 \f
4625 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4626 the latter being a record type as predicated by Is_Record_Type. */
4627
4628 enum tree_code
4629 tree_code_for_record_type (Entity_Id gnat_type)
4630 {
4631 Node_Id component_list
4632 = Component_List (Type_Definition
4633 (Declaration_Node
4634 (Implementation_Base_Type (gnat_type))));
4635 Node_Id component;
4636
4637 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4638 we have a non-discriminant field outside a variant. In either case,
4639 it's a RECORD_TYPE. */
4640
4641 if (!Is_Unchecked_Union (gnat_type))
4642 return RECORD_TYPE;
4643
4644 for (component = First_Non_Pragma (Component_Items (component_list));
4645 Present (component);
4646 component = Next_Non_Pragma (component))
4647 if (Ekind (Defining_Entity (component)) == E_Component)
4648 return RECORD_TYPE;
4649
4650 return UNION_TYPE;
4651 }
4652
4653 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4654 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4655 according to the presence of an alignment clause on the type or, if it
4656 is an array, on the component type. */
4657
4658 bool
4659 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4660 {
4661 gnat_type = Underlying_Type (gnat_type);
4662
4663 *align_clause = Present (Alignment_Clause (gnat_type));
4664
4665 if (Is_Array_Type (gnat_type))
4666 {
4667 gnat_type = Underlying_Type (Component_Type (gnat_type));
4668 if (Present (Alignment_Clause (gnat_type)))
4669 *align_clause = true;
4670 }
4671
4672 if (!Is_Floating_Point_Type (gnat_type))
4673 return false;
4674
4675 if (UI_To_Int (Esize (gnat_type)) != 64)
4676 return false;
4677
4678 return true;
4679 }
4680
4681 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4682 size is greater or equal to 64 bits, or an array of such a type. Set
4683 ALIGN_CLAUSE according to the presence of an alignment clause on the
4684 type or, if it is an array, on the component type. */
4685
4686 bool
4687 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4688 {
4689 gnat_type = Underlying_Type (gnat_type);
4690
4691 *align_clause = Present (Alignment_Clause (gnat_type));
4692
4693 if (Is_Array_Type (gnat_type))
4694 {
4695 gnat_type = Underlying_Type (Component_Type (gnat_type));
4696 if (Present (Alignment_Clause (gnat_type)))
4697 *align_clause = true;
4698 }
4699
4700 if (!Is_Scalar_Type (gnat_type))
4701 return false;
4702
4703 if (UI_To_Int (Esize (gnat_type)) < 64)
4704 return false;
4705
4706 return true;
4707 }
4708
4709 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4710 component of an aggregate type. */
4711
4712 bool
4713 type_for_nonaliased_component_p (tree gnu_type)
4714 {
4715 /* If the type is passed by reference, we may have pointers to the
4716 component so it cannot be made non-aliased. */
4717 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4718 return false;
4719
4720 /* We used to say that any component of aggregate type is aliased
4721 because the front-end may take 'Reference of it. The front-end
4722 has been enhanced in the meantime so as to use a renaming instead
4723 in most cases, but the back-end can probably take the address of
4724 such a component too so we go for the conservative stance.
4725
4726 For instance, we might need the address of any array type, even
4727 if normally passed by copy, to construct a fat pointer if the
4728 component is used as an actual for an unconstrained formal.
4729
4730 Likewise for record types: even if a specific record subtype is
4731 passed by copy, the parent type might be passed by ref (e.g. if
4732 it's of variable size) and we might take the address of a child
4733 component to pass to a parent formal. We have no way to check
4734 for such conditions here. */
4735 if (AGGREGATE_TYPE_P (gnu_type))
4736 return false;
4737
4738 return true;
4739 }
4740
4741 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
4742
4743 bool
4744 smaller_form_type_p (tree type, tree orig_type)
4745 {
4746 tree size, osize;
4747
4748 /* We're not interested in variants here. */
4749 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
4750 return false;
4751
4752 /* Like a variant, a packable version keeps the original TYPE_NAME. */
4753 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
4754 return false;
4755
4756 size = TYPE_SIZE (type);
4757 osize = TYPE_SIZE (orig_type);
4758
4759 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
4760 return false;
4761
4762 return tree_int_cst_lt (size, osize) != 0;
4763 }
4764
/* Perform final processing on global declarations.  */

/* Anonymous, static dummy VAR_DECL used only as a key for registering
   globally used types; never emitted.  */
static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (!VEC_empty (tree, types_used_by_cur_var_decl))
    {
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, NULL_TREE, void_type_node);
      /* Marking it static and already written keeps anything from being
	 output for the dummy itself.  */
      TREE_STATIC (dummy_global) = 1;
      TREE_ASM_WRITTEN (dummy_global) = 1;
      varpool_mark_needed_node (varpool_node (dummy_global));

      /* Drain the vector, associating each recorded type with the dummy.  */
      while (!VEC_empty (tree, types_used_by_cur_var_decl))
	{
	  tree t = VEC_pop (tree, types_used_by_cur_var_decl);
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_finalize_compilation_unit ();

  /* Emit debug info for all global declarations.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
				  VEC_length (tree, global_decls));
}
4797
4798 /* ************************************************************************
4799 * * GCC builtins support *
4800 * ************************************************************************ */
4801
/* The general scheme is fairly simple:

   For each builtin function/type to be declared, gnat_install_builtins
   calls internal facilities which eventually get to gnat_push_decl, which
   in turn tracks the so-declared builtin function decls in the
   'builtin_decls' global data structure.  When an Intrinsic subprogram
   declaration is processed, we search this global data structure to
   retrieve the associated BUILT_IN DECL node.  */
4810
4811 /* Search the chain of currently available builtin declarations for a node
4812 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4813 found, if any, or NULL_TREE otherwise. */
4814 tree
4815 builtin_decl_for (tree name)
4816 {
4817 unsigned i;
4818 tree decl;
4819
4820 FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
4821 if (DECL_NAME (decl) == name)
4822 return decl;
4823
4824 return NULL_TREE;
4825 }
4826
4827 /* The code below eventually exposes gnat_install_builtins, which declares
4828 the builtin types and functions we might need, either internally or as
4829 user accessible facilities.
4830
4831 ??? This is a first implementation shot, still in rough shape. It is
4832 heavily inspired from the "C" family implementation, with chunks copied
4833 verbatim from there.
4834
4835 Two obvious TODO candidates are
4836 o Use a more efficient name/decl mapping scheme
4837 o Devise a middle-end infrastructure to avoid having to copy
4838 pieces between front-ends. */
4839
4840 /* ----------------------------------------------------------------------- *
4841 * BUILTIN ELEMENTARY TYPES *
4842 * ----------------------------------------------------------------------- */
4843
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

/* Cache of the elementary types above, filled in by
   install_builtin_elementary_types.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
4867
4868 /* Build the void_list_node (void_type_node having been created). */
4869
4870 static tree
4871 build_void_list_node (void)
4872 {
4873 tree t = build_tree_list (NULL_TREE, void_type_node);
4874 return t;
4875 }
4876
4877 /* Used to help initialize the builtin-types.def table. When a type of
4878 the correct size doesn't exist, use error_mark_node instead of NULL.
4879 The later results in segfaults even when a decl using the type doesn't
4880 get invoked. */
4881
4882 static tree
4883 builtin_type_for_size (int size, bool unsignedp)
4884 {
4885 tree type = gnat_type_for_size (size, unsignedp);
4886 return type ? type : error_mark_node;
4887 }
4888
4889 /* Build/push the elementary type decls that builtin functions/types
4890 will need. */
4891
4892 static void
4893 install_builtin_elementary_types (void)
4894 {
4895 signed_size_type_node = gnat_signed_type (size_type_node);
4896 pid_type_node = integer_type_node;
4897 void_list_node = build_void_list_node ();
4898
4899 string_type_node = build_pointer_type (char_type_node);
4900 const_string_type_node
4901 = build_pointer_type (build_qualified_type
4902 (char_type_node, TYPE_QUAL_CONST));
4903 }
4904
/* ----------------------------------------------------------------------- *
 *                          BUILTIN FUNCTION TYPES                         *
 * ----------------------------------------------------------------------- */

/* Now, builtin function types per se.  Each DEF_* macro below expands to
   just the enumerator name, so this enum lists one entry per type defined
   in builtin-types.def, in order.  */

enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4954
4955 /* A helper function for install_builtin_types. Build function type
4956 for DEF with return type RET and N arguments. If VAR is true, then the
4957 function should be variadic after those N arguments.
4958
4959 Takes special care not to ICE if any of the types involved are
4960 error_mark_node, which indicates that said type is not in fact available
4961 (see builtin_type_for_size). In which case the function type as a whole
4962 should be error_mark_node. */
4963
4964 static void
4965 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4966 {
4967 tree args = NULL, t;
4968 va_list list;
4969 int i;
4970
4971 va_start (list, n);
4972 for (i = 0; i < n; ++i)
4973 {
4974 builtin_type a = (builtin_type) va_arg (list, int);
4975 t = builtin_types[a];
4976 if (t == error_mark_node)
4977 goto egress;
4978 args = tree_cons (NULL_TREE, t, args);
4979 }
4980 va_end (list);
4981
4982 args = nreverse (args);
4983 if (!var)
4984 args = chainon (args, void_list_node);
4985
4986 t = builtin_types[ret];
4987 if (t == error_mark_node)
4988 goto egress;
4989 t = build_function_type (t, args);
4990
4991 egress:
4992 builtin_types[def] = t;
4993 va_end (list);
4994 }
4995
4996 /* Build the builtin function types and install them in the builtin_types
4997 array for later use in builtin function decls. */
4998
4999 static void
5000 install_builtin_function_types (void)
5001 {
5002 tree va_list_ref_type_node;
5003 tree va_list_arg_type_node;
5004
5005 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5006 {
5007 va_list_arg_type_node = va_list_ref_type_node =
5008 build_pointer_type (TREE_TYPE (va_list_type_node));
5009 }
5010 else
5011 {
5012 va_list_arg_type_node = va_list_type_node;
5013 va_list_ref_type_node = build_reference_type (va_list_type_node);
5014 }
5015
5016 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5017 builtin_types[ENUM] = VALUE;
5018 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5019 def_fn_type (ENUM, RETURN, 0, 0);
5020 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5021 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5022 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5023 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5024 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5025 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5026 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5027 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5028 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5029 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5030 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5031 ARG6) \
5032 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5033 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5034 ARG6, ARG7) \
5035 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5036 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5037 def_fn_type (ENUM, RETURN, 1, 0);
5038 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5039 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5040 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5041 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5042 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5043 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5044 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5045 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5046 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5047 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5048 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5049 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5050
5051 #include "builtin-types.def"
5052
5053 #undef DEF_PRIMITIVE_TYPE
5054 #undef DEF_FUNCTION_TYPE_1
5055 #undef DEF_FUNCTION_TYPE_2
5056 #undef DEF_FUNCTION_TYPE_3
5057 #undef DEF_FUNCTION_TYPE_4
5058 #undef DEF_FUNCTION_TYPE_5
5059 #undef DEF_FUNCTION_TYPE_6
5060 #undef DEF_FUNCTION_TYPE_VAR_0
5061 #undef DEF_FUNCTION_TYPE_VAR_1
5062 #undef DEF_FUNCTION_TYPE_VAR_2
5063 #undef DEF_FUNCTION_TYPE_VAR_3
5064 #undef DEF_FUNCTION_TYPE_VAR_4
5065 #undef DEF_FUNCTION_TYPE_VAR_5
5066 #undef DEF_POINTER_TYPE
5067 builtin_types[(int) BT_LAST] = NULL_TREE;
5068 }
5069
/* ----------------------------------------------------------------------- *
 *                            BUILTIN ATTRIBUTES                           *
 * ----------------------------------------------------------------------- */

/* One enumerator per attribute node defined in builtin-attrs.def, in
   order; each DEF_* macro expands to just the name.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees for the attribute nodes above, filled in by
   install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5089
/* Fill in the built_in_attributes array from builtin-attrs.def: NULL_TREE,
   integer constant, identifier, or a TREE_LIST built from previously
   constructed entries.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5111
5112 /* Handle a "const" attribute; arguments as in
5113 struct attribute_spec.handler. */
5114
5115 static tree
5116 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5117 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5118 bool *no_add_attrs)
5119 {
5120 if (TREE_CODE (*node) == FUNCTION_DECL)
5121 TREE_READONLY (*node) = 1;
5122 else
5123 *no_add_attrs = true;
5124
5125 return NULL_TREE;
5126 }
5127
5128 /* Handle a "nothrow" attribute; arguments as in
5129 struct attribute_spec.handler. */
5130
5131 static tree
5132 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5133 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5134 bool *no_add_attrs)
5135 {
5136 if (TREE_CODE (*node) == FUNCTION_DECL)
5137 TREE_NOTHROW (*node) = 1;
5138 else
5139 *no_add_attrs = true;
5140
5141 return NULL_TREE;
5142 }
5143
5144 /* Handle a "pure" attribute; arguments as in
5145 struct attribute_spec.handler. */
5146
5147 static tree
5148 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5149 int ARG_UNUSED (flags), bool *no_add_attrs)
5150 {
5151 if (TREE_CODE (*node) == FUNCTION_DECL)
5152 DECL_PURE_P (*node) = 1;
5153 /* ??? TODO: Support types. */
5154 else
5155 {
5156 warning (OPT_Wattributes, "%qs attribute ignored",
5157 IDENTIFIER_POINTER (name));
5158 *no_add_attrs = true;
5159 }
5160
5161 return NULL_TREE;
5162 }
5163
5164 /* Handle a "no vops" attribute; arguments as in
5165 struct attribute_spec.handler. */
5166
5167 static tree
5168 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5169 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5170 bool *ARG_UNUSED (no_add_attrs))
5171 {
5172 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5173 DECL_IS_NOVOPS (*node) = 1;
5174 return NULL_TREE;
5175 }
5176
5177 /* Helper for nonnull attribute handling; fetch the operand number
5178 from the attribute argument list. */
5179
5180 static bool
5181 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5182 {
5183 /* Verify the arg number is a constant. */
5184 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5185 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5186 return false;
5187
5188 *valp = TREE_INT_CST_LOW (arg_num_expr);
5189 return true;
5190 }
5191
5192 /* Handle the "nonnull" attribute. */
5193 static tree
5194 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5195 tree args, int ARG_UNUSED (flags),
5196 bool *no_add_attrs)
5197 {
5198 tree type = *node;
5199 unsigned HOST_WIDE_INT attr_arg_num;
5200
5201 /* If no arguments are specified, all pointer arguments should be
5202 non-null. Verify a full prototype is given so that the arguments
5203 will have the correct types when we actually check them later. */
5204 if (!args)
5205 {
5206 if (!prototype_p (type))
5207 {
5208 error ("nonnull attribute without arguments on a non-prototype");
5209 *no_add_attrs = true;
5210 }
5211 return NULL_TREE;
5212 }
5213
5214 /* Argument list specified. Verify that each argument number references
5215 a pointer argument. */
5216 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5217 {
5218 tree argument;
5219 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5220
5221 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5222 {
5223 error ("nonnull argument has invalid operand number (argument %lu)",
5224 (unsigned long) attr_arg_num);
5225 *no_add_attrs = true;
5226 return NULL_TREE;
5227 }
5228
5229 argument = TYPE_ARG_TYPES (type);
5230 if (argument)
5231 {
5232 for (ck_num = 1; ; ck_num++)
5233 {
5234 if (!argument || ck_num == arg_num)
5235 break;
5236 argument = TREE_CHAIN (argument);
5237 }
5238
5239 if (!argument
5240 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5241 {
5242 error ("nonnull argument with out-of-range operand number "
5243 "(argument %lu, operand %lu)",
5244 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5245 *no_add_attrs = true;
5246 return NULL_TREE;
5247 }
5248
5249 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5250 {
5251 error ("nonnull argument references non-pointer operand "
5252 "(argument %lu, operand %lu)",
5253 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5254 *no_add_attrs = true;
5255 return NULL_TREE;
5256 }
5257 }
5258 }
5259
5260 return NULL_TREE;
5261 }
5262
5263 /* Handle a "sentinel" attribute. */
5264
5265 static tree
5266 handle_sentinel_attribute (tree *node, tree name, tree args,
5267 int ARG_UNUSED (flags), bool *no_add_attrs)
5268 {
5269 if (!prototype_p (*node))
5270 {
5271 warning (OPT_Wattributes,
5272 "%qs attribute requires prototypes with named arguments",
5273 IDENTIFIER_POINTER (name));
5274 *no_add_attrs = true;
5275 }
5276 else
5277 {
5278 if (!stdarg_p (*node))
5279 {
5280 warning (OPT_Wattributes,
5281 "%qs attribute only applies to variadic functions",
5282 IDENTIFIER_POINTER (name));
5283 *no_add_attrs = true;
5284 }
5285 }
5286
5287 if (args)
5288 {
5289 tree position = TREE_VALUE (args);
5290
5291 if (TREE_CODE (position) != INTEGER_CST)
5292 {
5293 warning (0, "requested position is not an integer constant");
5294 *no_add_attrs = true;
5295 }
5296 else
5297 {
5298 if (tree_int_cst_lt (position, integer_zero_node))
5299 {
5300 warning (0, "requested position is less than zero");
5301 *no_add_attrs = true;
5302 }
5303 }
5304 }
5305
5306 return NULL_TREE;
5307 }
5308
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* For a pointer to function, rebuild the pointed-to function type as
       a volatile variant, preserving its const-ness.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5336
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name,
		       tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* The two checks below are deliberately independent (not chained with
     "else"), so a non-public non-function node can draw both warnings.  */
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5358
5359 /* Handle a "malloc" attribute; arguments as in
5360 struct attribute_spec.handler. */
5361
5362 static tree
5363 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5364 int ARG_UNUSED (flags), bool *no_add_attrs)
5365 {
5366 if (TREE_CODE (*node) == FUNCTION_DECL
5367 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5368 DECL_IS_MALLOC (*node) = 1;
5369 else
5370 {
5371 warning (OPT_Wattributes, "%qs attribute ignored",
5372 IDENTIFIER_POINTER (name));
5373 *no_add_attrs = true;
5374 }
5375
5376 return NULL_TREE;
5377 }
5378
/* Fake handler for attributes we don't properly support: silently accept
   the attribute (no diagnostics, *no_add_attrs untouched) while attaching
   no semantics to it.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
5390
5391 /* Handle a "type_generic" attribute. */
5392
5393 static tree
5394 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5395 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5396 bool * ARG_UNUSED (no_add_attrs))
5397 {
5398 /* Ensure we have a function type. */
5399 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5400
5401 /* Ensure we have a variadic function. */
5402 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
5403
5404 return NULL_TREE;
5405 }
5406
5407 /* Handle a "vector_size" attribute; arguments as in
5408 struct attribute_spec.handler. */
5409
5410 static tree
5411 handle_vector_size_attribute (tree *node, tree name, tree args,
5412 int ARG_UNUSED (flags),
5413 bool *no_add_attrs)
5414 {
5415 unsigned HOST_WIDE_INT vecsize, nunits;
5416 enum machine_mode orig_mode;
5417 tree type = *node, new_type, size;
5418
5419 *no_add_attrs = true;
5420
5421 size = TREE_VALUE (args);
5422
5423 if (!host_integerp (size, 1))
5424 {
5425 warning (OPT_Wattributes, "%qs attribute ignored",
5426 IDENTIFIER_POINTER (name));
5427 return NULL_TREE;
5428 }
5429
5430 /* Get the vector size (in bytes). */
5431 vecsize = tree_low_cst (size, 1);
5432
5433 /* We need to provide for vector pointers, vector arrays, and
5434 functions returning vectors. For example:
5435
5436 __attribute__((vector_size(16))) short *foo;
5437
5438 In this case, the mode is SI, but the type being modified is
5439 HI, so we need to look further. */
5440
5441 while (POINTER_TYPE_P (type)
5442 || TREE_CODE (type) == FUNCTION_TYPE
5443 || TREE_CODE (type) == ARRAY_TYPE)
5444 type = TREE_TYPE (type);
5445
5446 /* Get the mode of the type being modified. */
5447 orig_mode = TYPE_MODE (type);
5448
5449 if ((!INTEGRAL_TYPE_P (type)
5450 && !SCALAR_FLOAT_TYPE_P (type)
5451 && !FIXED_POINT_TYPE_P (type))
5452 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5453 && GET_MODE_CLASS (orig_mode) != MODE_INT
5454 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5455 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5456 || TREE_CODE (type) == BOOLEAN_TYPE)
5457 {
5458 error ("invalid vector type for attribute %qs",
5459 IDENTIFIER_POINTER (name));
5460 return NULL_TREE;
5461 }
5462
5463 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5464 {
5465 error ("vector size not an integral multiple of component size");
5466 return NULL;
5467 }
5468
5469 if (vecsize == 0)
5470 {
5471 error ("zero vector size");
5472 return NULL;
5473 }
5474
5475 /* Calculate how many units fit in the vector. */
5476 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5477 if (nunits & (nunits - 1))
5478 {
5479 error ("number of components of the vector not a power of two");
5480 return NULL_TREE;
5481 }
5482
5483 new_type = build_vector_type (type, nunits);
5484
5485 /* Build back pointers if needed. */
5486 *node = reconstruct_complex_type (*node, new_type);
5487
5488 return NULL_TREE;
5489 }
5490
5491 /* Handle a "vector_type" attribute; arguments as in
5492 struct attribute_spec.handler. */
5493
5494 static tree
5495 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5496 int ARG_UNUSED (flags),
5497 bool *no_add_attrs)
5498 {
5499 /* Vector representative type and size. */
5500 tree rep_type = *node;
5501 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5502 tree rep_name;
5503
5504 /* Vector size in bytes and number of units. */
5505 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5506
5507 /* Vector element type and mode. */
5508 tree elem_type;
5509 enum machine_mode elem_mode;
5510
5511 *no_add_attrs = true;
5512
5513 /* Get the representative array type, possibly nested within a
5514 padding record e.g. for alignment purposes. */
5515
5516 if (TYPE_IS_PADDING_P (rep_type))
5517 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5518
5519 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5520 {
5521 error ("attribute %qs applies to array types only",
5522 IDENTIFIER_POINTER (name));
5523 return NULL_TREE;
5524 }
5525
5526 /* Silently punt on variable sizes. We can't make vector types for them,
5527 need to ignore them on front-end generated subtypes of unconstrained
5528 bases, and this attribute is for binding implementors, not end-users, so
5529 we should never get there from legitimate explicit uses. */
5530
5531 if (!host_integerp (rep_size, 1))
5532 return NULL_TREE;
5533
5534 /* Get the element type/mode and check this is something we know
5535 how to make vectors of. */
5536
5537 elem_type = TREE_TYPE (rep_type);
5538 elem_mode = TYPE_MODE (elem_type);
5539
5540 if ((!INTEGRAL_TYPE_P (elem_type)
5541 && !SCALAR_FLOAT_TYPE_P (elem_type)
5542 && !FIXED_POINT_TYPE_P (elem_type))
5543 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5544 && GET_MODE_CLASS (elem_mode) != MODE_INT
5545 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5546 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5547 {
5548 error ("invalid element type for attribute %qs",
5549 IDENTIFIER_POINTER (name));
5550 return NULL_TREE;
5551 }
5552
5553 /* Sanity check the vector size and element type consistency. */
5554
5555 vec_bytes = tree_low_cst (rep_size, 1);
5556
5557 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5558 {
5559 error ("vector size not an integral multiple of component size");
5560 return NULL;
5561 }
5562
5563 if (vec_bytes == 0)
5564 {
5565 error ("zero vector size");
5566 return NULL;
5567 }
5568
5569 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5570 if (vec_units & (vec_units - 1))
5571 {
5572 error ("number of components of the vector not a power of two");
5573 return NULL_TREE;
5574 }
5575
5576 /* Build the vector type and replace. */
5577
5578 *node = build_vector_type (elem_type, vec_units);
5579 rep_name = TYPE_NAME (rep_type);
5580 if (TREE_CODE (rep_name) == TYPE_DECL)
5581 rep_name = DECL_NAME (rep_name);
5582 TYPE_NAME (*node) = rep_name;
5583 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5584
5585 return NULL_TREE;
5586 }
5587
5588 /* ----------------------------------------------------------------------- *
5589 * BUILTIN FUNCTIONS *
5590 * ----------------------------------------------------------------------- */
5591
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.

   FNCODE is the index of the builtin, NAME its __builtin_-prefixed name,
   FNCLASS/FNTYPE/LIBTYPE describe the builtin and its library variant.
   If BOTH_P, also register the unprefixed library name; if FALLBACK_P,
   register the library name as the builtin's fallback implementation.
   If IMPLICIT_P, record the decl as implicitly available too.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (built_in_decls[(int) fncode] != NULL_TREE)
    return;

  /* The library name below is derived by stripping the "__builtin_"
     prefix, so NAME must carry that prefix whenever we need the
     stripped form (i.e. when BOTH_P or FALLBACK_P is set).  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record the __builtin_-prefixed decl in the global tables; the
     unprefixed variant, if any, is intentionally not recorded.  */
  built_in_decls[(int) fncode] = decl;
  if (implicit_p)
    implicit_built_in_decls[(int) fncode] = decl;
}
5631
/* C dialect flags, kept at 0 since the Ada front-end has no notion of
   C90/C99 modes.  NOTE(review): presumably referenced by the COND
   expressions expanded from builtins.def below, so the C-dialect-only
   builtins are skipped — confirm against builtins.def.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5634
/* Install what the common builtins.def offers.

   Each DEF_BUILTIN entry in builtins.def expands to one guarded call to
   def_builtin_1, looking up the function/library types and the attribute
   list in the tables built by the install_builtin_* routines, so those
   must have been populated beforehand.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
5651
/* ----------------------------------------------------------------------- *
 *                       BUILTIN FUNCTIONS INSTALLATION                    *
 * ----------------------------------------------------------------------- */
5655
/* Install the builtin functions we might need.  Top-level entry point:
   the call order matters — the type, prototype and attribute tables are
   filled in first because the builtin definitions installed afterwards
   look them up.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
5676
5677 #include "gt-ada-utils.h"
5678 #include "gtype-ada.h"
This page took 0.309043 seconds and 6 git commands to generate.