1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
37 extern void compiler_error ();
39 static tree get_identifier_list
PROTO((tree
));
40 static tree bot_manip
PROTO((tree
));
41 static tree perm_manip
PROTO((tree
));
42 static tree build_cplus_array_type_1
PROTO((tree
, tree
));
43 static void list_hash_add
PROTO((int, tree
));
44 static int list_hash
PROTO((tree
, tree
, tree
));
45 static tree list_hash_lookup
PROTO((int, int, int, int, tree
, tree
,
47 static void propagate_binfo_offsets
PROTO((tree
, tree
));
48 static void unshare_base_binfos
PROTO((tree
));
49 static int avoid_overlap
PROTO((tree
, tree
));
51 #define CEIL(x,y) (((x) + (y) - 1) / (y))
53 /* Return nonzero if REF is an lvalue valid for this language.
54 Lvalues can be assigned, unless they have TREE_READONLY.
55 Lvalues can have their address taken, unless they have DECL_REGISTER. */
61 if (! language_lvalue_valid (ref
))
64 if (TREE_CODE (TREE_TYPE (ref
)) == REFERENCE_TYPE
)
67 if (ref
== current_class_ptr
&& flag_this_is_variable
<= 0)
70 switch (TREE_CODE (ref
))
72 /* preincrements and predecrements are valid lvals, provided
73 what they refer to are valid lvals. */
74 case PREINCREMENT_EXPR
:
75 case PREDECREMENT_EXPR
:
80 case WITH_CLEANUP_EXPR
:
81 return real_lvalue_p (TREE_OPERAND (ref
, 0));
87 if (TREE_READONLY (ref
) && ! TREE_STATIC (ref
)
88 && DECL_LANG_SPECIFIC (ref
)
89 && DECL_IN_AGGR_P (ref
))
96 if (TREE_CODE (TREE_TYPE (ref
)) != FUNCTION_TYPE
97 && TREE_CODE (TREE_TYPE (ref
)) != METHOD_TYPE
)
101 /* A currently unresolved scope ref. */
103 my_friendly_abort (103);
105 if (TREE_CODE (TREE_OPERAND (ref
, 1)) == FUNCTION_DECL
)
107 return real_lvalue_p (TREE_OPERAND (ref
, 0))
108 && real_lvalue_p (TREE_OPERAND (ref
, 1));
112 return (real_lvalue_p (TREE_OPERAND (ref
, 1))
113 && real_lvalue_p (TREE_OPERAND (ref
, 2)));
119 return real_lvalue_p (TREE_OPERAND (ref
, 1));
123 return (real_lvalue_p (TREE_OPERAND (ref
, 0))
124 && real_lvalue_p (TREE_OPERAND (ref
, 1)));
133 /* This differs from real_lvalue_p in that class rvalues are considered
139 if (! language_lvalue_valid (ref
))
142 if (TREE_CODE (TREE_TYPE (ref
)) == REFERENCE_TYPE
)
145 if (ref
== current_class_ptr
&& flag_this_is_variable
<= 0)
148 switch (TREE_CODE (ref
))
150 /* preincrements and predecrements are valid lvals, provided
151 what they refer to are valid lvals. */
152 case PREINCREMENT_EXPR
:
153 case PREDECREMENT_EXPR
:
160 case WITH_CLEANUP_EXPR
:
161 return lvalue_p (TREE_OPERAND (ref
, 0));
167 if (TREE_READONLY (ref
) && ! TREE_STATIC (ref
)
168 && DECL_LANG_SPECIFIC (ref
)
169 && DECL_IN_AGGR_P (ref
))
176 if (TREE_CODE (TREE_TYPE (ref
)) != FUNCTION_TYPE
177 && TREE_CODE (TREE_TYPE (ref
)) != METHOD_TYPE
)
185 if (IS_AGGR_TYPE (TREE_TYPE (ref
)))
189 /* A currently unresolved scope ref. */
191 my_friendly_abort (103);
193 if (TREE_CODE (TREE_OPERAND (ref
, 1)) == FUNCTION_DECL
)
195 return lvalue_p (TREE_OPERAND (ref
, 0))
196 && lvalue_p (TREE_OPERAND (ref
, 1));
200 return (lvalue_p (TREE_OPERAND (ref
, 1))
201 && lvalue_p (TREE_OPERAND (ref
, 2)));
207 return lvalue_p (TREE_OPERAND (ref
, 1));
211 return (lvalue_p (TREE_OPERAND (ref
, 0))
212 && lvalue_p (TREE_OPERAND (ref
, 1)));
221 /* Return nonzero if REF is an lvalue valid for this language;
222 otherwise, print an error message and return zero. */
225 lvalue_or_else (ref
, string
)
229 int win
= lvalue_p (ref
);
231 error ("non-lvalue in %s", string
);
235 /* INIT is a CALL_EXPR which needs info about its target.
236 TYPE is the type that this initialization should appear to have.
238 Build an encapsulation of the initialization to perform
239 and return it so that it can be processed by language-independent
240 and language-specific expression expanders. */
243 build_cplus_new (type
, init
)
250 if (TREE_CODE (init
) != CALL_EXPR
&& TREE_CODE (init
) != AGGR_INIT_EXPR
)
253 slot
= build (VAR_DECL
, type
);
254 DECL_ARTIFICIAL (slot
) = 1;
255 layout_decl (slot
, 0);
256 rval
= build (AGGR_INIT_EXPR
, type
,
257 TREE_OPERAND (init
, 0), TREE_OPERAND (init
, 1), slot
);
258 TREE_SIDE_EFFECTS (rval
) = 1;
259 rval
= build (TARGET_EXPR
, type
, slot
, rval
, NULL_TREE
, NULL_TREE
);
260 TREE_SIDE_EFFECTS (rval
) = 1;
265 /* Encapsulate the expression INIT in a TARGET_EXPR. */
268 get_target_expr (init
)
274 slot
= build (VAR_DECL
, TREE_TYPE (init
));
275 DECL_ARTIFICIAL (slot
) = 1;
276 layout_decl (slot
, 0);
277 rval
= build (TARGET_EXPR
, TREE_TYPE (init
), slot
, init
,
278 NULL_TREE
, NULL_TREE
);
279 TREE_SIDE_EFFECTS (rval
) = 1;
284 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
285 these CALL_EXPRs with tree nodes that will perform the cleanups. */
288 break_out_cleanups (exp
)
293 if (TREE_CODE (tmp
) == CALL_EXPR
294 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp
)))
295 return build_cplus_new (TREE_TYPE (tmp
), tmp
);
297 while (TREE_CODE (tmp
) == NOP_EXPR
298 || TREE_CODE (tmp
) == CONVERT_EXPR
299 || TREE_CODE (tmp
) == NON_LVALUE_EXPR
)
301 if (TREE_CODE (TREE_OPERAND (tmp
, 0)) == CALL_EXPR
302 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp
, 0))))
304 TREE_OPERAND (tmp
, 0)
305 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp
, 0)),
306 TREE_OPERAND (tmp
, 0));
310 tmp
= TREE_OPERAND (tmp
, 0);
315 /* Recursively perform a preorder search EXP for CALL_EXPRs, making
316 copies where they are found. Returns a deep copy all nodes transitively
317 containing CALL_EXPRs. */
320 break_out_calls (exp
)
323 register tree t1
, t2
= NULL_TREE
;
324 register enum tree_code code
;
325 register int changed
= 0;
328 if (exp
== NULL_TREE
)
331 code
= TREE_CODE (exp
);
333 if (code
== CALL_EXPR
)
334 return copy_node (exp
);
336 /* Don't try and defeat a save_expr, as it should only be done once. */
337 if (code
== SAVE_EXPR
)
340 switch (TREE_CODE_CLASS (code
))
345 case 'c': /* a constant */
346 case 't': /* a type node */
347 case 'x': /* something random, like an identifier or an ERROR_MARK. */
350 case 'd': /* A decl node */
351 #if 0 /* This is bogus. jason 9/21/94 */
353 t1
= break_out_calls (DECL_INITIAL (exp
));
354 if (t1
!= DECL_INITIAL (exp
))
356 exp
= copy_node (exp
);
357 DECL_INITIAL (exp
) = t1
;
362 case 'b': /* A block node */
364 /* Don't know how to handle these correctly yet. Must do a
365 break_out_calls on all DECL_INITIAL values for local variables,
366 and also break_out_calls on all sub-blocks and sub-statements. */
371 case 'e': /* an expression */
372 case 'r': /* a reference */
373 case 's': /* an expression with side effects */
374 for (i
= tree_code_length
[(int) code
] - 1; i
>= 0; i
--)
376 t1
= break_out_calls (TREE_OPERAND (exp
, i
));
377 if (t1
!= TREE_OPERAND (exp
, i
))
379 exp
= copy_node (exp
);
380 TREE_OPERAND (exp
, i
) = t1
;
385 case '<': /* a comparison expression */
386 case '2': /* a binary arithmetic expression */
387 t2
= break_out_calls (TREE_OPERAND (exp
, 1));
388 if (t2
!= TREE_OPERAND (exp
, 1))
390 case '1': /* a unary arithmetic expression */
391 t1
= break_out_calls (TREE_OPERAND (exp
, 0));
392 if (t1
!= TREE_OPERAND (exp
, 0))
396 if (tree_code_length
[(int) code
] == 1)
397 return build1 (code
, TREE_TYPE (exp
), t1
);
399 return build (code
, TREE_TYPE (exp
), t1
, t2
);
406 extern struct obstack
*current_obstack
;
407 extern struct obstack permanent_obstack
, class_obstack
;
408 extern struct obstack
*saveable_obstack
;
409 extern struct obstack
*expression_obstack
;
411 /* Here is how primitive or already-canonicalized types' hash
412 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
413 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
415 /* Construct, lay out and return the type of methods belonging to class
416 BASETYPE and whose arguments are described by ARGTYPES and whose values
417 are described by RETTYPE. If each type exists already, reuse it. */
420 build_cplus_method_type (basetype
, rettype
, argtypes
)
421 tree basetype
, rettype
, argtypes
;
427 /* Make a node of the sort we want. */
428 t
= make_node (METHOD_TYPE
);
430 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
431 TREE_TYPE (t
) = rettype
;
432 if (IS_SIGNATURE (basetype
))
433 ptype
= build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype
),
434 TYPE_READONLY (basetype
),
435 TYPE_VOLATILE (basetype
));
437 ptype
= build_pointer_type (basetype
);
439 /* The actual arglist for this function includes a "hidden" argument
440 which is "this". Put it into the list of argument types. */
442 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
443 TYPE_ARG_TYPES (t
) = argtypes
;
444 TREE_SIDE_EFFECTS (argtypes
) = 1; /* Mark first argtype as "artificial". */
446 /* If we already have such a type, use the old one and free this one.
447 Note that it also frees up the above cons cell if found. */
448 hashcode
= TYPE_HASH (basetype
) + TYPE_HASH (rettype
) + type_hash_list (argtypes
);
449 t
= type_hash_canon (hashcode
, t
);
451 if (TYPE_SIZE (t
) == 0)
458 build_cplus_array_type_1 (elt_type
, index_type
)
462 register struct obstack
*ambient_obstack
= current_obstack
;
463 register struct obstack
*ambient_saveable_obstack
= saveable_obstack
;
466 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
467 make this permanent too. */
468 if (TREE_PERMANENT (elt_type
)
469 && (index_type
== 0 || TREE_PERMANENT (index_type
)))
471 current_obstack
= &permanent_obstack
;
472 saveable_obstack
= &permanent_obstack
;
475 if (processing_template_decl
)
477 t
= make_node (ARRAY_TYPE
);
478 TREE_TYPE (t
) = elt_type
;
479 TYPE_DOMAIN (t
) = index_type
;
482 t
= build_array_type (elt_type
, index_type
);
484 /* Push these needs up so that initialization takes place
486 TYPE_NEEDS_CONSTRUCTING (t
) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type
));
487 TYPE_NEEDS_DESTRUCTOR (t
) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type
));
488 current_obstack
= ambient_obstack
;
489 saveable_obstack
= ambient_saveable_obstack
;
494 build_cplus_array_type (elt_type
, index_type
)
499 int constp
= TYPE_READONLY (elt_type
);
500 int volatilep
= TYPE_VOLATILE (elt_type
);
501 elt_type
= TYPE_MAIN_VARIANT (elt_type
);
503 t
= build_cplus_array_type_1 (elt_type
, index_type
);
505 if (constp
|| volatilep
)
506 t
= cp_build_type_variant (t
, constp
, volatilep
);
511 /* Make a variant type in the proper way for C/C++, propagating qualifiers
512 down to the element type of an array. */
515 cp_build_type_variant (type
, constp
, volatilep
)
517 int constp
, volatilep
;
519 if (type
== error_mark_node
)
522 if (TREE_CODE (type
) == ARRAY_TYPE
)
524 tree real_main_variant
= TYPE_MAIN_VARIANT (type
);
526 push_obstacks (TYPE_OBSTACK (real_main_variant
),
527 TYPE_OBSTACK (real_main_variant
));
528 type
= build_cplus_array_type_1 (cp_build_type_variant
529 (TREE_TYPE (type
), constp
, volatilep
),
532 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
533 make a copy. (TYPE might have come from the hash table and
534 REAL_MAIN_VARIANT might be in some function's obstack.) */
536 if (TYPE_OBSTACK (type
) != TYPE_OBSTACK (real_main_variant
))
538 type
= copy_node (type
);
539 TYPE_POINTER_TO (type
) = TYPE_REFERENCE_TO (type
) = 0;
542 TYPE_MAIN_VARIANT (type
) = real_main_variant
;
546 return build_type_variant (type
, constp
, volatilep
);
549 /* Add OFFSET to all base types of T.
551 OFFSET, which is a type offset, is number of bytes.
553 Note that we don't have to worry about having two paths to the
554 same base type, since this type owns its association list. */
557 propagate_binfo_offsets (binfo
, offset
)
561 tree binfos
= BINFO_BASETYPES (binfo
);
562 int i
, n_baselinks
= binfos
? TREE_VEC_LENGTH (binfos
) : 0;
564 for (i
= 0; i
< n_baselinks
; /* note increment is done in the loop. */)
566 tree base_binfo
= TREE_VEC_ELT (binfos
, i
);
568 if (TREE_VIA_VIRTUAL (base_binfo
))
573 tree base_binfos
= BINFO_BASETYPES (base_binfo
);
574 tree delta
= NULL_TREE
;
576 for (j
= i
+1; j
< n_baselinks
; j
++)
577 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos
, j
)))
579 /* The next basetype offset must take into account the space
580 between the classes, not just the size of each class. */
581 delta
= size_binop (MINUS_EXPR
,
582 BINFO_OFFSET (TREE_VEC_ELT (binfos
, j
)),
583 BINFO_OFFSET (base_binfo
));
588 if (BINFO_OFFSET_ZEROP (base_binfo
))
589 BINFO_OFFSET (base_binfo
) = offset
;
591 BINFO_OFFSET (base_binfo
)
592 = size_binop (PLUS_EXPR
, BINFO_OFFSET (base_binfo
), offset
);
594 BINFO_OFFSET (base_binfo
) = offset
;
597 unshare_base_binfos (base_binfo
);
599 /* Go to our next class that counts for offset propagation. */
602 offset
= size_binop (PLUS_EXPR
, offset
, delta
);
607 /* Makes new binfos for the indirect bases under BASE_BINFO, and updates
608 BINFO_OFFSET for them and their bases. */
611 unshare_base_binfos (base_binfo
)
614 if (BINFO_BASETYPES (base_binfo
))
616 tree base_binfos
= BINFO_BASETYPES (base_binfo
);
617 tree chain
= NULL_TREE
;
620 /* Now unshare the structure beneath BASE_BINFO. */
621 for (j
= TREE_VEC_LENGTH (base_binfos
)-1;
624 tree base_base_binfo
= TREE_VEC_ELT (base_binfos
, j
);
625 if (! TREE_VIA_VIRTUAL (base_base_binfo
))
626 TREE_VEC_ELT (base_binfos
, j
)
627 = make_binfo (BINFO_OFFSET (base_base_binfo
),
629 BINFO_VTABLE (base_base_binfo
),
630 BINFO_VIRTUALS (base_base_binfo
),
632 chain
= TREE_VEC_ELT (base_binfos
, j
);
633 TREE_VIA_PUBLIC (chain
) = TREE_VIA_PUBLIC (base_base_binfo
);
634 TREE_VIA_PROTECTED (chain
) = TREE_VIA_PROTECTED (base_base_binfo
);
635 BINFO_INHERITANCE_CHAIN (chain
) = base_binfo
;
638 /* Completely unshare potentially shared data, and
639 update what is ours. */
640 propagate_binfo_offsets (base_binfo
, BINFO_OFFSET (base_binfo
));
644 /* Finish the work of layout_record, now taking virtual bases into account.
645 Also compute the actual offsets that our base classes will have.
646 This must be performed after the fields are laid out, since virtual
647 baseclasses must lay down at the end of the record.
649 Returns the maximum number of virtual functions any of the
650 baseclasses provide. */
653 layout_basetypes (rec
, max
)
657 tree binfos
= TYPE_BINFO_BASETYPES (rec
);
658 int i
, n_baseclasses
= binfos
? TREE_VEC_LENGTH (binfos
) : 0;
660 /* Get all the virtual base types that this type uses.
661 The TREE_VALUE slot holds the virtual baseclass type. */
662 tree vbase_types
= get_vbase_types (rec
);
664 unsigned int record_align
= MAX (BITS_PER_UNIT
, TYPE_ALIGN (rec
));
665 unsigned int desired_align
;
667 /* Record size so far is CONST_SIZE bits, where CONST_SIZE is an integer. */
668 register unsigned int const_size
= 0;
669 unsigned int nonvirtual_const_size
;
671 #ifdef STRUCTURE_SIZE_BOUNDARY
672 /* Packed structures don't need to have minimum size. */
673 if (! TYPE_PACKED (rec
))
674 record_align
= MAX (record_align
, STRUCTURE_SIZE_BOUNDARY
);
677 CLASSTYPE_VBASECLASSES (rec
) = vbase_types
;
679 my_friendly_assert (TREE_CODE (TYPE_SIZE (rec
)) == INTEGER_CST
, 19970302);
680 const_size
= TREE_INT_CST_LOW (TYPE_SIZE (rec
));
682 nonvirtual_const_size
= const_size
;
686 tree basetype
= BINFO_TYPE (vbase_types
);
689 desired_align
= TYPE_ALIGN (basetype
);
690 record_align
= MAX (record_align
, desired_align
);
693 offset
= integer_zero_node
;
696 /* Give each virtual base type the alignment it wants. */
697 const_size
= CEIL (const_size
, desired_align
) * desired_align
;
698 offset
= size_int (CEIL (const_size
, BITS_PER_UNIT
));
701 if (CLASSTYPE_VSIZE (basetype
) > max
)
702 max
= CLASSTYPE_VSIZE (basetype
);
703 BINFO_OFFSET (vbase_types
) = offset
;
705 /* Every virtual baseclass takes a least a UNIT, so that we can
706 take it's address and get something different for each base. */
707 const_size
+= MAX (BITS_PER_UNIT
,
708 TREE_INT_CST_LOW (CLASSTYPE_SIZE (basetype
)));
710 vbase_types
= TREE_CHAIN (vbase_types
);
715 /* Because a virtual base might take a single byte above,
716 we have to re-adjust the total size to make sure it is
717 a multiple of the alignment. */
718 /* Give the whole object the alignment it wants. */
719 const_size
= CEIL (const_size
, record_align
) * record_align
;
722 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
723 here, as that is for this class, without any virtual base classes. */
724 TYPE_ALIGN (rec
) = record_align
;
725 if (const_size
!= nonvirtual_const_size
)
726 TYPE_SIZE (rec
) = size_int (const_size
);
728 /* Now propagate offset information throughout the lattice. */
729 for (i
= 0; i
< n_baseclasses
; i
++)
731 register tree base_binfo
= TREE_VEC_ELT (binfos
, i
);
732 register tree basetype
= BINFO_TYPE (base_binfo
);
733 tree field
= TYPE_FIELDS (rec
);
735 if (TREE_VIA_VIRTUAL (base_binfo
))
738 my_friendly_assert (TREE_TYPE (field
) == basetype
, 23897);
739 BINFO_OFFSET (base_binfo
)
740 = size_int (CEIL (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field
)),
742 unshare_base_binfos (base_binfo
);
743 TYPE_FIELDS (rec
) = TREE_CHAIN (field
);
746 for (vbase_types
= CLASSTYPE_VBASECLASSES (rec
); vbase_types
;
747 vbase_types
= TREE_CHAIN (vbase_types
))
749 BINFO_INHERITANCE_CHAIN (vbase_types
) = TYPE_BINFO (rec
);
750 unshare_base_binfos (vbase_types
);
756 /* If the empty base field in DECL overlaps with a base of the same type in
757 NEWDECL, which is either another base field or the first data field of
758 the class, pad the base just before NEWDECL and return 1. Otherwise,
762 avoid_overlap (decl
, newdecl
)
767 if (newdecl
== NULL_TREE
768 || ! types_overlap_p (TREE_TYPE (decl
), TREE_TYPE (newdecl
)))
771 for (field
= decl
; TREE_CHAIN (field
) && TREE_CHAIN (field
) != newdecl
;
772 field
= TREE_CHAIN (field
))
775 DECL_SIZE (field
) = integer_one_node
;
780 /* Returns a list of fields to stand in for the base class subobjects
781 of REC. These fields are later removed by layout_basetypes. */
784 build_base_fields (rec
)
787 /* Chain to hold all the new FIELD_DECLs which stand in for base class
789 tree base_decls
= NULL_TREE
;
790 tree binfos
= TYPE_BINFO_BASETYPES (rec
);
791 int n_baseclasses
= binfos
? TREE_VEC_LENGTH (binfos
) : 0;
793 int i
, saw_empty
= 0;
794 unsigned int base_align
= 0;
796 for (i
= 0; i
< n_baseclasses
; ++i
)
798 register tree base_binfo
= TREE_VEC_ELT (binfos
, i
);
799 register tree basetype
= BINFO_TYPE (base_binfo
);
801 if (TYPE_SIZE (basetype
) == 0)
802 /* This error is now reported in xref_tag, thus giving better
803 location information. */
806 if (TREE_VIA_VIRTUAL (base_binfo
))
809 decl
= build_lang_field_decl (FIELD_DECL
, NULL_TREE
, basetype
);
810 DECL_ARTIFICIAL (decl
) = 1;
811 DECL_FIELD_CONTEXT (decl
) = DECL_CLASS_CONTEXT (decl
) = rec
;
812 DECL_SIZE (decl
) = CLASSTYPE_SIZE (basetype
);
813 DECL_ALIGN (decl
) = CLASSTYPE_ALIGN (basetype
);
814 TREE_CHAIN (decl
) = base_decls
;
819 /* Brain damage for backwards compatibility. For no good reason,
820 the old layout_basetypes made every base at least as large as
821 the alignment for the bases up to that point, gratuitously
822 wasting space. So we do the same thing here. */
823 base_align
= MAX (base_align
, DECL_ALIGN (decl
));
825 = size_int (MAX (TREE_INT_CST_LOW (DECL_SIZE (decl
)),
828 else if (DECL_SIZE (decl
) == integer_zero_node
)
832 /* Reverse the list of fields so we allocate the bases in the proper
834 base_decls
= nreverse (base_decls
);
836 /* In the presence of empty base classes, we run the risk of allocating
837 two objects of the same class on top of one another. Avoid that. */
838 if (flag_new_abi
&& saw_empty
)
839 for (decl
= base_decls
; decl
; decl
= TREE_CHAIN (decl
))
841 if (DECL_SIZE (decl
) == integer_zero_node
)
843 /* First step through the following bases until we find
844 an overlap or a non-empty base. */
845 for (nextdecl
= TREE_CHAIN (decl
); nextdecl
;
846 nextdecl
= TREE_CHAIN (nextdecl
))
848 if (avoid_overlap (decl
, nextdecl
)
849 || DECL_SIZE (nextdecl
) != integer_zero_node
)
853 /* If we're still looking, also check against the first
855 for (nextdecl
= TYPE_FIELDS (rec
);
856 nextdecl
&& TREE_CODE (nextdecl
) != FIELD_DECL
;
857 nextdecl
= TREE_CHAIN (nextdecl
))
859 avoid_overlap (decl
, nextdecl
);
867 /* Returns list of virtual base class pointers in a FIELD_DECL chain. */
870 build_vbase_pointer_fields (rec
)
873 /* Chain to hold all the new FIELD_DECLs which point at virtual
875 tree vbase_decls
= NULL_TREE
;
876 tree binfos
= TYPE_BINFO_BASETYPES (rec
);
877 int n_baseclasses
= binfos
? TREE_VEC_LENGTH (binfos
) : 0;
881 /* Handle basetypes almost like fields, but record their
882 offsets differently. */
884 for (i
= 0; i
< n_baseclasses
; i
++)
886 register tree base_binfo
= TREE_VEC_ELT (binfos
, i
);
887 register tree basetype
= BINFO_TYPE (base_binfo
);
889 if (TYPE_SIZE (basetype
) == 0)
890 /* This error is now reported in xref_tag, thus giving better
891 location information. */
894 /* All basetypes are recorded in the association list of the
897 if (TREE_VIA_VIRTUAL (base_binfo
))
900 char *name
= (char *)alloca (TYPE_NAME_LENGTH (basetype
)
901 + sizeof (VBASE_NAME
) + 1);
903 /* The offset for a virtual base class is only used in computing
904 virtual function tables and for initializing virtual base
905 pointers. It is built once `get_vbase_types' is called. */
907 /* If this basetype can come from another vbase pointer
908 without an additional indirection, we will share
909 that pointer. If an indirection is involved, we
910 make our own pointer. */
911 for (j
= 0; j
< n_baseclasses
; j
++)
913 tree other_base_binfo
= TREE_VEC_ELT (binfos
, j
);
914 if (! TREE_VIA_VIRTUAL (other_base_binfo
)
915 && binfo_member (basetype
,
916 CLASSTYPE_VBASECLASSES (BINFO_TYPE
921 sprintf (name
, VBASE_NAME_FORMAT
, TYPE_NAME_STRING (basetype
));
922 decl
= build_lang_field_decl (FIELD_DECL
, get_identifier (name
),
923 build_pointer_type (basetype
));
924 /* If you change any of the below, take a look at all the
925 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
927 DECL_ASSEMBLER_NAME (decl
) = get_identifier (VTABLE_BASE
);
928 DECL_VIRTUAL_P (decl
) = 1;
929 DECL_ARTIFICIAL (decl
) = 1;
930 DECL_FIELD_CONTEXT (decl
) = rec
;
931 DECL_CLASS_CONTEXT (decl
) = rec
;
932 DECL_FCONTEXT (decl
) = basetype
;
933 DECL_SAVED_INSNS (decl
) = NULL_RTX
;
934 DECL_FIELD_SIZE (decl
) = 0;
935 DECL_ALIGN (decl
) = TYPE_ALIGN (ptr_type_node
);
936 TREE_CHAIN (decl
) = vbase_decls
;
937 BINFO_VPTR_FIELD (base_binfo
) = decl
;
941 /* The space this decl occupies has already been accounted for. */
949 /* Hashing of lists so that we don't make duplicates.
950 The entry point is `list_hash_canon'. */
952 /* Each hash table slot is a bucket containing a chain
953 of these structures. */
957 struct list_hash
*next
; /* Next structure in the bucket. */
958 int hashcode
; /* Hash code of this list. */
959 tree list
; /* The list recorded here. */
962 /* Now here is the hash table. When recording a list, it is added
963 to the slot whose index is the hash code mod the table size.
964 Note that the hash table is used for several kinds of lists.
965 While all these live in the same table, they are completely independent,
966 and the hash code is computed differently for each of these. */
968 #define TYPE_HASH_SIZE 59
969 static struct list_hash
*list_hash_table
[TYPE_HASH_SIZE
];
971 /* Compute a hash code for a list (chain of TREE_LIST nodes
972 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
973 TREE_COMMON slots), by adding the hash codes of the individual entries. */
976 list_hash (purpose
, value
, chain
)
977 tree purpose
, value
, chain
;
979 register int hashcode
= 0;
982 hashcode
+= TYPE_HASH (chain
);
985 hashcode
+= TYPE_HASH (value
);
989 hashcode
+= TYPE_HASH (purpose
);
995 /* Look in the type hash table for a type isomorphic to TYPE.
996 If one is found, return it. Otherwise return 0. */
999 list_hash_lookup (hashcode
, via_public
, via_protected
, via_virtual
,
1000 purpose
, value
, chain
)
1001 int hashcode
, via_public
, via_virtual
, via_protected
;
1002 tree purpose
, value
, chain
;
1004 register struct list_hash
*h
;
1006 for (h
= list_hash_table
[hashcode
% TYPE_HASH_SIZE
]; h
; h
= h
->next
)
1007 if (h
->hashcode
== hashcode
1008 && TREE_VIA_VIRTUAL (h
->list
) == via_virtual
1009 && TREE_VIA_PUBLIC (h
->list
) == via_public
1010 && TREE_VIA_PROTECTED (h
->list
) == via_protected
1011 && TREE_PURPOSE (h
->list
) == purpose
1012 && TREE_VALUE (h
->list
) == value
1013 && TREE_CHAIN (h
->list
) == chain
)
1018 /* Add an entry to the list-hash-table
1019 for a list TYPE whose hash code is HASHCODE. */
1022 list_hash_add (hashcode
, list
)
1026 register struct list_hash
*h
;
1028 h
= (struct list_hash
*) obstack_alloc (&class_obstack
, sizeof (struct list_hash
));
1029 h
->hashcode
= hashcode
;
1031 h
->next
= list_hash_table
[hashcode
% TYPE_HASH_SIZE
];
1032 list_hash_table
[hashcode
% TYPE_HASH_SIZE
] = h
;
1035 /* Given TYPE, and HASHCODE its hash code, return the canonical
1036 object for an identical list if one already exists.
1037 Otherwise, return TYPE, and record it as the canonical object
1038 if it is a permanent object.
1040 To use this function, first create a list of the sort you want.
1041 Then compute its hash code from the fields of the list that
1042 make it different from other similar lists.
1043 Then call this function and use the value.
1044 This function frees the list you pass in if it is a duplicate. */
1046 /* Set to 1 to debug without canonicalization. Never set by program. */
1048 static int debug_no_list_hash
= 0;
1051 hash_tree_cons (via_public
, via_virtual
, via_protected
, purpose
, value
, chain
)
1052 int via_public
, via_virtual
, via_protected
;
1053 tree purpose
, value
, chain
;
1055 struct obstack
*ambient_obstack
= current_obstack
;
1059 if (! debug_no_list_hash
)
1061 hashcode
= list_hash (purpose
, value
, chain
);
1062 t
= list_hash_lookup (hashcode
, via_public
, via_protected
, via_virtual
,
1063 purpose
, value
, chain
);
1068 current_obstack
= &class_obstack
;
1070 t
= tree_cons (purpose
, value
, chain
);
1071 TREE_VIA_PUBLIC (t
) = via_public
;
1072 TREE_VIA_PROTECTED (t
) = via_protected
;
1073 TREE_VIA_VIRTUAL (t
) = via_virtual
;
1075 /* If this is a new list, record it for later reuse. */
1076 if (! debug_no_list_hash
)
1077 list_hash_add (hashcode
, t
);
1079 current_obstack
= ambient_obstack
;
1083 /* Constructor for hashed lists. */
1086 hash_tree_chain (value
, chain
)
1089 return hash_tree_cons (0, 0, 0, NULL_TREE
, value
, chain
);
1092 /* Similar, but used for concatenating two lists. */
1095 hash_chainon (list1
, list2
)
1102 if (TREE_CHAIN (list1
) == NULL_TREE
)
1103 return hash_tree_chain (TREE_VALUE (list1
), list2
);
1104 return hash_tree_chain (TREE_VALUE (list1
),
1105 hash_chainon (TREE_CHAIN (list1
), list2
));
1109 get_identifier_list (value
)
1112 tree list
= IDENTIFIER_AS_LIST (value
);
1113 if (list
!= NULL_TREE
1114 && (TREE_CODE (list
) != TREE_LIST
1115 || TREE_VALUE (list
) != value
))
1117 else if (IDENTIFIER_HAS_TYPE_VALUE (value
)
1118 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value
)) == RECORD_TYPE
1119 && IDENTIFIER_TYPE_VALUE (value
)
1120 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value
)))
1122 tree type
= IDENTIFIER_TYPE_VALUE (value
);
1124 if (TYPE_PTRMEMFUNC_P (type
))
1126 else if (type
== current_class_type
)
1127 /* Don't mess up the constructor name. */
1128 list
= tree_cons (NULL_TREE
, value
, NULL_TREE
);
1131 if (! CLASSTYPE_ID_AS_LIST (type
))
1132 CLASSTYPE_ID_AS_LIST (type
)
1133 = perm_tree_cons (NULL_TREE
, TYPE_IDENTIFIER (type
), NULL_TREE
);
1134 list
= CLASSTYPE_ID_AS_LIST (type
);
1141 get_decl_list (value
)
1144 tree list
= NULL_TREE
;
1146 if (TREE_CODE (value
) == IDENTIFIER_NODE
)
1147 list
= get_identifier_list (value
);
1148 else if (TREE_CODE (value
) == RECORD_TYPE
1149 && TYPE_LANG_SPECIFIC (value
)
1150 && value
== TYPE_MAIN_VARIANT (value
))
1151 list
= CLASSTYPE_AS_LIST (value
);
1153 if (list
!= NULL_TREE
)
1155 my_friendly_assert (TREE_CHAIN (list
) == NULL_TREE
, 301);
1159 return build_decl_list (NULL_TREE
, value
);
1162 /* Build an association between TYPE and some parameters:
1164 OFFSET is the offset added to `this' to convert it to a pointer
1167 BINFO is the base binfo to use, if we are deriving from one. This
1168 is necessary, as we want specialized parent binfos from base
1169 classes, so that the VTABLE_NAMEs of bases are for the most derived
1170 type, instead of the simple type.
1172 VTABLE is the virtual function table with which to initialize
1173 sub-objects of type TYPE.
1175 VIRTUALS are the virtual functions sitting in VTABLE.
1177 CHAIN are more associations we must retain. */
1180 make_binfo (offset
, binfo
, vtable
, virtuals
, chain
)
1182 tree vtable
, virtuals
;
1185 tree new_binfo
= make_tree_vec (6);
1188 if (TREE_CODE (binfo
) == TREE_VEC
)
1189 type
= BINFO_TYPE (binfo
);
1193 binfo
= TYPE_BINFO (binfo
);
1196 TREE_CHAIN (new_binfo
) = chain
;
1198 TREE_USED (new_binfo
) = TREE_USED (chain
);
1200 TREE_TYPE (new_binfo
) = TYPE_MAIN_VARIANT (type
);
1201 BINFO_OFFSET (new_binfo
) = offset
;
1202 BINFO_VTABLE (new_binfo
) = vtable
;
1203 BINFO_VIRTUALS (new_binfo
) = virtuals
;
1204 BINFO_VPTR_FIELD (new_binfo
) = NULL_TREE
;
1206 if (binfo
&& BINFO_BASETYPES (binfo
) != NULL_TREE
)
1207 BINFO_BASETYPES (new_binfo
) = copy_node (BINFO_BASETYPES (binfo
));
1211 /* Return the binfo value for ELEM in TYPE. */
1214 binfo_value (elem
, type
)
1218 if (get_base_distance (elem
, type
, 0, (tree
*)0) == -2)
1219 compiler_error ("base class `%s' ambiguous in binfo_value",
1220 TYPE_NAME_STRING (elem
));
1222 return TYPE_BINFO (type
);
1223 if (TREE_CODE (elem
) == RECORD_TYPE
&& TYPE_BINFO (elem
) == type
)
1225 return get_binfo (elem
, type
, 0);
1232 register tree prev
= 0, tmp
, next
;
1233 for (tmp
= path
; tmp
; tmp
= next
)
1235 next
= BINFO_INHERITANCE_CHAIN (tmp
);
1236 BINFO_INHERITANCE_CHAIN (tmp
) = prev
;
1246 unsigned HOST_WIDE_INT n
;
1249 fprintf (stderr
, "type \"%s\"; offset = %d\n",
1250 TYPE_NAME_STRING (BINFO_TYPE (elem
)),
1251 TREE_INT_CST_LOW (BINFO_OFFSET (elem
)));
1252 fprintf (stderr
, "vtable type:\n");
1253 debug_tree (BINFO_TYPE (elem
));
1254 if (BINFO_VTABLE (elem
))
1255 fprintf (stderr
, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem
))));
1257 fprintf (stderr
, "no vtable decl yet\n");
1258 fprintf (stderr
, "virtuals:\n");
1259 virtuals
= BINFO_VIRTUALS (elem
);
1261 n
= skip_rtti_stuff (&virtuals
);
1265 tree fndecl
= TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals
)), 0);
1266 fprintf (stderr
, "%s [%d =? %d]\n",
1267 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl
)),
1268 n
, TREE_INT_CST_LOW (DECL_VINDEX (fndecl
)));
1270 virtuals
= TREE_CHAIN (virtuals
);
1274 /* Initialize an CPLUS_BINDING node that does not live on an obstack. */
1278 struct tree_binding
* node
;
1280 static struct tree_binding
* source
;
1283 extern struct obstack permanent_obstack
;
1284 push_obstacks (&permanent_obstack
, &permanent_obstack
);
1285 source
= (struct tree_binding
*)make_node (CPLUS_BINDING
);
1289 TREE_PERMANENT ((tree
)node
) = 0;
1298 if (TREE_CODE (t
) == FUNCTION_DECL
)
1300 else if (TREE_CODE (t
) == OVERLOAD
)
1302 for (i
=0; t
; t
= OVL_CHAIN (t
))
1307 my_friendly_abort (359);
1312 is_overloaded_fn (x
)
1315 /* XXX A baselink is also considered an overloaded function. */
1316 if (TREE_CODE (x
) == TREE_LIST
)
1318 my_friendly_assert (TREE_CODE (TREE_PURPOSE (x
)) == TREE_VEC
, 388);
1321 return (TREE_CODE (x
) == FUNCTION_DECL
1322 || TREE_CODE (x
) == TEMPLATE_ID_EXPR
1323 || DECL_FUNCTION_TEMPLATE_P (x
)
1324 || TREE_CODE (x
) == OVERLOAD
);
1328 really_overloaded_fn (x
)
1331 /* A baselink is also considered an overloaded function.
1332 This might also be an ambiguous class member. */
1333 while (TREE_CODE (x
) == TREE_LIST
)
1335 return (TREE_CODE (x
) == OVERLOAD
1336 && (TREE_CHAIN (x
) != NULL_TREE
1337 || DECL_FUNCTION_TEMPLATE_P (OVL_FUNCTION (x
))));
1344 my_friendly_assert (is_overloaded_fn (from
), 9);
1345 /* A baselink is also considered an overloaded function. */
1346 if (TREE_CODE (from
) == TREE_LIST
)
1347 from
= TREE_VALUE (from
);
1348 return OVL_CURRENT (from
);
1351 /* Return a new OVL node, concatenating it with the old one. */
1354 ovl_cons (decl
, chain
)
1358 tree result
= make_node (OVERLOAD
);
1359 TREE_TYPE (result
) = unknown_type_node
;
1360 OVL_FUNCTION (result
) = decl
;
1361 TREE_CHAIN (result
) = chain
;
1366 /* Same as ovl_cons, but on the scratch_obstack. */
1369 scratch_ovl_cons (value
, chain
)
1373 register struct obstack
*ambient_obstack
= current_obstack
;
1374 extern struct obstack
*expression_obstack
;
1375 current_obstack
= expression_obstack
;
1376 node
= ovl_cons (value
, chain
);
1377 current_obstack
= ambient_obstack
;
1381 /* Build a new overloaded function. If this is the first one,
1382 just return it; otherwise, ovl_cons the _DECLs */
/* NOTE(review): original lines 1386-1390 (the "first one" early-return
   path promised by the comment above) are missing from this extraction;
   only the consing path survives.  Code left byte-identical.  */
1385 build_overload (decl
, chain
)
/* Wrap a bare function CHAIN in an OVERLOAD before prepending DECL.  */
1391 if (TREE_CODE (chain
) != OVERLOAD
)
1392 chain
= ovl_cons (chain
, NULL_TREE
);
1393 return ovl_cons (decl
, chain
);
1396 /* True if fn is in ovl. */
/* NOTE(review): fragmentary extraction -- the return statements
   (original lines ~1400-1406 and 1409 onward) were dropped.  Code left
   byte-identical; comments only.  */
1399 ovl_member (fn
, ovl
)
/* An empty or non-OVERLOAD OVL cannot contain FN as a chain member.  */
1405 if (!ovl
|| TREE_CODE (ovl
) != OVERLOAD
)
/* Walk the OVERLOAD chain looking for FN.  */
1407 for (; ovl
; ovl
= OVL_CHAIN (ovl
))
1408 if (OVL_FUNCTION (ovl
) == fn
)
1414 is_aggr_type_2 (t1
, t2
)
1417 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1419 return IS_AGGR_TYPE (t1
) && IS_AGGR_TYPE (t2
);
1422 #define PRINT_RING_SIZE 4
/* Return a printable name for DECL (verbosity V), caching the most
   recent PRINT_RING_SIZE function names in a small ring buffer.
   NOTE(review): fragmentary extraction -- the return type, locals,
   braces and some statements are missing; code left byte-identical.  */
1425 lang_printable_name (decl
, v
)
/* Ring cache: parallel arrays of decls and their strdup'd names.  */
1429 static tree decl_ring
[PRINT_RING_SIZE
];
1430 static char *print_ring
[PRINT_RING_SIZE
];
1431 static int ring_counter
;
1434 /* Only cache functions. */
1436 || TREE_CODE (decl
) != FUNCTION_DECL
1437 || DECL_LANG_SPECIFIC (decl
) == 0)
1438 return lang_decl_name (decl
, v
);
1440 /* See if this print name is lying around. */
1441 for (i
= 0; i
< PRINT_RING_SIZE
; i
++)
1442 if (decl_ring
[i
] == decl
)
1443 /* yes, so return it. */
1444 return print_ring
[i
];
/* Cache miss: advance the ring pointer, wrapping at the end.  */
1446 if (++ring_counter
== PRINT_RING_SIZE
)
/* Never evict the slot naming the current function; the missing lines
   evidently bump/wrap ring_counter past it -- TODO confirm.  */
1449 if (current_function_decl
!= NULL_TREE
)
1451 if (decl_ring
[ring_counter
] == current_function_decl
)
1453 if (ring_counter
== PRINT_RING_SIZE
)
1455 if (decl_ring
[ring_counter
] == current_function_decl
)
1456 my_friendly_abort (106);
/* Free the name previously cached in this slot before reusing it.  */
1459 if (print_ring
[ring_counter
])
1460 free (print_ring
[ring_counter
]);
/* Cache and return a fresh copy of DECL's printable name.  */
1462 print_ring
[ring_counter
] = xstrdup (lang_decl_name (decl
, v
));
1463 decl_ring
[ring_counter
] = decl
;
1464 return print_ring
[ring_counter
];
1467 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1468 listed in RAISES. */
/* NOTE(review): fragmentary extraction -- header, braces and several
   statements (including the `return v;' paths) are missing; code left
   byte-identical.  */
1471 build_exception_variant (type
, raises
)
/* Search TYPE's existing variants for one with the same cv-quals and
   the same exception list.  */
1475 tree v
= TYPE_MAIN_VARIANT (type
);
1476 int constp
= TYPE_READONLY (type
);
1477 int volatilep
= TYPE_VOLATILE (type
);
1479 for (; v
; v
= TYPE_NEXT_VARIANT (v
))
/* Only variants with matching cv-qualification are candidates.  */
1481 if (TYPE_READONLY (v
) != constp
1482 || TYPE_VOLATILE (v
) != volatilep
)
1485 /* @@ This should do set equality, not exact match. */
1486 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v
), raises
))
1487 /* List of exceptions raised matches previously found list.
1489 @@ Nice to free up storage used in consing up the
1490 @@ list of exceptions raised. */
1494 /* Need to build a new variant. */
1495 v
= build_type_copy (type
);
/* Make sure the exception list outlives temporary allocation.  */
1497 if (raises
&& ! TREE_PERMANENT (raises
))
1499 push_obstacks_nochange ();
1500 end_temporary_allocation ();
1501 raises
= copy_list (raises
);
1505 TYPE_RAISES_EXCEPTIONS (v
) = raises
;
1509 /* Given a TEMPLATE_TEMPLATE_PARM node T, create a new one together with its
1510 lang_specific field and its corresponding TEMPLATE_DECL node */
/* NOTE(review): fragmentary extraction -- the return type, braces and
   the final `return t2;' are missing; code left byte-identical.  */
1513 copy_template_template_parm (t
)
/* Duplicate the TEMPLATE_DECL that names T...  */
1516 tree
template = TYPE_NAME (t
);
1517 tree t2
= make_lang_type (TEMPLATE_TEMPLATE_PARM
);
1518 template = copy_node (template);
1519 copy_lang_decl (template);
/* ...and cross-link the two copies the same way the originals were.  */
1520 TREE_TYPE (template) = t2
;
1521 TYPE_NAME (t2
) = template;
1522 TYPE_STUB_DECL (t2
) = template;
1524 /* No need to copy these */
1525 TYPE_FIELDS (t2
) = TYPE_FIELDS (t
);
1526 CLASSTYPE_TEMPLATE_INFO (t2
) = CLASSTYPE_TEMPLATE_INFO (t
);
1530 /* Subroutine of copy_to_permanent
1532 Assuming T is a node build bottom-up, make it all exist on
1533 permanent obstack, if it is not permanent already. */
1538 tree (*func
) PROTO((tree
));
1545 if (tmp
= func (t
), tmp
!= NULL_TREE
)
1548 switch (TREE_CODE (t
))
1551 return error_mark_node
;
1556 /* Rather than aborting, return error_mark_node. This allows us
1557 to report a sensible error message on code like this:
1559 void g() { int i; f<i>(7); }
1563 void g() { const int i = 7; f<i>(7); }
1565 however, we must actually return the constant initializer. */
1566 tmp
= decl_constant_value (t
);
1568 return mapcar (tmp
, func
);
1570 return error_mark_node
;
1574 tree chain
= TREE_CHAIN (t
);
1576 TREE_CHAIN (t
) = mapcar (chain
, func
);
1577 TREE_TYPE (t
) = mapcar (TREE_TYPE (t
), func
);
1578 DECL_INITIAL (t
) = mapcar (DECL_INITIAL (t
), func
);
1579 DECL_SIZE (t
) = mapcar (DECL_SIZE (t
), func
);
1585 tree chain
= TREE_CHAIN (t
);
1587 TREE_PURPOSE (t
) = mapcar (TREE_PURPOSE (t
), func
);
1588 TREE_VALUE (t
) = mapcar (TREE_VALUE (t
), func
);
1589 TREE_CHAIN (t
) = mapcar (chain
, func
);
1595 tree chain
= OVL_CHAIN (t
);
1597 OVL_FUNCTION (t
) = mapcar (OVL_FUNCTION (t
), func
);
1598 OVL_CHAIN (t
) = mapcar (chain
, func
);
1604 int len
= TREE_VEC_LENGTH (t
);
1608 TREE_VEC_ELT (t
, len
) = mapcar (TREE_VEC_ELT (t
, len
), func
);
1615 return copy_node (t
);
1619 case AGGR_INIT_EXPR
:
1621 TREE_OPERAND (t
, 0) = mapcar (TREE_OPERAND (t
, 0), func
);
1622 TREE_OPERAND (t
, 1) = mapcar (TREE_OPERAND (t
, 1), func
);
1623 TREE_OPERAND (t
, 2) = mapcar (TREE_OPERAND (t
, 2), func
);
1628 TREE_OPERAND (t
, 0) = mapcar (TREE_OPERAND (t
, 0), func
);
1635 case TRUNC_DIV_EXPR
:
1636 case TRUNC_MOD_EXPR
:
1644 case BIT_ANDTC_EXPR
:
1645 case TRUTH_ANDIF_EXPR
:
1646 case TRUTH_ORIF_EXPR
:
1654 case FLOOR_DIV_EXPR
:
1655 case ROUND_DIV_EXPR
:
1657 case FLOOR_MOD_EXPR
:
1658 case ROUND_MOD_EXPR
:
1660 case PREDECREMENT_EXPR
:
1661 case PREINCREMENT_EXPR
:
1662 case POSTDECREMENT_EXPR
:
1663 case POSTINCREMENT_EXPR
:
1666 case TRY_CATCH_EXPR
:
1667 case WITH_CLEANUP_EXPR
:
1669 TREE_OPERAND (t
, 0) = mapcar (TREE_OPERAND (t
, 0), func
);
1670 TREE_OPERAND (t
, 1) = mapcar (TREE_OPERAND (t
, 1), func
);
1675 TREE_TYPE (t
) = mapcar (TREE_TYPE (t
), func
);
1676 TREE_OPERAND (t
, 0) = mapcar (TREE_OPERAND (t
, 0), func
);
1677 TREE_OPERAND (t
, 1) = mapcar (TREE_OPERAND (t
, 1), func
);
1679 /* tree.def says that operand two is RTL, but
1680 build_call_declarator puts trees in there. */
1681 if (TREE_OPERAND (t
, 2)
1682 && TREE_CODE (TREE_OPERAND (t
, 2)) == TREE_LIST
)
1683 TREE_OPERAND (t
, 2) = mapcar (TREE_OPERAND (t
, 2), func
);
1685 TREE_OPERAND (t
, 2) = NULL_TREE
;
1693 case TRUTH_NOT_EXPR
:
1696 case CLEANUP_POINT_EXPR
:
1698 TREE_OPERAND (t
, 0) = mapcar (TREE_OPERAND (t
, 0), func
);
1702 tmp
= build_pointer_type (mapcar (TREE_TYPE (t
), func
));
1703 return cp_build_type_variant (tmp
, TYPE_READONLY (t
), TYPE_VOLATILE (t
));
1704 case REFERENCE_TYPE
:
1705 tmp
= build_reference_type (mapcar (TREE_TYPE (t
), func
));
1706 return cp_build_type_variant (tmp
, TYPE_READONLY (t
), TYPE_VOLATILE (t
));
1708 tmp
= build_function_type (mapcar (TREE_TYPE (t
), func
),
1709 mapcar (TYPE_ARG_TYPES (t
), func
));
1710 return cp_build_type_variant (tmp
, TYPE_READONLY (t
), TYPE_VOLATILE (t
));
1712 tmp
= build_cplus_array_type (mapcar (TREE_TYPE (t
), func
),
1713 mapcar (TYPE_DOMAIN (t
), func
));
1714 return cp_build_type_variant (tmp
, TYPE_READONLY (t
), TYPE_VOLATILE (t
));
1716 tmp
= build_index_type (mapcar (TYPE_MAX_VALUE (t
), func
));
1717 return cp_build_type_variant (tmp
, TYPE_READONLY (t
), TYPE_VOLATILE (t
));
1719 tmp
= build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t
), func
),
1720 mapcar (TREE_TYPE (t
), func
));
1721 return cp_build_type_variant (tmp
, TYPE_READONLY (t
), TYPE_VOLATILE (t
));
1723 tmp
= build_cplus_method_type
1724 (mapcar (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (t
))), func
),
1725 mapcar (TREE_TYPE (t
), func
),
1726 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t
)), func
));
1727 return cp_build_type_variant (tmp
, TYPE_READONLY (t
), TYPE_VOLATILE (t
));
1731 TREE_REALPART (t
) = mapcar (TREE_REALPART (t
), func
);
1732 TREE_IMAGPART (t
) = mapcar (TREE_REALPART (t
), func
);
1737 CONSTRUCTOR_ELTS (t
) = mapcar (CONSTRUCTOR_ELTS (t
), func
);
1740 case TEMPLATE_TEMPLATE_PARM
:
1741 return copy_template_template_parm (t
);
1745 TREE_OPERAND (t
, 0) = mapcar (TREE_OPERAND (t
, 0), func
);
1746 TREE_OPERAND (t
, 1) = mapcar (TREE_OPERAND (t
, 1), func
);
1747 TREE_OPERAND (t
, 2) = NULL_TREE
;
1751 if (TYPE_PTRMEMFUNC_P (t
))
1752 return build_ptrmemfunc_type
1753 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t
), func
));
1754 /* else fall through */
1756 /* This list is incomplete, but should suffice for now.
1757 It is very important that `sorry' not call
1758 `report_error_function'. That could cause an infinite loop. */
1760 sorry ("initializer contains unrecognized tree code");
1761 return error_mark_node
;
1764 my_friendly_abort (107);
1773 if (TREE_PERMANENT (t
))
1776 /* Support `void f () { extern int i; A<&i> a; }' */
1777 if ((TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == FUNCTION_DECL
)
1782 /* copy_rtx won't make a new SYMBOL_REF, so call make_decl_rtl again. */
1784 make_decl_rtl (t
, NULL_PTR
, 1);
1791 /* Assuming T is a node built bottom-up, make it all exist on
1792 permanent obstack, if it is not permanent already. */
1795 copy_to_permanent (t
)
1798 if (t
== NULL_TREE
|| TREE_PERMANENT (t
))
1801 push_obstacks_nochange ();
1802 end_temporary_allocation ();
1804 t
= mapcar (t
, perm_manip
);
1811 #ifdef GATHER_STATISTICS
1812 extern int depth_reached
;
/* Dump C++-front-end memory and search statistics to stderr.
   NOTE(review): fragmentary extraction -- the #endif lines and the tail
   of the final fprintf are missing; code left byte-identical.  */
1816 print_lang_statistics ()
1818 extern struct obstack decl_obstack
;
1819 print_obstack_statistics ("class_obstack", &class_obstack
);
1820 print_obstack_statistics ("decl_obstack", &decl_obstack
);
1821 print_search_statistics ();
1822 print_class_statistics ();
1823 #ifdef GATHER_STATISTICS
1824 fprintf (stderr
, "maximum template instantiation depth reached: %d\n",
/* This is used by the `assert' macro.  It is provided in libgcc.a,
   which `cc' doesn't know how to link.  Note that the C++ front-end
   no longer actually uses the `assert' macro (instead, it calls
   my_friendly_assert).  But all of the back-end files still need this.  */

void
__eprintf (string, expression, line, filename)
     const char *string;
     const char *expression;
     int line;
     const char *filename;
{
  /* STRING is a compiled-in format from the assert macro (never user
     input), so using it as the format string is safe here.  */
  fprintf (stderr, string, expression, line, filename);
  fflush (stderr);
  abort ();
}
1853 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1854 (which is an ARRAY_TYPE). This counts only elements of the top
1858 array_type_nelts_top (type
)
1861 return fold (build (PLUS_EXPR
, sizetype
,
1862 array_type_nelts (type
),
1866 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1867 (which is an ARRAY_TYPE). This one is a recursive count of all
1868 ARRAY_TYPEs that are clumped together. */
1871 array_type_nelts_total (type
)
1874 tree sz
= array_type_nelts_top (type
);
1875 type
= TREE_TYPE (type
);
1876 while (TREE_CODE (type
) == ARRAY_TYPE
)
1878 tree n
= array_type_nelts_top (type
);
1879 sz
= fold (build (MULT_EXPR
, sizetype
, sz
, n
));
1880 type
= TREE_TYPE (type
);
1890 if (TREE_CODE (t
) != TREE_LIST
&& ! TREE_SIDE_EFFECTS (t
))
1892 else if (TREE_CODE (t
) == TARGET_EXPR
)
1894 if (TREE_CODE (TREE_OPERAND (t
, 1)) == AGGR_INIT_EXPR
)
1896 mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 1), 0), 0));
1897 return build_cplus_new
1898 (TREE_TYPE (t
), break_out_target_exprs (TREE_OPERAND (t
, 1)));
1901 TREE_OPERAND (t
, 0) = build (VAR_DECL
, TREE_TYPE (t
));
1902 layout_decl (TREE_OPERAND (t
, 0), 0);
1905 else if (TREE_CODE (t
) == CALL_EXPR
)
1906 mark_used (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
1911 /* Actually, we'll just clean out the target exprs for the moment. */
1914 break_out_target_exprs (t
)
1917 return mapcar (t
, bot_manip
);
1920 /* Obstack used for allocating nodes in template function and variable
1923 /* Similar to `build_nt', except we build
1924 on the permanent_obstack, regardless. */
/* NOTE(review): fragmentary extraction -- the return type, the non-ANSI
   parameter declarations, va_start/va_end, several locals and the final
   `return t;' are missing; code left byte-identical.  */
1927 build_min_nt
VPROTO((enum tree_code code
, ...))
1930 enum tree_code code
;
/* Remember the incoming obstack so it can be restored on exit.  */
1932 register struct obstack
*ambient_obstack
= expression_obstack
;
1935 register int length
;
1941 code
= va_arg (p
, enum tree_code
);
/* Force all allocation below onto the permanent obstack.  */
1944 expression_obstack
= &permanent_obstack
;
1946 t
= make_node (code
);
1947 length
= tree_code_length
[(int) code
];
1948 TREE_COMPLEXITY (t
) = lineno
;
/* Copy each vararg operand to permanent storage as it is installed.  */
1950 for (i
= 0; i
< length
; i
++)
1952 tree x
= va_arg (p
, tree
);
1953 TREE_OPERAND (t
, i
) = copy_to_permanent (x
);
1957 expression_obstack
= ambient_obstack
;
1961 /* Similar to `build', except we build
1962 on the permanent_obstack, regardless. */
/* NOTE(review): fragmentary extraction -- the return type, parameter
   declarations, va_start/va_end, the installation of TT (evidently the
   node's type) and the final `return t;' are missing; code left
   byte-identical.  */
1965 build_min
VPROTO((enum tree_code code
, tree tt
, ...))
1968 enum tree_code code
;
/* Remember the incoming obstack so it can be restored on exit.  */
1971 register struct obstack
*ambient_obstack
= expression_obstack
;
1974 register int length
;
1980 code
= va_arg (p
, enum tree_code
);
1981 tt
= va_arg (p
, tree
);
/* Force all allocation below onto the permanent obstack.  */
1984 expression_obstack
= &permanent_obstack
;
1986 t
= make_node (code
);
1987 length
= tree_code_length
[(int) code
];
1989 TREE_COMPLEXITY (t
) = lineno
;
/* Copy each vararg operand to permanent storage as it is installed.  */
1991 for (i
= 0; i
< length
; i
++)
1993 tree x
= va_arg (p
, tree
);
1994 TREE_OPERAND (t
, i
) = copy_to_permanent (x
);
1998 expression_obstack
= ambient_obstack
;
2002 /* Same as `tree_cons' but make a permanent object. */
2005 min_tree_cons (purpose
, value
, chain
)
2006 tree purpose
, value
, chain
;
2009 register struct obstack
*ambient_obstack
= current_obstack
;
2010 current_obstack
= &permanent_obstack
;
2012 node
= tree_cons (copy_to_permanent (purpose
),
2013 copy_to_permanent (value
), chain
);
2014 current_obstack
= ambient_obstack
;
2022 if (TREE_CODE (t
) == TYPE_DECL
)
2024 if (TREE_CODE_CLASS (TREE_CODE (t
)) == 't')
2025 return TYPE_STUB_DECL (t
);
2027 my_friendly_abort (42);
2029 /* Stop compiler from complaining control reaches end of non-void function. */
/* Return nonzero if node T sits exactly at the current top of OBSTACK
   and could therefore be freed back to it.
   NOTE(review): fragmentary extraction -- the return type, braces, the
   `size' declaration, the `else' before my_friendly_abort, and the
   return statements are missing; code left byte-identical.  */
2034 can_free (obstack
, t
)
2035 struct obstack
*obstack
;
/* Only TREE_VECs are handled; compute their allocated size.  */
2040 if (TREE_CODE (t
) == TREE_VEC
)
2041 size
= (TREE_VEC_LENGTH (t
)-1) * sizeof (tree
) + sizeof (struct tree_vec
);
2043 my_friendly_abort (42);
/* Round up to the obstack's alignment, as obstack_alloc would have.  */
2045 #define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
2046 & ~ obstack_alignment_mask (obstack))
2047 if ((char *)t
+ ROUND (size
) == obstack_next_free (obstack
))
2054 /* Return first vector element whose BINFO_TYPE is ELEM.
2055 Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
2058 vec_binfo_member (elem
, vec
)
2064 for (i
= 0; i
< TREE_VEC_LENGTH (vec
); ++i
)
2065 if (comptypes (elem
, BINFO_TYPE (TREE_VEC_ELT (vec
, i
)), 1))
2066 return TREE_VEC_ELT (vec
, i
);
2071 /* Kludge around the fact that DECL_CONTEXT for virtual functions returns
2072 the wrong thing for decl_function_context. Hopefully the uses in the
2073 backend won't matter, since we don't need a static chain for local class
2077 hack_decl_function_context (decl
)
2080 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_FUNCTION_MEMBER_P (decl
))
2081 return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl
)));
2082 return decl_function_context (decl
);
2085 /* Return truthvalue of whether T1 is the same tree structure as T2.
2086 Return 1 if they are the same.
2087 Return 0 if they are understandably different.
2088 Return -1 if either contains tree structure not understood by
/* NOTE(review): fragmentary extraction -- the switch head, most case
   labels, braces and several early returns/breaks were dropped; code
   left byte-identical, comments only.  */
2092 cp_tree_equal (t1
, t2
)
2095 register enum tree_code code1
, code2
;
/* Nulls are handled first (result lines dropped by the extraction).  */
2100 if (t1
== 0 || t2
== 0)
2103 code1
= TREE_CODE (t1
);
2104 code2
= TREE_CODE (t2
);
/* Strip no-op conversions (NOP/CONVERT/NON_LVALUE) from either side
   before comparing.  */
2106 if (code1
== NOP_EXPR
|| code1
== CONVERT_EXPR
|| code1
== NON_LVALUE_EXPR
)
2108 if (code2
== NOP_EXPR
|| code2
== CONVERT_EXPR
|| code2
== NON_LVALUE_EXPR
)
2109 return cp_tree_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
2111 return cp_tree_equal (TREE_OPERAND (t1
, 0), t2
);
2113 else if (code2
== NOP_EXPR
|| code2
== CONVERT_EXPR
2114 || code2
== NON_LVALUE_EXPR
)
2115 return cp_tree_equal (t1
, TREE_OPERAND (t2
, 0));
/* INTEGER_CST: compare both halves of the two-word constant.  */
2123 return TREE_INT_CST_LOW (t1
) == TREE_INT_CST_LOW (t2
)
2124 && TREE_INT_CST_HIGH (t1
) == TREE_INT_CST_HIGH (t2
);
/* REAL_CST: exact representation equality.  */
2127 return REAL_VALUES_EQUAL (TREE_REAL_CST (t1
), TREE_REAL_CST (t2
));
/* STRING_CST: same length and same bytes.  */
2130 return TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
2131 && !bcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
2132 TREE_STRING_LENGTH (t1
));
2138 return cp_tree_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
/* Call-like nodes: compare the callee, then the argument lists.  */
2141 cmp
= cp_tree_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
2144 return simple_cst_list_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
2147 /* Special case: if either target is an unallocated VAR_DECL,
2148 it means that it's going to be unified with whatever the
2149 TARGET_EXPR is really supposed to initialize, so treat it
2150 as being equivalent to anything. */
2151 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
2152 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
2153 && DECL_RTL (TREE_OPERAND (t1
, 0)) == 0)
2154 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
2155 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
2156 && DECL_RTL (TREE_OPERAND (t2
, 0)) == 0))
2159 cmp
= cp_tree_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
2162 return cp_tree_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
2164 case WITH_CLEANUP_EXPR
:
2165 cmp
= cp_tree_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
/* BUG(review): both arguments below are T1's operand 2, so the cleanup
   is compared against itself and always matches; the second argument
   was almost certainly meant to be TREE_OPERAND (t2, 2).  */
2168 return cp_tree_equal (TREE_OPERAND (t1
, 2), TREE_OPERAND (t1
, 2));
/* COMPONENT_REF-like: identical second operand, then compare objects.  */
2171 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
2172 return cp_tree_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
2181 case TEMPLATE_PARM_INDEX
:
/* Template parameters match by index and binding level.  */
2182 return TEMPLATE_PARM_IDX (t1
) == TEMPLATE_PARM_IDX (t2
)
2183 && TEMPLATE_PARM_LEVEL (t1
) == TEMPLATE_PARM_LEVEL (t2
);
/* sizeof/alignof-like: operand codes must agree; type operands go
   through comptypes.  */
2187 if (TREE_CODE (TREE_OPERAND (t1
, 0)) != TREE_CODE (TREE_OPERAND (t2
, 0)))
2189 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1
, 0))) == 't')
2190 return comptypes (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0), 1);
/* Default: for expression-class codes, compare all operands pairwise.  */
2197 switch (TREE_CODE_CLASS (code1
))
2207 for (i
=0; i
<tree_code_length
[(int) code1
]; ++i
)
2209 cmp
= cp_tree_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
2219 /* Similar to make_tree_vec, but build on a temporary obstack. */
2226 register struct obstack
*ambient_obstack
= current_obstack
;
2227 current_obstack
= expression_obstack
;
2228 node
= make_tree_vec (len
);
2229 current_obstack
= ambient_obstack
;
2233 /* Build a wrapper around some pointer PTR so we can use it as a tree. */
2236 build_ptr_wrapper (ptr
)
2239 tree t
= make_node (WRAPPER
);
2240 WRAPPER_PTR (t
) = ptr
;
2244 /* Same, but on the expression_obstack. */
2247 build_expr_ptr_wrapper (ptr
)
2251 push_expression_obstack ();
2252 t
= build_ptr_wrapper (ptr
);
2257 /* Build a wrapper around some integer I so we can use it as a tree. */
2260 build_int_wrapper (i
)
2263 tree t
= make_node (WRAPPER
);
2264 WRAPPER_INT (t
) = i
;
2269 push_expression_obstack ()
2271 push_obstacks_nochange ();
2272 current_obstack
= expression_obstack
;
2275 /* The type of ARG when used as an lvalue. */
2281 tree type
= TREE_TYPE (arg
);
2282 if (TREE_CODE (arg
) == OVERLOAD
)
2283 type
= unknown_type_node
;
2284 return cp_build_type_variant
2285 (type
, TREE_READONLY (arg
), TREE_THIS_VOLATILE (arg
));
2288 /* The type of ARG for printing error messages; denote lvalues with
2295 tree type
= TREE_TYPE (arg
);
2296 if (TREE_CODE (type
) == ARRAY_TYPE
)
2298 else if (real_lvalue_p (arg
))
2299 type
= build_reference_type (lvalue_type (arg
));
2300 else if (IS_AGGR_TYPE (type
))
2301 type
= lvalue_type (arg
);
2306 /* Does FUNCTION use a variable-length argument list? */
2309 varargs_function_p (function
)
2312 tree parm
= TYPE_ARG_TYPES (TREE_TYPE (function
));
2313 for (; parm
; parm
= TREE_CHAIN (parm
))
2314 if (TREE_VALUE (parm
) == void_type_node
)
2319 /* Returns 1 if decl is a member of a class. */
2325 tree ctx
= DECL_CONTEXT (decl
);
2326 return (ctx
&& TREE_CODE_CLASS (TREE_CODE (ctx
)) == 't');