gcc/cp/tree.c
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20
21 #include "config.h"
22 #include <stdio.h>
23 #include "obstack.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "flags.h"
27
28 #define CEIL(x,y) (((x) + (y) - 1) / (y))
29
30 /* Return nonzero if REF is an lvalue valid for this language.
31 Lvalues can be assigned, unless they have TREE_READONLY.
32 Lvalues can have their address taken, unless they have DECL_REGISTER. */
33
34 int
35 lvalue_p (ref)
36 tree ref;
37 {
38 if (! language_lvalue_valid (ref))
39 return 0;
40
41 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
42 return 1;
43
44 if (ref == current_class_decl && flag_this_is_variable <= 0)
45 return 0;
46
47 switch (TREE_CODE (ref))
48 {
49 /* Preincrements and predecrements are valid lvalues, provided
50 what they refer to is itself a valid lvalue. */
51 case PREINCREMENT_EXPR:
52 case PREDECREMENT_EXPR:
53 case COMPONENT_REF:
54 case SAVE_EXPR:
55 return lvalue_p (TREE_OPERAND (ref, 0));
56
57 case STRING_CST:
58 return 1;
59
60 case VAR_DECL:
61 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
62 && DECL_LANG_SPECIFIC (ref)
63 && DECL_IN_AGGR_P (ref))
64 return 0;
65 case INDIRECT_REF:
66 case ARRAY_REF:
67 case PARM_DECL:
68 case RESULT_DECL:
69 case ERROR_MARK:
70 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
71 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
72 return 1;
73 break;
74
75 case WITH_CLEANUP_EXPR:
76 return lvalue_p (TREE_OPERAND (ref, 0));
77
78 case TARGET_EXPR:
79 return 1;
80
81 case CALL_EXPR:
82 if (TREE_ADDRESSABLE (TREE_TYPE (ref)))
83 return 1;
84 break;
85
86 /* A currently unresolved scope ref. */
87 case SCOPE_REF:
88 my_friendly_abort (103);
89 case OFFSET_REF:
90 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
91 return 1;
92 return lvalue_p (TREE_OPERAND (ref, 0))
93 && lvalue_p (TREE_OPERAND (ref, 1));
94 break;
95
96 case COND_EXPR:
97 return (lvalue_p (TREE_OPERAND (ref, 1))
98 && lvalue_p (TREE_OPERAND (ref, 2)));
99
100 case MODIFY_EXPR:
101 return 1;
102
103 case COMPOUND_EXPR:
104 return lvalue_p (TREE_OPERAND (ref, 1));
105 }
106
107 return 0;
108 }
109
110 /* Return nonzero if REF is an lvalue valid for this language;
111 otherwise, print an error message and return zero. */
112
113 int
114 lvalue_or_else (ref, string)
115 tree ref;
116 char *string;
117 {
118 int win = lvalue_p (ref);
119 if (! win)
120 error ("non-lvalue in %s", string);
121 return win;
122 }
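
/* Illustrative sketch (not part of the original source): a caller that
   needs an addressable operand might guard itself with lvalue_or_else
   before building an ADDR_EXPR.  The function name is hypothetical; REF
   is assumed to be an already-built expression tree.  Fenced off with
   #if 0 in the spirit of the other disabled code in this file.  */
#if 0
static tree
take_address_example (ref)
     tree ref;
{
  if (! lvalue_or_else (ref, "unary `&'"))
    return error_mark_node;
  return build_unary_op (ADDR_EXPR, ref, 0);
}
#endif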
123
124 /* INIT is a CALL_EXPR which needs info about its target.
125 TYPE is the type that this initialization should appear to have.
126
127 Build an encapsulation of the initialization to perform
128 and return it so that it can be processed by language-independent
129 and language-specific expression expanders.
130
131 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
132 Otherwise, cleanups are not built here. For example, when building
133 an initialization for a stack slot, the called function handles
134 the cleanup itself, so we would not want to build one here. */
135 tree
136 build_cplus_new (type, init, with_cleanup_p)
137 tree type;
138 tree init;
139 int with_cleanup_p;
140 {
141 tree slot = build (VAR_DECL, type);
142 tree rval = build (NEW_EXPR, type,
143 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
144 TREE_SIDE_EFFECTS (rval) = 1;
145 TREE_ADDRESSABLE (rval) = 1;
146 rval = build (TARGET_EXPR, type, slot, rval, 0);
147 TREE_SIDE_EFFECTS (rval) = 1;
148 TREE_ADDRESSABLE (rval) = 1;
149
150 #if 0
151 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
152 {
153 TREE_OPERAND (rval, 2) = error_mark_node;
154 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
155 build_delete (TYPE_POINTER_TO (type),
156 build_unary_op (ADDR_EXPR, slot, 0),
157 integer_two_node,
158 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
159 TREE_SIDE_EFFECTS (rval) = 1;
160 TREE_ADDRESSABLE (rval) = 1;
161 }
162 #endif
163 return rval;
164 }
165
166 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
167 these CALL_EXPRs with tree nodes that will perform the cleanups. */
168
169 tree
170 break_out_cleanups (exp)
171 tree exp;
172 {
173 tree tmp = exp;
174
175 if (TREE_CODE (tmp) == CALL_EXPR
176 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
177 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
178
179 while (TREE_CODE (tmp) == NOP_EXPR
180 || TREE_CODE (tmp) == CONVERT_EXPR
181 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
182 {
183 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
184 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
185 {
186 TREE_OPERAND (tmp, 0)
187 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
188 TREE_OPERAND (tmp, 0), 1);
189 break;
190 }
191 else
192 tmp = TREE_OPERAND (tmp, 0);
193 }
194 return exp;
195 }
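
/* Illustrative sketch (not part of the original source): a CALL_EXPR
   whose type needs a destructor comes back from break_out_cleanups
   wrapped by build_cplus_new, i.e. as a TARGET_EXPR; anything else is
   returned untouched.  The function name is hypothetical and the block
   is fenced off with #if 0.  */
#if 0
static tree
cleanup_example (call)
     tree call;
{
  /* Assume CALL is a CALL_EXPR returning a class with a destructor.  */
  tree wrapped = break_out_cleanups (call);
  /* TREE_CODE (wrapped) is now TARGET_EXPR rather than CALL_EXPR.  */
  return wrapped;
}
#endif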
196
197 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
198 copies where they are found. Returns a deep copy of all nodes transitively
199 containing CALL_EXPRs. */
200
201 tree
202 break_out_calls (exp)
203 tree exp;
204 {
205 register tree t1, t2;
206 register enum tree_code code;
207 register int changed = 0;
208 register int i;
209
210 if (exp == NULL_TREE)
211 return exp;
212
213 code = TREE_CODE (exp);
214
215 if (code == CALL_EXPR)
216 return copy_node (exp);
217
218 /* Don't try to defeat a SAVE_EXPR, as it should only be evaluated once. */
219 if (code == SAVE_EXPR)
220 return exp;
221
222 switch (TREE_CODE_CLASS (code))
223 {
224 default:
225 abort ();
226
227 case 'c': /* a constant */
228 case 't': /* a type node */
229 case 'x': /* something random, like an identifier or an ERROR_MARK. */
230 return exp;
231
232 case 'd': /* A decl node */
233 #if 0 /* This is bogus. jason 9/21/94 */
234
235 t1 = break_out_calls (DECL_INITIAL (exp));
236 if (t1 != DECL_INITIAL (exp))
237 {
238 exp = copy_node (exp);
239 DECL_INITIAL (exp) = t1;
240 }
241 #endif
242 return exp;
243
244 case 'b': /* A block node */
245 {
246 /* Don't know how to handle these correctly yet. Must do a
247 break_out_calls on all DECL_INITIAL values for local variables,
248 and also break_out_calls on all sub-blocks and sub-statements. */
249 abort ();
250 }
251 return exp;
252
253 case 'e': /* an expression */
254 case 'r': /* a reference */
255 case 's': /* an expression with side effects */
256 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
257 {
258 t1 = break_out_calls (TREE_OPERAND (exp, i));
259 if (t1 != TREE_OPERAND (exp, i))
260 {
261 exp = copy_node (exp);
262 TREE_OPERAND (exp, i) = t1;
263 }
264 }
265 return exp;
266
267 case '<': /* a comparison expression */
268 case '2': /* a binary arithmetic expression */
269 t2 = break_out_calls (TREE_OPERAND (exp, 1));
270 if (t2 != TREE_OPERAND (exp, 1))
271 changed = 1;
272 case '1': /* a unary arithmetic expression */
273 t1 = break_out_calls (TREE_OPERAND (exp, 0));
274 if (t1 != TREE_OPERAND (exp, 0))
275 changed = 1;
276 if (changed)
277 {
278 if (tree_code_length[(int) code] == 1)
279 return build1 (code, TREE_TYPE (exp), t1);
280 else
281 return build (code, TREE_TYPE (exp), t1, t2);
282 }
283 return exp;
284 }
285
286 }
287 \f
288 extern struct obstack *current_obstack;
289 extern struct obstack permanent_obstack, class_obstack;
290 extern struct obstack *saveable_obstack;
291
292 /* Here is how primitive or already-canonicalized types' hash
293 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
294 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
295
296 /* Construct, lay out and return the type of methods belonging to class
297 BASETYPE, whose arguments are described by ARGTYPES and whose return
298 values are described by RETTYPE. If such a type exists already, reuse it. */
299 tree
300 build_cplus_method_type (basetype, rettype, argtypes)
301 tree basetype, rettype, argtypes;
302 {
303 register tree t;
304 tree ptype;
305 int hashcode;
306
307 /* Make a node of the sort we want. */
308 t = make_node (METHOD_TYPE);
309
310 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
311 TREE_TYPE (t) = rettype;
312 if (IS_SIGNATURE (basetype))
313 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
314 TYPE_READONLY (basetype),
315 TYPE_VOLATILE (basetype));
316 else
317 {
318 ptype = build_pointer_type (basetype);
319 ptype = build_type_variant (ptype, 1, 0);
320 }
321 /* The actual arglist for this function includes a "hidden" argument
322 which is "this". Put it into the list of argument types. */
323
324 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
325 TYPE_ARG_TYPES (t) = argtypes;
326 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
327
328 /* If we already have such a type, use the old one and free this one.
329 Note that it also frees up the above cons cell if found. */
330 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
331 t = type_hash_canon (hashcode, t);
332
333 if (TYPE_SIZE (t) == 0)
334 layout_type (t);
335
336 return t;
337 }
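
/* Illustrative sketch (not part of the original source): building the
   METHOD_TYPE for something like `int C::f (double)'.  C_TYPE is assumed
   to be the RECORD_TYPE for C; the terminating void_type_node entry marks
   a fixed-length argument list, and the hidden `this' pointer is prepended
   by build_cplus_method_type itself.  Function name hypothetical; fenced
   off with #if 0.  */
#if 0
static tree
method_type_example (c_type)
     tree c_type;
{
  tree args = tree_cons (NULL_TREE, double_type_node,
                         build_tree_list (NULL_TREE, void_type_node));
  return build_cplus_method_type (c_type, integer_type_node, args);
}
#endif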
338
339 tree
340 build_cplus_staticfn_type (basetype, rettype, argtypes)
341 tree basetype, rettype, argtypes;
342 {
343 register tree t;
344 int hashcode;
345
346 /* Make a node of the sort we want. */
347 t = make_node (FUNCTION_TYPE);
348
349 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
350 TREE_TYPE (t) = rettype;
351
352 TYPE_ARG_TYPES (t) = argtypes;
353
354 /* If we already have such a type, use the old one and free this one.
355 Note that it also frees up the above cons cell if found. */
356 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
357 t = type_hash_canon (hashcode, t);
358
359 if (TYPE_SIZE (t) == 0)
360 layout_type (t);
361
362 return t;
363 }
364
365 tree
366 build_cplus_array_type (elt_type, index_type)
367 tree elt_type;
368 tree index_type;
369 {
370 register struct obstack *ambient_obstack = current_obstack;
371 register struct obstack *ambient_saveable_obstack = saveable_obstack;
372 tree t;
373
374 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
375 make this permanent too. */
376 if (TREE_PERMANENT (elt_type)
377 && (index_type == 0 || TREE_PERMANENT (index_type)))
378 {
379 current_obstack = &permanent_obstack;
380 saveable_obstack = &permanent_obstack;
381 }
382
383 t = build_array_type (elt_type, index_type);
384
385 /* Push these needs up so that initialization takes place
386 more easily. */
387 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
388 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
389 current_obstack = ambient_obstack;
390 saveable_obstack = ambient_saveable_obstack;
391 return t;
392 }
393 \f
394 /* Make a variant type in the proper way for C/C++, propagating qualifiers
395 down to the element type of an array. */
396
397 tree
398 cp_build_type_variant (type, constp, volatilep)
399 tree type;
400 int constp, volatilep;
401 {
402 if (TREE_CODE (type) == ARRAY_TYPE)
403 {
404 tree real_main_variant = TYPE_MAIN_VARIANT (type);
405
406 push_obstacks (TYPE_OBSTACK (real_main_variant),
407 TYPE_OBSTACK (real_main_variant));
408 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
409 constp, volatilep),
410 TYPE_DOMAIN (type));
411
412 /* TYPE must be on the same obstack as REAL_MAIN_VARIANT. If not,
413 make a copy. (TYPE might have come from the hash table and
414 REAL_MAIN_VARIANT might be in some function's obstack.) */
415
416 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
417 {
418 type = copy_node (type);
419 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
420 }
421
422 TYPE_MAIN_VARIANT (type) = real_main_variant;
423 pop_obstacks ();
424 }
425 return build_type_variant (type, constp, volatilep);
426 }
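
/* Illustrative sketch (not part of the original source): for an array
   type, the qualifiers are pushed down onto the element type, so a
   `const' variant of int[10] is an array of const int.  Assumes the
   usual tree.c helpers build_index_type and size_int; function name
   hypothetical; fenced off with #if 0.  */
#if 0
static tree
array_variant_example ()
{
  tree arr = build_cplus_array_type (integer_type_node,
                                     build_index_type (size_int (9)));
  tree const_arr = cp_build_type_variant (arr, /*constp=*/1, /*volatilep=*/0);
  /* TREE_TYPE (const_arr) is the const-qualified variant of
     integer_type_node; TYPE_MAIN_VARIANT (const_arr) is ARR's
     main variant.  */
  return const_arr;
}
#endif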
427 \f
428 /* Add OFFSET to all base types of BINFO.
429
430 OFFSET, which is a type offset, is a number of bytes.
431
432 Note that we don't have to worry about having two paths to the
433 same base type, since this type owns its association list. */
434 void
435 propagate_binfo_offsets (binfo, offset)
436 tree binfo;
437 tree offset;
438 {
439 tree binfos = BINFO_BASETYPES (binfo);
440 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
441
442 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
443 {
444 tree base_binfo = TREE_VEC_ELT (binfos, i);
445
446 if (TREE_VIA_VIRTUAL (base_binfo))
447 i += 1;
448 else
449 {
450 int j;
451 tree base_binfos = BINFO_BASETYPES (base_binfo);
452 tree delta;
453
454 for (j = i+1; j < n_baselinks; j++)
455 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
456 {
457 /* The next basetype offset must take into account the space
458 between the classes, not just the size of each class. */
459 delta = size_binop (MINUS_EXPR,
460 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
461 BINFO_OFFSET (base_binfo));
462 break;
463 }
464
465 #if 0
466 if (BINFO_OFFSET_ZEROP (base_binfo))
467 BINFO_OFFSET (base_binfo) = offset;
468 else
469 BINFO_OFFSET (base_binfo)
470 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
471 #else
472 BINFO_OFFSET (base_binfo) = offset;
473 #endif
474 if (base_binfos)
475 {
476 int k;
477 tree chain = NULL_TREE;
478
479 /* Now unshare the structure beneath BASE_BINFO. */
480 for (k = TREE_VEC_LENGTH (base_binfos)-1;
481 k >= 0; k--)
482 {
483 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
484 if (! TREE_VIA_VIRTUAL (base_base_binfo))
485 TREE_VEC_ELT (base_binfos, k)
486 = make_binfo (BINFO_OFFSET (base_base_binfo),
487 base_base_binfo,
488 BINFO_VTABLE (base_base_binfo),
489 BINFO_VIRTUALS (base_base_binfo),
490 chain);
491 chain = TREE_VEC_ELT (base_binfos, k);
492 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
493 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
494 }
495 /* Now propagate the offset to the base types. */
496 propagate_binfo_offsets (base_binfo, offset);
497 }
498
499 /* Go to our next class that counts for offset propagation. */
500 i = j;
501 if (i < n_baselinks)
502 offset = size_binop (PLUS_EXPR, offset, delta);
503 }
504 }
505 }
506
507 /* Compute the actual offsets that our virtual base classes
508 will have *for this type*. This must be performed after
509 the fields are laid out, since virtual baseclasses must
510 be laid out at the end of the record.
511
512 Returns the maximum number of virtual functions any of the virtual
513 baseclasses provide. */
514 int
515 layout_vbasetypes (rec, max)
516 tree rec;
517 int max;
518 {
519 /* Get all the virtual base types that this type uses.
520 The TREE_VALUE slot holds the virtual baseclass type. */
521 tree vbase_types = get_vbase_types (rec);
522
523 #ifdef STRUCTURE_SIZE_BOUNDARY
524 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
525 #else
526 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
527 #endif
528 int desired_align;
529
530 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
531 where CONST_SIZE is an integer
532 and VAR_SIZE is a tree expression.
533 If VAR_SIZE is null, the size is just CONST_SIZE.
534 Naturally we try to avoid using VAR_SIZE. */
535 register unsigned const_size = 0;
536 register tree var_size = 0;
537 int nonvirtual_const_size;
538 tree nonvirtual_var_size;
539
540 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
541
542 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
543 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
544 else
545 var_size = TYPE_SIZE (rec);
546
547 nonvirtual_const_size = const_size;
548 nonvirtual_var_size = var_size;
549
550 while (vbase_types)
551 {
552 tree basetype = BINFO_TYPE (vbase_types);
553 tree offset;
554
555 desired_align = TYPE_ALIGN (basetype);
556 record_align = MAX (record_align, desired_align);
557
558 if (const_size == 0)
559 offset = integer_zero_node;
560 else
561 {
562 /* Give each virtual base type the alignment it wants. */
563 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
564 * TYPE_ALIGN (basetype);
565 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
566 }
567
568 if (CLASSTYPE_VSIZE (basetype) > max)
569 max = CLASSTYPE_VSIZE (basetype);
570 BINFO_OFFSET (vbase_types) = offset;
571
572 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
573 const_size += MAX (BITS_PER_UNIT,
574 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
575 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
576 else if (var_size == 0)
577 var_size = TYPE_SIZE (basetype);
578 else
579 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
580
581 vbase_types = TREE_CHAIN (vbase_types);
582 }
583
584 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
585 here, as that is for this class, without any virtual base classes. */
586 TYPE_ALIGN (rec) = record_align;
587 if (const_size != nonvirtual_const_size)
588 {
589 CLASSTYPE_VBASE_SIZE (rec)
590 = size_int (const_size - nonvirtual_const_size);
591 TYPE_SIZE (rec) = size_int (const_size);
592 }
593
594 /* Now propagate offset information throughout the lattice
595 under the vbase type. */
596 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
597 vbase_types = TREE_CHAIN (vbase_types))
598 {
599 tree base_binfos = BINFO_BASETYPES (vbase_types);
600
601 if (base_binfos)
602 {
603 tree chain = NULL_TREE;
604 int j;
605 /* Now unshare the structure beneath BASE_BINFO. */
606
607 for (j = TREE_VEC_LENGTH (base_binfos)-1;
608 j >= 0; j--)
609 {
610 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
611 if (! TREE_VIA_VIRTUAL (base_base_binfo))
612 TREE_VEC_ELT (base_binfos, j)
613 = make_binfo (BINFO_OFFSET (base_base_binfo),
614 base_base_binfo,
615 BINFO_VTABLE (base_base_binfo),
616 BINFO_VIRTUALS (base_base_binfo),
617 chain);
618 chain = TREE_VEC_ELT (base_binfos, j);
619 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
620 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
621 }
622
623 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
624 }
625 }
626
627 return max;
628 }
629
630 /* Lay out the base types of a record type, REC.
631 Tentatively set the size and alignment of REC
632 according to the base types alone.
633
634 Offsets for immediate nonvirtual baseclasses are also computed here.
635
636 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
637 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
638
639 Returns a list of virtual base classes in a FIELD_DECL chain. */
640 tree
641 layout_basetypes (rec, binfos)
642 tree rec, binfos;
643 {
644 /* Chain to hold all the new FIELD_DECLs which point at virtual
645 base classes. */
646 tree vbase_decls = NULL_TREE;
647
648 #ifdef STRUCTURE_SIZE_BOUNDARY
649 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
650 #else
651 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
652 #endif
653
654 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
655 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
656 the size is just CONST_SIZE. Naturally we try to avoid using
657 VAR_SIZE. And so far, we've been successful. */
658 #if 0
659 register tree var_size = 0;
660 #endif
661
662 register unsigned const_size = 0;
663 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
664
665 /* Handle basetypes almost like fields, but record their
666 offsets differently. */
667
668 for (i = 0; i < n_baseclasses; i++)
669 {
670 int inc, desired_align, int_vbase_size;
671 register tree base_binfo = TREE_VEC_ELT (binfos, i);
672 register tree basetype = BINFO_TYPE (base_binfo);
673 tree decl, offset;
674
675 if (TYPE_SIZE (basetype) == 0)
676 {
677 #if 0
678 /* This error is now reported in xref_tag, thus giving better
679 location information. */
680 error_with_aggr_type (base_binfo,
681 "base class `%s' has incomplete type");
682
683 TREE_VIA_PUBLIC (base_binfo) = 1;
684 TREE_VIA_PROTECTED (base_binfo) = 0;
685 TREE_VIA_VIRTUAL (base_binfo) = 0;
686
687 /* Should handle this better so that
688
689 class A;
690 class B: private A { virtual void F(); };
691
692 does not dump core when compiled. */
693 my_friendly_abort (121);
694 #endif
695 continue;
696 }
697
698 /* All basetypes are recorded in the association list of the
699 derived type. */
700
701 if (TREE_VIA_VIRTUAL (base_binfo))
702 {
703 int j;
704 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
705 + sizeof (VBASE_NAME) + 1);
706
707 /* The offset for a virtual base class is only used in computing
708 virtual function tables and for initializing virtual base
709 pointers. It is built once `get_vbase_types' is called. */
710
711 /* If this basetype can come from another vbase pointer
712 without an additional indirection, we will share
713 that pointer. If an indirection is involved, we
714 make our own pointer. */
715 for (j = 0; j < n_baseclasses; j++)
716 {
717 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
718 if (! TREE_VIA_VIRTUAL (other_base_binfo)
719 && binfo_member (basetype,
720 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
721 goto got_it;
722 }
723 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
724 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
725 build_pointer_type (basetype));
726 /* If you change any of the below, take a look at all the
727 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
728 them too. */
729 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
730 DECL_VIRTUAL_P (decl) = 1;
731 DECL_FIELD_CONTEXT (decl) = rec;
732 DECL_CLASS_CONTEXT (decl) = rec;
733 DECL_FCONTEXT (decl) = basetype;
734 DECL_FIELD_SIZE (decl) = 0;
735 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
736 TREE_CHAIN (decl) = vbase_decls;
737 BINFO_VPTR_FIELD (base_binfo) = decl;
738 vbase_decls = decl;
739
740 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
741 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
742 {
743 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
744 "destructor `%s' non-virtual");
745 warning ("in inheritance relationship `%s: virtual %s'",
746 TYPE_NAME_STRING (rec),
747 TYPE_NAME_STRING (basetype));
748 }
749 got_it:
750 /* The space this decl occupies has already been accounted for. */
751 continue;
752 }
753
754 if (const_size == 0)
755 offset = integer_zero_node;
756 else
757 {
758 /* Give each base type the alignment it wants. */
759 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
760 * TYPE_ALIGN (basetype);
761 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
762
763 #if 0
764 /* bpk: Disabled this check until someone is willing to
765 claim it as theirs and explain exactly what circumstances
766 warrant the warning. */
767 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
768 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
769 {
770 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
771 "destructor `%s' non-virtual");
772 warning ("in inheritance relationship `%s:%s %s'",
773 TYPE_NAME_STRING (rec),
774 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
775 TYPE_NAME_STRING (basetype));
776 }
777 #endif
778 }
779 BINFO_OFFSET (base_binfo) = offset;
780 if (CLASSTYPE_VSIZE (basetype))
781 {
782 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
783 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
784 }
785 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
786 TYPE_BINFO (rec) = base_binfo;
787
788 /* Add only the amount of storage not present in
789 the virtual baseclasses. */
790
791 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
792 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
793 {
794 inc = MAX (record_align,
795 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
796 - int_vbase_size));
797
798 /* Record must have at least as much alignment as any field. */
799 desired_align = TYPE_ALIGN (basetype);
800 record_align = MAX (record_align, desired_align);
801
802 const_size += inc;
803 }
804 }
805
806 if (const_size)
807 CLASSTYPE_SIZE (rec) = size_int (const_size);
808 else
809 CLASSTYPE_SIZE (rec) = integer_zero_node;
810 CLASSTYPE_ALIGN (rec) = record_align;
811
812 return vbase_decls;
813 }
814 \f
815 /* Hashing of lists so that we don't make duplicates.
816 The entry point is `list_hash_canon'. */
817
818 /* Each hash table slot is a bucket containing a chain
819 of these structures. */
820
821 struct list_hash
822 {
823 struct list_hash *next; /* Next structure in the bucket. */
824 int hashcode; /* Hash code of this list. */
825 tree list; /* The list recorded here. */
826 };
827
828 /* Now here is the hash table. When recording a list, it is added
829 to the slot whose index is the hash code mod the table size.
830 Note that the hash table is used for several kinds of lists.
831 While all these live in the same table, they are completely independent,
832 and the hash code is computed differently for each of these. */
833
834 #define TYPE_HASH_SIZE 59
835 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
836
837 /* Compute a hash code for a list (chain of TREE_LIST nodes
838 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
839 TREE_COMMON slots), by adding the hash codes of the individual entries. */
840
841 int
842 list_hash (list)
843 tree list;
844 {
845 register int hashcode = 0;
846
847 if (TREE_CHAIN (list))
848 hashcode += TYPE_HASH (TREE_CHAIN (list));
849
850 if (TREE_VALUE (list))
851 hashcode += TYPE_HASH (TREE_VALUE (list));
852 else
853 hashcode += 1007;
854 if (TREE_PURPOSE (list))
855 hashcode += TYPE_HASH (TREE_PURPOSE (list));
856 else
857 hashcode += 1009;
858 return hashcode;
859 }
860
861 /* Look in the list hash table for a list identical to LIST,
862 under hash code HASHCODE. If one is found, return it. Otherwise return 0. */
863
864 tree
865 list_hash_lookup (hashcode, list)
866 int hashcode;
867 tree list;
868 {
869 register struct list_hash *h;
870 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
871 if (h->hashcode == hashcode
872 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
873 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
874 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
875 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
876 && TREE_VALUE (h->list) == TREE_VALUE (list)
877 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
878 {
879 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
880 return h->list;
881 }
882 return 0;
883 }
884
885 /* Add an entry to the list-hash-table
886 for a list LIST whose hash code is HASHCODE. */
887
888 void
889 list_hash_add (hashcode, list)
890 int hashcode;
891 tree list;
892 {
893 register struct list_hash *h;
894
895 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
896 h->hashcode = hashcode;
897 h->list = list;
898 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
899 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
900 }
901
902 /* Given LIST, and HASHCODE its hash code, return the canonical
903 object for an identical list if one already exists.
904 Otherwise, return LIST, and record it as the canonical object
905 if it is a permanent object.
906
907 To use this function, first create a list of the sort you want.
908 Then compute its hash code from the fields of the list that
909 make it different from other similar lists.
910 Then call this function and use the value.
911 This function frees the list you pass in if it is a duplicate. */
912
913 /* Set to 1 to debug without canonicalization. Never set by program. */
914 static int debug_no_list_hash = 0;
915
916 tree
917 list_hash_canon (hashcode, list)
918 int hashcode;
919 tree list;
920 {
921 tree t1;
922
923 if (debug_no_list_hash)
924 return list;
925
926 t1 = list_hash_lookup (hashcode, list);
927 if (t1 != 0)
928 {
929 obstack_free (&class_obstack, list);
930 return t1;
931 }
932
933 /* If this is a new list, record it for later reuse. */
934 list_hash_add (hashcode, list);
935
936 return list;
937 }
938
939 tree
940 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
941 int via_public, via_virtual, via_protected;
942 tree purpose, value, chain;
943 {
944 struct obstack *ambient_obstack = current_obstack;
945 tree t;
946 int hashcode;
947
948 current_obstack = &class_obstack;
949 t = tree_cons (purpose, value, chain);
950 TREE_VIA_PUBLIC (t) = via_public;
951 TREE_VIA_PROTECTED (t) = via_protected;
952 TREE_VIA_VIRTUAL (t) = via_virtual;
953 hashcode = list_hash (t);
954 t = list_hash_canon (hashcode, t);
955 current_obstack = ambient_obstack;
956 return t;
957 }
958
959 /* Constructor for hashed lists. */
960 tree
961 hash_tree_chain (value, chain)
962 tree value, chain;
963 {
964 struct obstack *ambient_obstack = current_obstack;
965 tree t;
966 int hashcode;
967
968 current_obstack = &class_obstack;
969 t = tree_cons (NULL_TREE, value, chain);
970 hashcode = list_hash (t);
971 t = list_hash_canon (hashcode, t);
972 current_obstack = ambient_obstack;
973 return t;
974 }
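
/* Illustrative sketch (not part of the original source): because these
   constructors funnel through list_hash_canon, building the same chain
   twice should hand back pointer-identical nodes.  Function name and
   assertion number are hypothetical; fenced off with #if 0.  */
#if 0
static void
hashed_list_example ()
{
  tree a = hash_tree_chain (integer_type_node,
                            hash_tree_chain (void_type_node, NULL_TREE));
  tree b = hash_tree_chain (integer_type_node,
                            hash_tree_chain (void_type_node, NULL_TREE));
  my_friendly_assert (a == b, 9999);
}
#endif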
975
976 /* Similar, but used for concatenating two lists. */
977 tree
978 hash_chainon (list1, list2)
979 tree list1, list2;
980 {
981 if (list2 == 0)
982 return list1;
983 if (list1 == 0)
984 return list2;
985 if (TREE_CHAIN (list1) == NULL_TREE)
986 return hash_tree_chain (TREE_VALUE (list1), list2);
987 return hash_tree_chain (TREE_VALUE (list1),
988 hash_chainon (TREE_CHAIN (list1), list2));
989 }
990
991 static tree
992 get_identifier_list (value)
993 tree value;
994 {
995 tree list = IDENTIFIER_AS_LIST (value);
996 if (list != NULL_TREE
997 && (TREE_CODE (list) != TREE_LIST
998 || TREE_VALUE (list) != value))
999 list = NULL_TREE;
1000 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1001 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1002 && IDENTIFIER_TYPE_VALUE (value)
1003 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1004 {
1005 tree type = IDENTIFIER_TYPE_VALUE (value);
1006
1007 if (TYPE_PTRMEMFUNC_P (type))
1008 list = NULL_TREE;
1009 else if (type == current_class_type)
1010 /* Don't mess up the constructor name. */
1011 list = tree_cons (NULL_TREE, value, NULL_TREE);
1012 else
1013 {
1014 register tree id;
1015 /* This will return the correct thing for regular types,
1016 nested types, and templates. Yay! */
1017 if (TYPE_NESTED_NAME (type))
1018 id = TYPE_NESTED_NAME (type);
1019 else
1020 id = TYPE_IDENTIFIER (type);
1021
1022 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
1023 CLASSTYPE_ID_AS_LIST (type)
1024 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
1025 list = CLASSTYPE_ID_AS_LIST (type);
1026 }
1027 }
1028 return list;
1029 }
1030
1031 tree
1032 get_decl_list (value)
1033 tree value;
1034 {
1035 tree list = NULL_TREE;
1036
1037 if (TREE_CODE (value) == IDENTIFIER_NODE)
1038 list = get_identifier_list (value);
1039 else if (TREE_CODE (value) == RECORD_TYPE
1040 && TYPE_LANG_SPECIFIC (value))
1041 list = CLASSTYPE_AS_LIST (value);
1042
1043 if (list != NULL_TREE)
1044 {
1045 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1046 return list;
1047 }
1048
1049 return build_decl_list (NULL_TREE, value);
1050 }
1051
1052 /* Look in the list hash table for a list equivalent to
1053 `build_tree_list (NULL_TREE, VALUE)'.
1054 If one is found, return it. Otherwise, build, record, and return one. */
1055
1056 tree
1057 list_hash_lookup_or_cons (value)
1058 tree value;
1059 {
1060 register int hashcode = TYPE_HASH (value);
1061 register struct list_hash *h;
1062 struct obstack *ambient_obstack;
1063 tree list = NULL_TREE;
1064
1065 if (TREE_CODE (value) == IDENTIFIER_NODE)
1066 list = get_identifier_list (value);
1067 else if (TREE_CODE (value) == TYPE_DECL
1068 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1069 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1070 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1071 else if (TREE_CODE (value) == RECORD_TYPE
1072 && TYPE_LANG_SPECIFIC (value))
1073 list = CLASSTYPE_AS_LIST (value);
1074
1075 if (list != NULL_TREE)
1076 {
1077 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1078 return list;
1079 }
1080
1081 if (debug_no_list_hash)
1082 return hash_tree_chain (value, NULL_TREE);
1083
1084 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1085 if (h->hashcode == hashcode
1086 && TREE_VIA_VIRTUAL (h->list) == 0
1087 && TREE_VIA_PUBLIC (h->list) == 0
1088 && TREE_VIA_PROTECTED (h->list) == 0
1089 && TREE_PURPOSE (h->list) == 0
1090 && TREE_VALUE (h->list) == value)
1091 {
1092 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1093 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1094 return h->list;
1095 }
1096
1097 ambient_obstack = current_obstack;
1098 current_obstack = &class_obstack;
1099 list = build_tree_list (NULL_TREE, value);
1100 list_hash_add (hashcode, list);
1101 current_obstack = ambient_obstack;
1102 return list;
1103 }
1104 \f
1105 /* Build an association between TYPE and some parameters:
1106
1107 OFFSET is the offset added to `this' to convert it to a pointer
1108 of type `TYPE *'
1109
1110 BINFO is the base binfo to use, if we are deriving from one. This
1111 is necessary, as we want specialized parent binfos from base
1112 classes, so that the VTABLE_NAMEs of bases are for the most derived
1113 type, instead of the simple type.
1114
1115 VTABLE is the virtual function table with which to initialize
1116 sub-objects of type TYPE.
1117
1118 VIRTUALS are the virtual functions sitting in VTABLE.
1119
1120 CHAIN holds further associations we must retain. */
1121
1122 tree
1123 make_binfo (offset, binfo, vtable, virtuals, chain)
1124 tree offset, binfo;
1125 tree vtable, virtuals;
1126 tree chain;
1127 {
1128 tree new_binfo = make_tree_vec (6);
1129 tree type;
1130
1131 if (TREE_CODE (binfo) == TREE_VEC)
1132 type = BINFO_TYPE (binfo);
1133 else
1134 {
1135 type = binfo;
1136 binfo = TYPE_BINFO (binfo);
1137 }
1138
1139 TREE_CHAIN (new_binfo) = chain;
1140 if (chain)
1141 TREE_USED (new_binfo) = TREE_USED (chain);
1142
1143 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1144 BINFO_OFFSET (new_binfo) = offset;
1145 BINFO_VTABLE (new_binfo) = vtable;
1146 BINFO_VIRTUALS (new_binfo) = virtuals;
1147 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1148
1149 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1150 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1151 return new_binfo;
1152 }
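
/* Illustrative sketch (not part of the original source): a binfo is a
   TREE_VEC whose slots are read back with the BINFO_* accessors.
   BASE_TYPE is assumed to be a class type with TYPE_BINFO already set;
   function name hypothetical; fenced off with #if 0.  */
#if 0
static void
binfo_example (base_type)
     tree base_type;
{
  tree b = make_binfo (integer_zero_node, base_type,
                       NULL_TREE, NULL_TREE, NULL_TREE);
  /* BINFO_TYPE (b) is TYPE_MAIN_VARIANT (base_type),
     BINFO_OFFSET (b) is integer_zero_node, and
     BINFO_VTABLE (b) and BINFO_VIRTUALS (b) are still empty.  */
}
#endif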
1153
1154 tree
1155 copy_binfo (list)
1156 tree list;
1157 {
1158 tree binfo = copy_list (list);
1159 tree rval = binfo;
1160 while (binfo)
1161 {
1162 TREE_USED (binfo) = 0;
1163 if (BINFO_BASETYPES (binfo))
1164 BINFO_BASETYPES (binfo) = copy_node (BINFO_BASETYPES (binfo));
1165 binfo = TREE_CHAIN (binfo);
1166 }
1167 return rval;
1168 }
1169
1170 /* Return the binfo value for ELEM in TYPE. */
1171
1172 tree
1173 binfo_value (elem, type)
1174 tree elem;
1175 tree type;
1176 {
1177 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1178 compiler_error ("base class `%s' ambiguous in binfo_value",
1179 TYPE_NAME_STRING (elem));
1180 if (elem == type)
1181 return TYPE_BINFO (type);
1182 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1183 return type;
1184 return get_binfo (elem, type, 0);
1185 }
1186
1187 tree
1188 reverse_path (path)
1189 tree path;
1190 {
1191 register tree prev = 0, tmp, next;
1192 for (tmp = path; tmp; tmp = next)
1193 {
1194 next = BINFO_INHERITANCE_CHAIN (tmp);
1195 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1196 prev = tmp;
1197 }
1198 return prev;
1199 }
1200
1201 tree
1202 virtual_member (elem, list)
1203 tree elem;
1204 tree list;
1205 {
1206 tree t;
1207 tree rval, nval;
1208
1209 for (t = list; t; t = TREE_CHAIN (t))
1210 if (elem == BINFO_TYPE (t))
1211 return t;
1212 rval = 0;
1213 for (t = list; t; t = TREE_CHAIN (t))
1214 {
1215 tree binfos = BINFO_BASETYPES (t);
1216 int i;
1217
1218 if (binfos != NULL_TREE)
1219 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1220 {
1221 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1222 if (nval)
1223 {
1224 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1225 my_friendly_abort (104);
1226 rval = nval;
1227 }
1228 }
1229 }
1230 return rval;
1231 }
1232
1233 void
1234 debug_binfo (elem)
1235 tree elem;
1236 {
1237 int i;
1238 tree virtuals;
1239
1240 fprintf (stderr, "type \"%s\"; offset = %d\n",
1241 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1242 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1243 fprintf (stderr, "vtable type:\n");
1244 debug_tree (BINFO_TYPE (elem));
1245 if (BINFO_VTABLE (elem))
1246 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1247 else
1248 fprintf (stderr, "no vtable decl yet\n");
1249 fprintf (stderr, "virtuals:\n");
1250 virtuals = BINFO_VIRTUALS (elem);
1251 if (virtuals != 0)
1252 {
1253 virtuals = TREE_CHAIN (virtuals);
1254 if (flag_dossier)
1255 virtuals = TREE_CHAIN (virtuals);
1256 }
1257 i = 1;
1258 while (virtuals)
1259 {
1260 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1261 fprintf (stderr, "%s [%d =? %d]\n",
1262 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1263 i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1264 virtuals = TREE_CHAIN (virtuals);
1265 i += 1;
1266 }
1267 }
1268
1269 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1270 We expect a null pointer to mark the end of the chain.
1271 This is the Lisp primitive `length'. */
1272
1273 int
1274 decl_list_length (t)
1275 tree t;
1276 {
1277 register tree tail;
1278 register int len = 0;
1279
1280 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1281 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1282 for (tail = t; tail; tail = DECL_CHAIN (tail))
1283 len++;
1284
1285 return len;
1286 }
1287
1288 int
1289 count_functions (t)
1290 tree t;
1291 {
1292 if (TREE_CODE (t) == FUNCTION_DECL)
1293 return 1;
1294 else if (TREE_CODE (t) == TREE_LIST)
1295 return decl_list_length (TREE_VALUE (t));
1296
1297 my_friendly_abort (359);
1298 return 0;
1299 }
1300
1301 /* Like value_member, but for DECL_CHAINs. */
1302 tree
1303 decl_value_member (elem, list)
1304 tree elem, list;
1305 {
1306 while (list)
1307 {
1308 if (elem == list)
1309 return list;
1310 list = DECL_CHAIN (list);
1311 }
1312 return NULL_TREE;
1313 }
1314
1315 int
1316 is_overloaded_fn (x)
1317 tree x;
1318 {
1319 if (TREE_CODE (x) == FUNCTION_DECL)
1320 return 1;
1321
1322 if (TREE_CODE (x) == TREE_LIST
1323 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1324 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1325 return 1;
1326
1327 return 0;
1328 }
1329
1330 int
1331 really_overloaded_fn (x)
1332 tree x;
1333 {
1334 if (TREE_CODE (x) == TREE_LIST
1335 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1336 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1337 return 1;
1338
1339 return 0;
1340 }
1341
1342 tree
1343 get_first_fn (from)
1344 tree from;
1345 {
1346 if (TREE_CODE (from) == FUNCTION_DECL)
1347 return from;
1348
1349 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1350
1351 return TREE_VALUE (from);
1352 }
1353
1354 tree
1355 fnaddr_from_vtable_entry (entry)
1356 tree entry;
1357 {
1358 if (flag_vtable_thunks)
1359 {
1360 tree func = entry;
1361 if (TREE_CODE (func) == ADDR_EXPR)
1362 func = TREE_OPERAND (func, 0);
1363 if (TREE_CODE (func) == THUNK_DECL)
1364 return DECL_INITIAL (func);
1365 else
1366 return entry;
1367 }
1368 else
1369 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1370 }
1371
1372 void
1373 set_fnaddr_from_vtable_entry (entry, value)
1374 tree entry, value;
1375 {
1376 if (flag_vtable_thunks)
1377 abort ();
1378 else
1379 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
1380 }
1381
1382 tree
1383 function_arg_chain (t)
1384 tree t;
1385 {
1386 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1387 }
1388
1389 int
1390 promotes_to_aggr_type (t, code)
1391 tree t;
1392 enum tree_code code;
1393 {
1394 if (TREE_CODE (t) == code)
1395 t = TREE_TYPE (t);
1396 return IS_AGGR_TYPE (t);
1397 }
1398
1399 int
1400 is_aggr_type_2 (t1, t2)
1401 tree t1, t2;
1402 {
1403 if (TREE_CODE (t1) != TREE_CODE (t2))
1404 return 0;
1405 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1406 }
1407
1408 /* Give a message using types TYPE1 and TYPE2 as arguments.
1409 PFN is the function which will print the message;
1410 S is the format string for PFN to use. */
1411 void
1412 message_2_types (pfn, s, type1, type2)
1413 void (*pfn) ();
1414 char *s;
1415 tree type1, type2;
1416 {
1417 tree name1 = TYPE_NAME (type1);
1418 tree name2 = TYPE_NAME (type2);
1419 if (TREE_CODE (name1) == TYPE_DECL)
1420 name1 = DECL_NAME (name1);
1421 if (TREE_CODE (name2) == TYPE_DECL)
1422 name2 = DECL_NAME (name2);
1423 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1424 }
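
/* Illustrative sketch (not part of the original source): message_2_types
   is meant to be handed one of the diagnostic routines together with a
   format string containing two `%s' directives.  FROM_TYPE and TO_TYPE
   are hypothetical class types; fenced off with #if 0.  */
#if 0
  message_2_types (error, "cannot convert `%s' to `%s'", from_type, to_type);
#endif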
1425 \f
1426 #define PRINT_RING_SIZE 4
1427
1428 char *
1429 lang_printable_name (decl)
1430 tree decl;
1431 {
1432 static tree decl_ring[PRINT_RING_SIZE];
1433 static char *print_ring[PRINT_RING_SIZE];
1434 static int ring_counter;
1435 int i;
1436
1437 /* Only cache functions. */
1438 if (TREE_CODE (decl) != FUNCTION_DECL
1439 || DECL_LANG_SPECIFIC (decl) == 0)
1440 return decl_as_string (decl, 1);
1441
1442 /* See if this print name is lying around. */
1443 for (i = 0; i < PRINT_RING_SIZE; i++)
1444 if (decl_ring[i] == decl)
1445 /* yes, so return it. */
1446 return print_ring[i];
1447
1448 if (++ring_counter == PRINT_RING_SIZE)
1449 ring_counter = 0;
1450
1451 if (current_function_decl != NULL_TREE)
1452 {
1453 if (decl_ring[ring_counter] == current_function_decl)
1454 ring_counter += 1;
1455 if (ring_counter == PRINT_RING_SIZE)
1456 ring_counter = 0;
1457 if (decl_ring[ring_counter] == current_function_decl)
1458 my_friendly_abort (106);
1459 }
1460
1461 if (print_ring[ring_counter])
1462 free (print_ring[ring_counter]);
1463
1464 {
1465 int print_ret_type_p
1466 = (!DECL_CONSTRUCTOR_P (decl)
1467 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1468
1469 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1470 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1471 strcpy (print_ring[ring_counter], name);
1472 decl_ring[ring_counter] = decl;
1473 }
1474 return print_ring[ring_counter];
1475 }
1476 \f
1477 /* Comparison function for sorting identifiers in RAISES lists.
1478 Note that because IDENTIFIER_NODEs are unique, we can sort
1479 them by address, saving an indirection. */
1480 static int
1481 id_cmp (p1, p2)
1482 tree *p1, *p2;
1483 {
1484 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1485 }
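
/* Illustrative sketch (not part of the original source): id_cmp has the
   shape of a qsort comparison function over an array of TREE_LIST nodes,
   along the lines of the fragment below.  A and N are hypothetical;
   fenced off with #if 0.  */
#if 0
  qsort ((char *) a, n, sizeof (tree), (int (*)()) id_cmp);
#endif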
1486
1487 /* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
1488 listed in RAISES. */
1489 tree
1490 build_exception_variant (ctype, type, raises)
1491 tree ctype, type;
1492 tree raises;
1493 {
1494 int i;
1495 tree v = TYPE_MAIN_VARIANT (type);
1496 tree t, t2, cname;
1497 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1498 int constp = TYPE_READONLY (type);
1499 int volatilep = TYPE_VOLATILE (type);
1500
1501 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1502 {
1503 if (TYPE_READONLY (v) != constp
1504 || TYPE_VOLATILE (v) != volatilep)
1505 continue;
1506
1507 t = raises;
1508 t2 = TYPE_RAISES_EXCEPTIONS (v);
1509 while (t && t2)
1510 {
1511 if (TREE_TYPE (t) == TREE_TYPE (t2))
1512 {
1513 t = TREE_CHAIN (t);
1514 t2 = TREE_CHAIN (t2);
1515 }
1516 else break;
1517 }
1518 if (t || t2)
1519 continue;
1520 /* List of exceptions raised matches previously found list.
1521
1522 @@ Nice to free up storage used in consing up the
1523 @@ list of exceptions raised. */
1524 return v;
1525 }
1526
1527 /* Need to build a new variant. */
1528 v = copy_node (type);
1529 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1530 TYPE_NEXT_VARIANT (type) = v;
1531 if (raises && ! TREE_PERMANENT (raises))
1532 {
1533 push_obstacks_nochange ();
1534 end_temporary_allocation ();
1535 raises = copy_list (raises);
1536 pop_obstacks ();
1537 }
1538 TYPE_RAISES_EXCEPTIONS (v) = raises;
1539 return v;
1540 }
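
/* Illustrative sketch (not part of the original source): asking twice for
   the same exception-specification variant of a function type should hand
   back the variant built by the first call, since the second call finds it
   on the type's variant chain.  FNTYPE and RAISES are assumed to be an
   already-built FUNCTION_TYPE and TREE_LIST; function name hypothetical;
   fenced off with #if 0.  */
#if 0
static void
exception_variant_example (fntype, raises)
     tree fntype, raises;
{
  tree v1 = build_exception_variant (NULL_TREE, fntype, raises);
  tree v2 = build_exception_variant (NULL_TREE, fntype, raises);
  /* v1 == v2 is expected to hold here.  */
}
#endif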
1541
1542 /* Subroutine of copy_to_permanent.
1543
1544 Assuming T is a node built bottom-up, make it all exist on
1545 the permanent obstack, if it is not permanent already. */
1546 static tree
1547 make_deep_copy (t)
1548 tree t;
1549 {
1550 enum tree_code code;
1551
1552 if (t == NULL_TREE || TREE_PERMANENT (t))
1553 return t;
1554
1555 switch (code = TREE_CODE (t))
1556 {
1557 case ERROR_MARK:
1558 return error_mark_node;
1559
1560 case VAR_DECL:
1561 case FUNCTION_DECL:
1562 case CONST_DECL:
1563 break;
1564
1565 case PARM_DECL:
1566 {
1567 tree chain = TREE_CHAIN (t);
1568 t = copy_node (t);
1569 TREE_CHAIN (t) = make_deep_copy (chain);
1570 TREE_TYPE (t) = make_deep_copy (TREE_TYPE (t));
1571 DECL_INITIAL (t) = make_deep_copy (DECL_INITIAL (t));
1572 DECL_SIZE (t) = make_deep_copy (DECL_SIZE (t));
1573 return t;
1574 }
1575
1576 case TREE_LIST:
1577 {
1578 tree chain = TREE_CHAIN (t);
1579 t = copy_node (t);
1580 TREE_PURPOSE (t) = make_deep_copy (TREE_PURPOSE (t));
1581 TREE_VALUE (t) = make_deep_copy (TREE_VALUE (t));
1582 TREE_CHAIN (t) = make_deep_copy (chain);
1583 return t;
1584 }
1585
1586 case TREE_VEC:
1587 {
1588 int len = TREE_VEC_LENGTH (t);
1589
1590 t = copy_node (t);
1591 while (len--)
1592 TREE_VEC_ELT (t, len) = make_deep_copy (TREE_VEC_ELT (t, len));
1593 return t;
1594 }
1595
1596 case INTEGER_CST:
1597 case REAL_CST:
1598 case STRING_CST:
1599 return copy_node (t);
1600
1601 case COND_EXPR:
1602 case TARGET_EXPR:
1603 case NEW_EXPR:
1604 t = copy_node (t);
1605 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1606 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1607 TREE_OPERAND (t, 2) = make_deep_copy (TREE_OPERAND (t, 2));
1608 return t;
1609
1610 case SAVE_EXPR:
1611 t = copy_node (t);
1612 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1613 return t;
1614
1615 case MODIFY_EXPR:
1616 case PLUS_EXPR:
1617 case MINUS_EXPR:
1618 case MULT_EXPR:
1619 case TRUNC_DIV_EXPR:
1620 case TRUNC_MOD_EXPR:
1621 case MIN_EXPR:
1622 case MAX_EXPR:
1623 case LSHIFT_EXPR:
1624 case RSHIFT_EXPR:
1625 case BIT_IOR_EXPR:
1626 case BIT_XOR_EXPR:
1627 case BIT_AND_EXPR:
1628 case BIT_ANDTC_EXPR:
1629 case TRUTH_ANDIF_EXPR:
1630 case TRUTH_ORIF_EXPR:
1631 case LT_EXPR:
1632 case LE_EXPR:
1633 case GT_EXPR:
1634 case GE_EXPR:
1635 case EQ_EXPR:
1636 case NE_EXPR:
1637 case CEIL_DIV_EXPR:
1638 case FLOOR_DIV_EXPR:
1639 case ROUND_DIV_EXPR:
1640 case CEIL_MOD_EXPR:
1641 case FLOOR_MOD_EXPR:
1642 case ROUND_MOD_EXPR:
1643 case COMPOUND_EXPR:
1644 case PREDECREMENT_EXPR:
1645 case PREINCREMENT_EXPR:
1646 case POSTDECREMENT_EXPR:
1647 case POSTINCREMENT_EXPR:
1648 case CALL_EXPR:
1649 t = copy_node (t);
1650 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1651 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1652 return t;
1653
1654 case CONVERT_EXPR:
1655 case ADDR_EXPR:
1656 case INDIRECT_REF:
1657 case NEGATE_EXPR:
1658 case BIT_NOT_EXPR:
1659 case TRUTH_NOT_EXPR:
1660 case NOP_EXPR:
1661 case COMPONENT_REF:
1662 t = copy_node (t);
1663 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1664 return t;
1665
1666 case POINTER_TYPE:
1667 return build_pointer_type (make_deep_copy (TREE_TYPE (t)));
1668 case REFERENCE_TYPE:
1669 return build_reference_type (make_deep_copy (TREE_TYPE (t)));
1670 case FUNCTION_TYPE:
1671 return build_function_type (make_deep_copy (TREE_TYPE (t)),
1672 make_deep_copy (TYPE_ARG_TYPES (t)));
1673 case ARRAY_TYPE:
1674 return build_array_type (make_deep_copy (TREE_TYPE (t)),
1675 make_deep_copy (TYPE_DOMAIN (t)));
1676 case OFFSET_TYPE:
1677 return build_offset_type (make_deep_copy (TYPE_OFFSET_BASETYPE (t)),
1678 make_deep_copy (TREE_TYPE (t)));
1679 case METHOD_TYPE:
1680 return build_method_type
1681 (make_deep_copy (TYPE_METHOD_BASETYPE (t)),
1682 build_function_type
1683 (make_deep_copy (TREE_TYPE (t)),
1684 make_deep_copy (TREE_CHAIN (TYPE_ARG_TYPES (t)))));
1685 case RECORD_TYPE:
1686 if (TYPE_PTRMEMFUNC_P (t))
1687 return build_ptrmemfunc_type
1688 (make_deep_copy (TYPE_PTRMEMFUNC_FN_TYPE (t)));
1689 /* else fall through */
1690
1691 /* This list is incomplete, but should suffice for now.
1692 It is very important that `sorry' does not call
1693 `report_error_function'. That could cause an infinite loop. */
1694 default:
1695 sorry ("initializer contains unrecognized tree code");
1696 return error_mark_node;
1697
1698 }
1699 my_friendly_abort (107);
1700 /* NOTREACHED */
1701 return NULL_TREE;
1702 }
1703
1704 /* Assuming T is a node built bottom-up, make it all exist on
1705 the permanent obstack, if it is not permanent already. */
1706 tree
1707 copy_to_permanent (t)
1708 tree t;
1709 {
1710 register struct obstack *ambient_obstack = current_obstack;
1711 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1712
1713 if (t == NULL_TREE || TREE_PERMANENT (t))
1714 return t;
1715
1716 saveable_obstack = &permanent_obstack;
1717 current_obstack = saveable_obstack;
1718
1719 t = make_deep_copy (t);
1720
1721 current_obstack = ambient_obstack;
1722 saveable_obstack = ambient_saveable_obstack;
1723
1724 return t;
1725 }
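
/* Illustrative sketch (not part of the original source): a tree that must
   outlive the current function (say a default-argument expression) can be
   routed through copy_to_permanent; trees that are already permanent come
   back unchanged.  EXPR is assumed to be an expression tree; function name
   hypothetical; fenced off with #if 0.  */
#if 0
static tree
save_for_later_example (expr)
     tree expr;
{
  tree saved = copy_to_permanent (expr);
  /* TREE_PERMANENT (saved) is nonzero, or SAVED == EXPR if EXPR was
     permanent (or NULL_TREE) to begin with.  */
  return saved;
}
#endif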
1726
1727 void
1728 print_lang_statistics ()
1729 {
1730 extern struct obstack maybepermanent_obstack;
1731 print_obstack_statistics ("class_obstack", &class_obstack);
1732 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1733 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1734 print_search_statistics ();
1735 print_class_statistics ();
1736 }
1737
1738 /* This is used by the `assert' macro. It is provided in libgcc.a,
1739 which `cc' doesn't know how to link. Note that the C++ front-end
1740 no longer actually uses the `assert' macro (instead, it calls
1741 my_friendly_assert). But all of the back-end files still need this. */
1742 void
1743 __eprintf (string, expression, line, filename)
1744 #ifdef __STDC__
1745 const char *string;
1746 const char *expression;
1747 unsigned line;
1748 const char *filename;
1749 #else
1750 char *string;
1751 char *expression;
1752 unsigned line;
1753 char *filename;
1754 #endif
1755 {
1756 fprintf (stderr, string, expression, line, filename);
1757 fflush (stderr);
1758 abort ();
1759 }
1760
1761 /* Return, as an INTEGER_CST node, the number of elements for
1762 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1763
1764 tree
1765 array_type_nelts_top (type)
1766 tree type;
1767 {
1768 return fold (build (PLUS_EXPR, sizetype,
1769 array_type_nelts (type),
1770 integer_one_node));
1771 }
1772
1773 /* Return, as an INTEGER_CST node, the number of elements for
1774 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1775 ARRAY_TYPEs that are clumped together. */
1776
1777 tree
1778 array_type_nelts_total (type)
1779 tree type;
1780 {
1781 tree sz = array_type_nelts_top (type);
1782 type = TREE_TYPE (type);
1783 while (TREE_CODE (type) == ARRAY_TYPE)
1784 {
1785 tree n = array_type_nelts_top (type);
1786 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1787 type = TREE_TYPE (type);
1788 }
1789 return sz;
1790 }
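
/* Illustrative sketch (not part of the original source): for a type like
   int[3][5], array_type_nelts yields the top domain's maximum index (2),
   so array_type_nelts_top folds to 3 and array_type_nelts_total folds to
   3 * 5 = 15.  Assumes the usual tree.c helpers build_index_type and
   size_int; function name hypothetical; fenced off with #if 0.  */
#if 0
static void
nelts_example ()
{
  tree inner = build_cplus_array_type (integer_type_node,
                                       build_index_type (size_int (4)));
  tree outer = build_cplus_array_type (inner,
                                       build_index_type (size_int (2)));
  tree top = array_type_nelts_top (outer);      /* INTEGER_CST 3 */
  tree total = array_type_nelts_total (outer);  /* INTEGER_CST 15 */
}
#endif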