gcc/cp/tree.c  (gcc.git, blob be0fc99195b78eed5ce47252d61506bd9b2bbc3c)
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29
30 #define CEIL(x,y) (((x) + (y) - 1) / (y))
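/* e.g. CEIL (17, 8) == 3; used below as CEIL (size, align) * align to
   round a bit- or byte-count up to an alignment boundary.  */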
31
32 /* Return nonzero if REF is an lvalue valid for this language.
33 Lvalues can be assigned, unless they have TREE_READONLY.
34 Lvalues can have their address taken, unless they have DECL_REGISTER. */
35
36 int
37 real_lvalue_p (ref)
38 tree ref;
39 {
40 if (! language_lvalue_valid (ref))
41 return 0;
42
43 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
44 return 1;
45
46 if (ref == current_class_decl && flag_this_is_variable <= 0)
47 return 0;
48
49 switch (TREE_CODE (ref))
50 {
51 /* preincrements and predecrements are valid lvalues, provided
52 what they refer to is itself a valid lvalue. */
53 case PREINCREMENT_EXPR:
54 case PREDECREMENT_EXPR:
55 case COMPONENT_REF:
56 case SAVE_EXPR:
57 return real_lvalue_p (TREE_OPERAND (ref, 0));
58
59 case STRING_CST:
60 return 1;
61
62 case VAR_DECL:
63 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
64 && DECL_LANG_SPECIFIC (ref)
65 && DECL_IN_AGGR_P (ref))
66 return 0;
67 case INDIRECT_REF:
68 case ARRAY_REF:
69 case PARM_DECL:
70 case RESULT_DECL:
71 case ERROR_MARK:
72 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
73 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
74 return 1;
75 break;
76
77 case WITH_CLEANUP_EXPR:
78 return real_lvalue_p (TREE_OPERAND (ref, 0));
79
80 /* A currently unresolved scope ref. */
81 case SCOPE_REF:
82 my_friendly_abort (103);
83 case OFFSET_REF:
84 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
85 return 1;
86 return real_lvalue_p (TREE_OPERAND (ref, 0))
87 && real_lvalue_p (TREE_OPERAND (ref, 1));
88 break;
89
90 case COND_EXPR:
91 return (real_lvalue_p (TREE_OPERAND (ref, 1))
92 && real_lvalue_p (TREE_OPERAND (ref, 2)));
93
94 case MODIFY_EXPR:
95 return 1;
96
97 case COMPOUND_EXPR:
98 return real_lvalue_p (TREE_OPERAND (ref, 1));
99
100 case MAX_EXPR:
101 case MIN_EXPR:
102 return (real_lvalue_p (TREE_OPERAND (ref, 0))
103 && real_lvalue_p (TREE_OPERAND (ref, 1)));
104 }
105
106 return 0;
107 }
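/* A rough mapping (for illustration; not exhaustive) from C++ source
   forms to the tree codes handled above:

       ++i            PREINCREMENT_EXPR  -- lvalue iff `i' is
       s.m            COMPONENT_REF      -- lvalue iff `s' is
       *p, a[i]       INDIRECT_REF, ARRAY_REF
       b ? x : y      COND_EXPR          -- lvalue iff both arms are
       x = y          MODIFY_EXPR        -- treated as an lvalue

   lvalue_p, below, accepts everything real_lvalue_p does, and in
   addition treats class rvalues (TARGET_EXPRs and CALL_EXPRs of
   aggregate type) as lvalues, presumably so they can be bound to
   references.  */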
108
109 int
110 lvalue_p (ref)
111 tree ref;
112 {
113 if (! language_lvalue_valid (ref))
114 return 0;
115
116 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
117 return 1;
118
119 if (ref == current_class_decl && flag_this_is_variable <= 0)
120 return 0;
121
122 switch (TREE_CODE (ref))
123 {
124 /* preincrements and predecrements are valid lvalues, provided
125 what they refer to is itself a valid lvalue. */
126 case PREINCREMENT_EXPR:
127 case PREDECREMENT_EXPR:
128 case COMPONENT_REF:
129 case SAVE_EXPR:
130 return lvalue_p (TREE_OPERAND (ref, 0));
131
132 case STRING_CST:
133 return 1;
134
135 case VAR_DECL:
136 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
137 && DECL_LANG_SPECIFIC (ref)
138 && DECL_IN_AGGR_P (ref))
139 return 0;
140 case INDIRECT_REF:
141 case ARRAY_REF:
142 case PARM_DECL:
143 case RESULT_DECL:
144 case ERROR_MARK:
145 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
146 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
147 return 1;
148 break;
149
150 case WITH_CLEANUP_EXPR:
151 return lvalue_p (TREE_OPERAND (ref, 0));
152
153 case TARGET_EXPR:
154 return 1;
155
156 case CALL_EXPR:
157 if (IS_AGGR_TYPE (TREE_TYPE (ref)))
158 return 1;
159 break;
160
161 /* A currently unresolved scope ref. */
162 case SCOPE_REF:
163 my_friendly_abort (103);
164 case OFFSET_REF:
165 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
166 return 1;
167 return lvalue_p (TREE_OPERAND (ref, 0))
168 && lvalue_p (TREE_OPERAND (ref, 1));
169 break;
170
171 case COND_EXPR:
172 return (lvalue_p (TREE_OPERAND (ref, 1))
173 && lvalue_p (TREE_OPERAND (ref, 2)));
174
175 case MODIFY_EXPR:
176 return 1;
177
178 case COMPOUND_EXPR:
179 return lvalue_p (TREE_OPERAND (ref, 1));
180
181 case MAX_EXPR:
182 case MIN_EXPR:
183 return (lvalue_p (TREE_OPERAND (ref, 0))
184 && lvalue_p (TREE_OPERAND (ref, 1)));
185 }
186
187 return 0;
188 }
189
190 /* Return nonzero if REF is an lvalue valid for this language;
191 otherwise, print an error message and return zero. */
192
193 int
194 lvalue_or_else (ref, string)
195 tree ref;
196 char *string;
197 {
198 int win = lvalue_p (ref);
199 if (! win)
200 error ("non-lvalue in %s", string);
201 return win;
202 }
203
204 /* INIT is a CALL_EXPR which needs info about its target.
205 TYPE is the type that this initialization should appear to have.
206
207 Build an encapsulation of the initialization to perform
208 and return it so that it can be processed by language-independent
209 and language-specific expression expanders.
210
211 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
212 Otherwise, cleanups are not built here. For example, when building
213 an initialization for a stack slot whose cleanup the called function
214 already handles, we would not want to build another one here. */
215 tree
216 build_cplus_new (type, init, with_cleanup_p)
217 tree type;
218 tree init;
219 int with_cleanup_p;
220 {
221 tree slot;
222 tree rval;
223
224 slot = build (VAR_DECL, type);
225 layout_decl (slot, 0);
226 rval = build (NEW_EXPR, type,
227 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
228 TREE_SIDE_EFFECTS (rval) = 1;
229 TREE_ADDRESSABLE (rval) = 1;
230 rval = build (TARGET_EXPR, type, slot, rval, 0);
231 TREE_SIDE_EFFECTS (rval) = 1;
232 TREE_ADDRESSABLE (rval) = 1;
233
234 #if 0
235 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
236 {
237 TREE_OPERAND (rval, 2) = error_mark_node;
238 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
239 build_delete (build_pointer_type (type),
240 build_unary_op (ADDR_EXPR, slot, 0),
241 integer_two_node,
242 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
243 TREE_SIDE_EFFECTS (rval) = 1;
244 TREE_ADDRESSABLE (rval) = 1;
245 }
246 #endif
247 return rval;
248 }
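/* Schematically (a sketch, not an exact tree dump), the node returned
   above for a constructor call INIT of type TYPE is

       TARGET_EXPR <slot,
                    NEW_EXPR <TREE_OPERAND (init, 0),
                              TREE_OPERAND (init, 1),
                              slot>>

   where `slot' is the anonymous VAR_DECL built above to receive the
   value; both nodes are marked TREE_SIDE_EFFECTS and TREE_ADDRESSABLE.  */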
249
250 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
251 these CALL_EXPRs with tree nodes that will perform the cleanups. */
252
253 tree
254 break_out_cleanups (exp)
255 tree exp;
256 {
257 tree tmp = exp;
258
259 if (TREE_CODE (tmp) == CALL_EXPR
260 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
261 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
262
263 while (TREE_CODE (tmp) == NOP_EXPR
264 || TREE_CODE (tmp) == CONVERT_EXPR
265 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
266 {
267 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
268 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
269 {
270 TREE_OPERAND (tmp, 0)
271 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
272 TREE_OPERAND (tmp, 0), 1);
273 break;
274 }
275 else
276 tmp = TREE_OPERAND (tmp, 0);
277 }
278 return exp;
279 }
280
281 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
282 copies where they are found. Returns a deep copy of all nodes transitively
283 containing CALL_EXPRs. */
284
285 tree
286 break_out_calls (exp)
287 tree exp;
288 {
289 register tree t1, t2;
290 register enum tree_code code;
291 register int changed = 0;
292 register int i;
293
294 if (exp == NULL_TREE)
295 return exp;
296
297 code = TREE_CODE (exp);
298
299 if (code == CALL_EXPR)
300 return copy_node (exp);
301
302 /* Don't try and defeat a save_expr, as it should only be done once. */
303 if (code == SAVE_EXPR)
304 return exp;
305
306 switch (TREE_CODE_CLASS (code))
307 {
308 default:
309 abort ();
310
311 case 'c': /* a constant */
312 case 't': /* a type node */
313 case 'x': /* something random, like an identifier or an ERROR_MARK. */
314 return exp;
315
316 case 'd': /* A decl node */
317 #if 0 /* This is bogus. jason 9/21/94 */
318
319 t1 = break_out_calls (DECL_INITIAL (exp));
320 if (t1 != DECL_INITIAL (exp))
321 {
322 exp = copy_node (exp);
323 DECL_INITIAL (exp) = t1;
324 }
325 #endif
326 return exp;
327
328 case 'b': /* A block node */
329 {
330 /* Don't know how to handle these correctly yet. Must do a
331 break_out_calls on all DECL_INITIAL values for local variables,
332 and also break_out_calls on all sub-blocks and sub-statements. */
333 abort ();
334 }
335 return exp;
336
337 case 'e': /* an expression */
338 case 'r': /* a reference */
339 case 's': /* an expression with side effects */
340 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
341 {
342 t1 = break_out_calls (TREE_OPERAND (exp, i));
343 if (t1 != TREE_OPERAND (exp, i))
344 {
345 exp = copy_node (exp);
346 TREE_OPERAND (exp, i) = t1;
347 }
348 }
349 return exp;
350
351 case '<': /* a comparison expression */
352 case '2': /* a binary arithmetic expression */
353 t2 = break_out_calls (TREE_OPERAND (exp, 1));
354 if (t2 != TREE_OPERAND (exp, 1))
355 changed = 1;
356 case '1': /* a unary arithmetic expression */
357 t1 = break_out_calls (TREE_OPERAND (exp, 0));
358 if (t1 != TREE_OPERAND (exp, 0))
359 changed = 1;
360 if (changed)
361 {
362 if (tree_code_length[(int) code] == 1)
363 return build1 (code, TREE_TYPE (exp), t1);
364 else
365 return build (code, TREE_TYPE (exp), t1, t2);
366 }
367 return exp;
368 }
369
370 }
371 \f
372 extern struct obstack *current_obstack;
373 extern struct obstack permanent_obstack, class_obstack;
374 extern struct obstack *saveable_obstack;
375
376 /* Here is how primitive or already-canonicalized types' hash
377 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
378 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
379
380 /* Construct, lay out, and return the type of methods belonging to class
381 BASETYPE whose arguments are described by ARGTYPES and whose values
382 are described by RETTYPE. If such a type exists already, reuse it. */
383 tree
384 build_cplus_method_type (basetype, rettype, argtypes)
385 tree basetype, rettype, argtypes;
386 {
387 register tree t;
388 tree ptype;
389 int hashcode;
390
391 /* Make a node of the sort we want. */
392 t = make_node (METHOD_TYPE);
393
394 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
395 TREE_TYPE (t) = rettype;
396 if (IS_SIGNATURE (basetype))
397 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
398 TYPE_READONLY (basetype),
399 TYPE_VOLATILE (basetype));
400 else
401 ptype = build_pointer_type (basetype);
402
403 /* The actual arglist for this function includes a "hidden" argument
404 which is "this". Put it into the list of argument types. */
405
406 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
407 TYPE_ARG_TYPES (t) = argtypes;
408 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
409
410 /* If we already have such a type, use the old one and free this one.
411 Note that it also frees up the above cons cell if found. */
412 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
413 t = type_hash_canon (hashcode, t);
414
415 if (TYPE_SIZE (t) == 0)
416 layout_type (t);
417
418 return t;
419 }
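/* For example (approximate): for a member function `int X::f (char)',
   the METHOD_TYPE built above has

       TYPE_METHOD_BASETYPE  = X
       TREE_TYPE             = int
       TYPE_ARG_TYPES        = (X *, char, ...original terminator)

   i.e. the pointer-to-X "this" argument is consed onto the front of
   the argument-type list the caller supplied.  */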
420
421 tree
422 build_cplus_staticfn_type (basetype, rettype, argtypes)
423 tree basetype, rettype, argtypes;
424 {
425 register tree t;
426 int hashcode;
427
428 /* Make a node of the sort we want. */
429 t = make_node (FUNCTION_TYPE);
430
431 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
432 TREE_TYPE (t) = rettype;
433
434 TYPE_ARG_TYPES (t) = argtypes;
435
436 /* If we already have such a type, use the old one and free this one.
437 Note that it also frees up the above cons cell if found. */
438 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
439 t = type_hash_canon (hashcode, t);
440
441 if (TYPE_SIZE (t) == 0)
442 layout_type (t);
443
444 return t;
445 }
446
447 tree
448 build_cplus_array_type (elt_type, index_type)
449 tree elt_type;
450 tree index_type;
451 {
452 register struct obstack *ambient_obstack = current_obstack;
453 register struct obstack *ambient_saveable_obstack = saveable_obstack;
454 tree t;
455
456 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
457 make this permanent too. */
458 if (TREE_PERMANENT (elt_type)
459 && (index_type == 0 || TREE_PERMANENT (index_type)))
460 {
461 current_obstack = &permanent_obstack;
462 saveable_obstack = &permanent_obstack;
463 }
464
465 t = build_array_type (elt_type, index_type);
466
467 /* Propagate the element type's constructor and destructor needs up to
468 the array type, so that initialization takes place more easily. */
469 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
470 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
471 current_obstack = ambient_obstack;
472 saveable_obstack = ambient_saveable_obstack;
473 return t;
474 }
475 \f
476 /* Make a variant type in the proper way for C/C++, propagating qualifiers
477 down to the element type of an array. */
478
479 tree
480 cp_build_type_variant (type, constp, volatilep)
481 tree type;
482 int constp, volatilep;
483 {
484 if (TREE_CODE (type) == ARRAY_TYPE)
485 {
486 tree real_main_variant = TYPE_MAIN_VARIANT (type);
487
488 push_obstacks (TYPE_OBSTACK (real_main_variant),
489 TYPE_OBSTACK (real_main_variant));
490 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
491 constp, volatilep),
492 TYPE_DOMAIN (type));
493
494 /* TYPE must be on the same obstack as REAL_MAIN_VARIANT. If not,
495 make a copy. (TYPE might have come from the hash table and
496 REAL_MAIN_VARIANT might be in some function's obstack.) */
497
498 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
499 {
500 type = copy_node (type);
501 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
502 }
503
504 TYPE_MAIN_VARIANT (type) = real_main_variant;
505 pop_obstacks ();
506 }
507 return build_type_variant (type, constp, volatilep);
508 }
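/* For example (a sketch): `const' applied to the type `int[10]' must
   yield an array of const elements, not a const array, so with
   CONSTP == 1 and VOLATILEP == 0,

       cp_build_type_variant (int[10], 1, 0)  ==>  (const int)[10]

   which is why the qualifiers are pushed down to the element type
   before build_type_variant is called on the result.  */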
509 \f
510 /* Add OFFSET to all base types of BINFO.
511
512 OFFSET, which is a type offset, is a number of bytes.
513
514 Note that we don't have to worry about having two paths to the
515 same base type, since this type owns its association list. */
516 void
517 propagate_binfo_offsets (binfo, offset)
518 tree binfo;
519 tree offset;
520 {
521 tree binfos = BINFO_BASETYPES (binfo);
522 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
523
524 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
525 {
526 tree base_binfo = TREE_VEC_ELT (binfos, i);
527
528 if (TREE_VIA_VIRTUAL (base_binfo))
529 i += 1;
530 else
531 {
532 int j;
533 tree base_binfos = BINFO_BASETYPES (base_binfo);
534 tree delta;
535
536 for (j = i+1; j < n_baselinks; j++)
537 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
538 {
539 /* The next basetype offset must take into account the space
540 between the classes, not just the size of each class. */
541 delta = size_binop (MINUS_EXPR,
542 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
543 BINFO_OFFSET (base_binfo));
544 break;
545 }
546
547 #if 0
548 if (BINFO_OFFSET_ZEROP (base_binfo))
549 BINFO_OFFSET (base_binfo) = offset;
550 else
551 BINFO_OFFSET (base_binfo)
552 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
553 #else
554 BINFO_OFFSET (base_binfo) = offset;
555 #endif
556 if (base_binfos)
557 {
558 int k;
559 tree chain = NULL_TREE;
560
561 /* Now unshare the structure beneath BASE_BINFO. */
562 for (k = TREE_VEC_LENGTH (base_binfos)-1;
563 k >= 0; k--)
564 {
565 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
566 if (! TREE_VIA_VIRTUAL (base_base_binfo))
567 TREE_VEC_ELT (base_binfos, k)
568 = make_binfo (BINFO_OFFSET (base_base_binfo),
569 base_base_binfo,
570 BINFO_VTABLE (base_base_binfo),
571 BINFO_VIRTUALS (base_base_binfo),
572 chain);
573 chain = TREE_VEC_ELT (base_binfos, k);
574 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
575 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
576 BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
577 }
578 /* Now propagate the offset to the base types. */
579 propagate_binfo_offsets (base_binfo, offset);
580 }
581
582 /* Go to our next class that counts for offset propagation. */
583 i = j;
584 if (i < n_baselinks)
585 offset = size_binop (PLUS_EXPR, offset, delta);
586 }
587 }
588 }
589
590 /* Compute the actual offsets that our virtual base classes
591 will have *for this type*. This must be performed after
592 the fields are laid out, since virtual baseclasses must be
593 laid out at the end of the record.
594
595 Returns the maximum number of virtual functions any of the virtual
596 baseclasses provide. */
597 int
598 layout_vbasetypes (rec, max)
599 tree rec;
600 int max;
601 {
602 /* Get all the virtual base types that this type uses.
603 The TREE_VALUE slot holds the virtual baseclass type. */
604 tree vbase_types = get_vbase_types (rec);
605
606 #ifdef STRUCTURE_SIZE_BOUNDARY
607 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
608 #else
609 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
610 #endif
611 int desired_align;
612
613 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
614 where CONST_SIZE is an integer
615 and VAR_SIZE is a tree expression.
616 If VAR_SIZE is null, the size is just CONST_SIZE.
617 Naturally we try to avoid using VAR_SIZE. */
618 register unsigned const_size = 0;
619 register tree var_size = 0;
620 int nonvirtual_const_size;
621 tree nonvirtual_var_size;
622
623 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
624
625 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
626 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
627 else
628 var_size = TYPE_SIZE (rec);
629
630 nonvirtual_const_size = const_size;
631 nonvirtual_var_size = var_size;
632
633 while (vbase_types)
634 {
635 tree basetype = BINFO_TYPE (vbase_types);
636 tree offset;
637
638 desired_align = TYPE_ALIGN (basetype);
639 record_align = MAX (record_align, desired_align);
640
641 if (const_size == 0)
642 offset = integer_zero_node;
643 else
644 {
645 /* Give each virtual base type the alignment it wants. */
646 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
647 * TYPE_ALIGN (basetype);
648 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
649 }
650
651 if (CLASSTYPE_VSIZE (basetype) > max)
652 max = CLASSTYPE_VSIZE (basetype);
653 BINFO_OFFSET (vbase_types) = offset;
654
655 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
656 {
657 /* Every virtual baseclass takes at least a UNIT, so that we can
658 take its address and get something different for each base. */
659 const_size += MAX (BITS_PER_UNIT,
660 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
661 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
662 }
663 else if (var_size == 0)
664 var_size = TYPE_SIZE (basetype);
665 else
666 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
667
668 vbase_types = TREE_CHAIN (vbase_types);
669 }
670
671 if (const_size)
672 {
673 /* Because a virtual base might take a single byte above,
674 we have to re-adjust the total size to make sure it is
675 a multiple of the alignment. */
676 /* Give the whole object the alignment it wants. */
677 const_size = CEIL (const_size, record_align) * record_align;
678 }
679
680 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
681 here, as that is for this class, without any virtual base classes. */
682 TYPE_ALIGN (rec) = record_align;
683 if (const_size != nonvirtual_const_size)
684 {
685 CLASSTYPE_VBASE_SIZE (rec)
686 = size_int (const_size - nonvirtual_const_size);
687 TYPE_SIZE (rec) = size_int (const_size);
688 }
689
690 /* Now propagate offset information throughout the lattice
691 under the vbase type. */
692 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
693 vbase_types = TREE_CHAIN (vbase_types))
694 {
695 tree base_binfos = BINFO_BASETYPES (vbase_types);
696
697 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
698
699 if (base_binfos)
700 {
701 tree chain = NULL_TREE;
702 int j;
703 /* Now unshare the structure beneath BASE_BINFO. */
704
705 for (j = TREE_VEC_LENGTH (base_binfos)-1;
706 j >= 0; j--)
707 {
708 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
709 if (! TREE_VIA_VIRTUAL (base_base_binfo))
710 TREE_VEC_ELT (base_binfos, j)
711 = make_binfo (BINFO_OFFSET (base_base_binfo),
712 base_base_binfo,
713 BINFO_VTABLE (base_base_binfo),
714 BINFO_VIRTUALS (base_base_binfo),
715 chain);
716 chain = TREE_VEC_ELT (base_binfos, j);
717 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
718 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
719 BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
720 }
721
722 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
723 }
724 }
725
726 return max;
727 }
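/* A rough illustration (exact offsets depend on sizes and alignment):
   given

       struct V { int v; };
       struct A : virtual V { int a; };
       struct B : virtual V { int b; };
       struct C : A, B { int c; };

   the non-virtual parts of A, B and C are laid out first, and the
   single shared V subobject is then placed by the loop above at the
   end of C's record, its BINFO_OFFSET recording where it landed for
   this most-derived type.  */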
728
729 /* Lay out the base types of a record type, REC.
730 Tentatively set the size and alignment of REC
731 according to the base types alone.
732
733 Offsets for immediate nonvirtual baseclasses are also computed here.
734
735 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
736 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
737
738 Returns the list of virtual base classes in a FIELD_DECL chain. */
739 tree
740 layout_basetypes (rec, binfos)
741 tree rec, binfos;
742 {
743 /* Chain to hold all the new FIELD_DECLs which point at virtual
744 base classes. */
745 tree vbase_decls = NULL_TREE;
746
747 #ifdef STRUCTURE_SIZE_BOUNDARY
748 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
749 #else
750 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
751 #endif
752
753 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
754 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
755 the size is just CONST_SIZE. Naturally we try to avoid using
756 VAR_SIZE. And so far, we've been successful. */
757 #if 0
758 register tree var_size = 0;
759 #endif
760
761 register unsigned const_size = 0;
762 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
763
764 /* Handle basetypes almost like fields, but record their
765 offsets differently. */
766
767 for (i = 0; i < n_baseclasses; i++)
768 {
769 int inc, desired_align, int_vbase_size;
770 register tree base_binfo = TREE_VEC_ELT (binfos, i);
771 register tree basetype = BINFO_TYPE (base_binfo);
772 tree decl, offset;
773
774 if (TYPE_SIZE (basetype) == 0)
775 {
776 #if 0
777 /* This error is now reported in xref_tag, thus giving better
778 location information. */
779 error_with_aggr_type (base_binfo,
780 "base class `%s' has incomplete type");
781
782 TREE_VIA_PUBLIC (base_binfo) = 1;
783 TREE_VIA_PROTECTED (base_binfo) = 0;
784 TREE_VIA_VIRTUAL (base_binfo) = 0;
785
786 /* Should handle this better so that
787
788 class A;
789 class B: private A { virtual void F(); };
790
791 does not dump core when compiled. */
792 my_friendly_abort (121);
793 #endif
794 continue;
795 }
796
797 /* All basetypes are recorded in the association list of the
798 derived type. */
799
800 if (TREE_VIA_VIRTUAL (base_binfo))
801 {
802 int j;
803 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
804 + sizeof (VBASE_NAME) + 1);
805
806 /* The offset for a virtual base class is only used in computing
807 virtual function tables and for initializing virtual base
808 pointers. It is built once `get_vbase_types' is called. */
809
810 /* If this basetype can come from another vbase pointer
811 without an additional indirection, we will share
812 that pointer. If an indirection is involved, we
813 make our own pointer. */
814 for (j = 0; j < n_baseclasses; j++)
815 {
816 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
817 if (! TREE_VIA_VIRTUAL (other_base_binfo)
818 && binfo_member (basetype,
819 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
820 goto got_it;
821 }
822 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
823 decl = build_lang_field_decl (FIELD_DECL, get_identifier (name),
824 build_pointer_type (basetype));
825 /* If you change any of the below, take a look at all the
826 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
827 them too. */
828 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
829 DECL_VIRTUAL_P (decl) = 1;
830 DECL_FIELD_CONTEXT (decl) = rec;
831 DECL_CLASS_CONTEXT (decl) = rec;
832 DECL_FCONTEXT (decl) = basetype;
833 DECL_SAVED_INSNS (decl) = NULL_RTX;
834 DECL_FIELD_SIZE (decl) = 0;
835 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
836 TREE_CHAIN (decl) = vbase_decls;
837 BINFO_VPTR_FIELD (base_binfo) = decl;
838 vbase_decls = decl;
839
840 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
841 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
842 {
843 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
844 "destructor `%s' non-virtual");
845 warning ("in inheritance relationship `%s: virtual %s'",
846 TYPE_NAME_STRING (rec),
847 TYPE_NAME_STRING (basetype));
848 }
849 got_it:
850 /* The space this decl occupies has already been accounted for. */
851 continue;
852 }
853
854 if (const_size == 0)
855 offset = integer_zero_node;
856 else
857 {
858 /* Give each base type the alignment it wants. */
859 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
860 * TYPE_ALIGN (basetype);
861 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
862
863 #if 0
864 /* bpk: Disabled this check until someone is willing to
865 claim it as theirs and explain exactly what circumstances
866 warrant the warning. */
867 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
868 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
869 {
870 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
871 "destructor `%s' non-virtual");
872 warning ("in inheritance relationship `%s:%s %s'",
873 TYPE_NAME_STRING (rec),
874 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
875 TYPE_NAME_STRING (basetype));
876 }
877 #endif
878 }
879 BINFO_OFFSET (base_binfo) = offset;
880 if (CLASSTYPE_VSIZE (basetype))
881 {
882 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
883 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
884 }
885 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
886 TYPE_BINFO (rec) = base_binfo;
887
888 /* Add only the amount of storage not present in
889 the virtual baseclasses. */
890
891 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
892 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
893 {
894 inc = MAX (record_align,
895 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
896 - int_vbase_size));
897
898 /* Record must have at least as much alignment as any field. */
899 desired_align = TYPE_ALIGN (basetype);
900 record_align = MAX (record_align, desired_align);
901
902 const_size += inc;
903 }
904 }
905
906 if (const_size)
907 CLASSTYPE_SIZE (rec) = size_int (const_size);
908 else
909 CLASSTYPE_SIZE (rec) = integer_zero_node;
910 CLASSTYPE_ALIGN (rec) = record_align;
911
912 return vbase_decls;
913 }
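/* For instance (approximate): given

       struct V { ... };
       struct A : virtual V { ... };
       struct B : A, virtual V { ... };

   B does not get a virtual-base pointer FIELD_DECL of its own for V:
   its non-virtual base A already carries one that reaches V without an
   extra indirection, so the `got_it' path above is taken instead.  */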
914 \f
915 /* Hashing of lists so that we don't make duplicates.
916 The entry point is `list_hash_canon'. */
917
918 /* Each hash table slot is a bucket containing a chain
919 of these structures. */
920
921 struct list_hash
922 {
923 struct list_hash *next; /* Next structure in the bucket. */
924 int hashcode; /* Hash code of this list. */
925 tree list; /* The list recorded here. */
926 };
927
928 /* Now here is the hash table. When recording a list, it is added
929 to the slot whose index is the hash code mod the table size.
930 Note that the hash table is used for several kinds of lists.
931 While all these live in the same table, they are completely independent,
932 and the hash code is computed differently for each of these. */
933
934 #define TYPE_HASH_SIZE 59
935 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
936
937 /* Compute a hash code for a list (chain of TREE_LIST nodes
938 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
939 TREE_COMMON slots), by adding the hash codes of the individual entries. */
940
941 int
942 list_hash (list)
943 tree list;
944 {
945 register int hashcode = 0;
946
947 if (TREE_CHAIN (list))
948 hashcode += TYPE_HASH (TREE_CHAIN (list));
949
950 if (TREE_VALUE (list))
951 hashcode += TYPE_HASH (TREE_VALUE (list));
952 else
953 hashcode += 1007;
954 if (TREE_PURPOSE (list))
955 hashcode += TYPE_HASH (TREE_PURPOSE (list));
956 else
957 hashcode += 1009;
958 return hashcode;
959 }
960
961 /* Look in the list hash table for a list isomorphic to LIST,
962 with hash code HASHCODE. If one is found, return it. Otherwise return 0. */
963
964 tree
965 list_hash_lookup (hashcode, list)
966 int hashcode;
967 tree list;
968 {
969 register struct list_hash *h;
970 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
971 if (h->hashcode == hashcode
972 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
973 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
974 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
975 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
976 && TREE_VALUE (h->list) == TREE_VALUE (list)
977 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
978 {
979 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
980 return h->list;
981 }
982 return 0;
983 }
984
985 /* Add an entry to the list-hash-table
986 for a list LIST whose hash code is HASHCODE. */
987
988 void
989 list_hash_add (hashcode, list)
990 int hashcode;
991 tree list;
992 {
993 register struct list_hash *h;
994
995 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
996 h->hashcode = hashcode;
997 h->list = list;
998 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
999 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
1000 }
1001
1002 /* Given LIST, and HASHCODE its hash code, return the canonical
1003 object for an identical list if one already exists.
1004 Otherwise, return LIST, and record it as the canonical object
1005 if it is a permanent object.
1006
1007 To use this function, first create a list of the sort you want.
1008 Then compute its hash code from the fields of the list that
1009 make it different from other similar lists.
1010 Then call this function and use the value.
1011 This function frees the list you pass in if it is a duplicate. */
1012
1013 /* Set to 1 to debug without canonicalization. Never set by program. */
1014 static int debug_no_list_hash = 0;
1015
1016 tree
1017 list_hash_canon (hashcode, list)
1018 int hashcode;
1019 tree list;
1020 {
1021 tree t1;
1022
1023 if (debug_no_list_hash)
1024 return list;
1025
1026 t1 = list_hash_lookup (hashcode, list);
1027 if (t1 != 0)
1028 {
1029 obstack_free (&class_obstack, list);
1030 return t1;
1031 }
1032
1033 /* If this is a new list, record it for later reuse. */
1034 list_hash_add (hashcode, list);
1035
1036 return list;
1037 }
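/* A minimal usage sketch of the protocol described above; this is
   essentially what hash_tree_cons and hash_tree_chain below do:

       current_obstack = &class_obstack;
       t = tree_cons (purpose, value, chain);
       t = list_hash_canon (list_hash (t), t);
       current_obstack = ambient_obstack;

   The list must be consed on class_obstack, since a duplicate is freed
   back to that obstack; and callers must use the returned node rather
   than the one they built, which may have been freed.  */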
1038
1039 tree
1040 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1041 int via_public, via_virtual, via_protected;
1042 tree purpose, value, chain;
1043 {
1044 struct obstack *ambient_obstack = current_obstack;
1045 tree t;
1046 int hashcode;
1047
1048 current_obstack = &class_obstack;
1049 t = tree_cons (purpose, value, chain);
1050 TREE_VIA_PUBLIC (t) = via_public;
1051 TREE_VIA_PROTECTED (t) = via_protected;
1052 TREE_VIA_VIRTUAL (t) = via_virtual;
1053 hashcode = list_hash (t);
1054 t = list_hash_canon (hashcode, t);
1055 current_obstack = ambient_obstack;
1056 return t;
1057 }
1058
1059 /* Constructor for hashed lists. */
1060 tree
1061 hash_tree_chain (value, chain)
1062 tree value, chain;
1063 {
1064 struct obstack *ambient_obstack = current_obstack;
1065 tree t;
1066 int hashcode;
1067
1068 current_obstack = &class_obstack;
1069 t = tree_cons (NULL_TREE, value, chain);
1070 hashcode = list_hash (t);
1071 t = list_hash_canon (hashcode, t);
1072 current_obstack = ambient_obstack;
1073 return t;
1074 }
1075
1076 /* Similar, but used for concatenating two lists. */
1077 tree
1078 hash_chainon (list1, list2)
1079 tree list1, list2;
1080 {
1081 if (list2 == 0)
1082 return list1;
1083 if (list1 == 0)
1084 return list2;
1085 if (TREE_CHAIN (list1) == NULL_TREE)
1086 return hash_tree_chain (TREE_VALUE (list1), list2);
1087 return hash_tree_chain (TREE_VALUE (list1),
1088 hash_chainon (TREE_CHAIN (list1), list2));
1089 }
1090
1091 static tree
1092 get_identifier_list (value)
1093 tree value;
1094 {
1095 tree list = IDENTIFIER_AS_LIST (value);
1096 if (list != NULL_TREE
1097 && (TREE_CODE (list) != TREE_LIST
1098 || TREE_VALUE (list) != value))
1099 list = NULL_TREE;
1100 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1101 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1102 && IDENTIFIER_TYPE_VALUE (value)
1103 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1104 {
1105 tree type = IDENTIFIER_TYPE_VALUE (value);
1106
1107 if (TYPE_PTRMEMFUNC_P (type))
1108 list = NULL_TREE;
1109 else if (type == current_class_type)
1110 /* Don't mess up the constructor name. */
1111 list = tree_cons (NULL_TREE, value, NULL_TREE);
1112 else
1113 {
1114 register tree id;
1115 /* This will return the correct thing for regular types,
1116 nested types, and templates. Yay! */
1117 if (TYPE_NESTED_NAME (type))
1118 id = TYPE_NESTED_NAME (type);
1119 else
1120 id = TYPE_IDENTIFIER (type);
1121
1122 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
1123 CLASSTYPE_ID_AS_LIST (type)
1124 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
1125 list = CLASSTYPE_ID_AS_LIST (type);
1126 }
1127 }
1128 return list;
1129 }
1130
1131 tree
1132 get_decl_list (value)
1133 tree value;
1134 {
1135 tree list = NULL_TREE;
1136
1137 if (TREE_CODE (value) == IDENTIFIER_NODE)
1138 list = get_identifier_list (value);
1139 else if (TREE_CODE (value) == RECORD_TYPE
1140 && TYPE_LANG_SPECIFIC (value)
1141 && value == TYPE_MAIN_VARIANT (value))
1142 list = CLASSTYPE_AS_LIST (value);
1143
1144 if (list != NULL_TREE)
1145 {
1146 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1147 return list;
1148 }
1149
1150 return build_decl_list (NULL_TREE, value);
1151 }
1152
1153 /* Look in the list hash table for a list isomorphic to
1154 `build_tree_list (NULL_TREE, VALUE)'.
1155 If one is found, return it; otherwise build such a list, record it, and return it. */
1156
1157 tree
1158 list_hash_lookup_or_cons (value)
1159 tree value;
1160 {
1161 register int hashcode = TYPE_HASH (value);
1162 register struct list_hash *h;
1163 struct obstack *ambient_obstack;
1164 tree list = NULL_TREE;
1165
1166 if (TREE_CODE (value) == IDENTIFIER_NODE)
1167 list = get_identifier_list (value);
1168 else if (TREE_CODE (value) == TYPE_DECL
1169 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1170 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1171 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1172 else if (TREE_CODE (value) == RECORD_TYPE
1173 && TYPE_LANG_SPECIFIC (value))
1174 list = CLASSTYPE_AS_LIST (value);
1175
1176 if (list != NULL_TREE)
1177 {
1178 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1179 return list;
1180 }
1181
1182 if (debug_no_list_hash)
1183 return hash_tree_chain (value, NULL_TREE);
1184
1185 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1186 if (h->hashcode == hashcode
1187 && TREE_VIA_VIRTUAL (h->list) == 0
1188 && TREE_VIA_PUBLIC (h->list) == 0
1189 && TREE_VIA_PROTECTED (h->list) == 0
1190 && TREE_PURPOSE (h->list) == 0
1191 && TREE_VALUE (h->list) == value)
1192 {
1193 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1194 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1195 return h->list;
1196 }
1197
1198 ambient_obstack = current_obstack;
1199 current_obstack = &class_obstack;
1200 list = build_tree_list (NULL_TREE, value);
1201 list_hash_add (hashcode, list);
1202 current_obstack = ambient_obstack;
1203 return list;
1204 }
1205 \f
1206 /* Build an association between TYPE and some parameters:
1207
1208 OFFSET is the offset added to `this' to convert it to a pointer
1209 of type `TYPE *'
1210
1211 BINFO is the base binfo to use, if we are deriving from one. This
1212 is necessary, as we want specialized parent binfos from base
1213 classes, so that the VTABLE_NAMEs of bases are for the most derived
1214 type, instead of the simple type.
1215
1216 VTABLE is the virtual function table with which to initialize
1217 sub-objects of type TYPE.
1218
1219 VIRTUALS are the virtual functions sitting in VTABLE.
1220
1221 CHAIN are more associations we must retain. */
1222
1223 tree
1224 make_binfo (offset, binfo, vtable, virtuals, chain)
1225 tree offset, binfo;
1226 tree vtable, virtuals;
1227 tree chain;
1228 {
1229 tree new_binfo = make_tree_vec (6);
1230 tree type;
1231
1232 if (TREE_CODE (binfo) == TREE_VEC)
1233 type = BINFO_TYPE (binfo);
1234 else
1235 {
1236 type = binfo;
1237 binfo = TYPE_BINFO (binfo);
1238 }
1239
1240 TREE_CHAIN (new_binfo) = chain;
1241 if (chain)
1242 TREE_USED (new_binfo) = TREE_USED (chain);
1243
1244 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1245 BINFO_OFFSET (new_binfo) = offset;
1246 BINFO_VTABLE (new_binfo) = vtable;
1247 BINFO_VIRTUALS (new_binfo) = virtuals;
1248 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1249
1250 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1251 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1252 return new_binfo;
1253 }
1254
1255 /* Return the binfo value for ELEM in TYPE. */
1256
1257 tree
1258 binfo_value (elem, type)
1259 tree elem;
1260 tree type;
1261 {
1262 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1263 compiler_error ("base class `%s' ambiguous in binfo_value",
1264 TYPE_NAME_STRING (elem));
1265 if (elem == type)
1266 return TYPE_BINFO (type);
1267 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1268 return type;
1269 return get_binfo (elem, type, 0);
1270 }
1271
1272 tree
1273 reverse_path (path)
1274 tree path;
1275 {
1276 register tree prev = 0, tmp, next;
1277 for (tmp = path; tmp; tmp = next)
1278 {
1279 next = BINFO_INHERITANCE_CHAIN (tmp);
1280 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1281 prev = tmp;
1282 }
1283 return prev;
1284 }
1285
1286 tree
1287 virtual_member (elem, list)
1288 tree elem;
1289 tree list;
1290 {
1291 tree t;
1292 tree rval, nval;
1293
1294 for (t = list; t; t = TREE_CHAIN (t))
1295 if (elem == BINFO_TYPE (t))
1296 return t;
1297 rval = 0;
1298 for (t = list; t; t = TREE_CHAIN (t))
1299 {
1300 tree binfos = BINFO_BASETYPES (t);
1301 int i;
1302
1303 if (binfos != NULL_TREE)
1304 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1305 {
1306 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1307 if (nval)
1308 {
1309 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1310 my_friendly_abort (104);
1311 rval = nval;
1312 }
1313 }
1314 }
1315 return rval;
1316 }
1317
1318 void
1319 debug_binfo (elem)
1320 tree elem;
1321 {
1322 unsigned HOST_WIDE_INT n;
1323 tree virtuals;
1324
1325 fprintf (stderr, "type \"%s\"; offset = %d\n",
1326 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1327 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1328 fprintf (stderr, "vtable type:\n");
1329 debug_tree (BINFO_TYPE (elem));
1330 if (BINFO_VTABLE (elem))
1331 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1332 else
1333 fprintf (stderr, "no vtable decl yet\n");
1334 fprintf (stderr, "virtuals:\n");
1335 virtuals = BINFO_VIRTUALS (elem);
1336
1337 n = skip_rtti_stuff (&virtuals);
1338
1339 while (virtuals)
1340 {
1341 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1342 fprintf (stderr, "%s [%d =? %d]\n",
1343 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1344 n, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1345 ++n;
1346 virtuals = TREE_CHAIN (virtuals);
1347 }
1348 }
1349
1350 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1351 We expect a null pointer to mark the end of the chain.
1352 This is the Lisp primitive `length'. */
1353
1354 int
1355 decl_list_length (t)
1356 tree t;
1357 {
1358 register tree tail;
1359 register int len = 0;
1360
1361 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1362 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1363 for (tail = t; tail; tail = DECL_CHAIN (tail))
1364 len++;
1365
1366 return len;
1367 }
1368
1369 int
1370 count_functions (t)
1371 tree t;
1372 {
1373 if (TREE_CODE (t) == FUNCTION_DECL)
1374 return 1;
1375 else if (TREE_CODE (t) == TREE_LIST)
1376 return decl_list_length (TREE_VALUE (t));
1377
1378 my_friendly_abort (359);
1379 return 0;
1380 }
1381
1382 /* Like value_member, but for DECL_CHAINs. */
1383 tree
1384 decl_value_member (elem, list)
1385 tree elem, list;
1386 {
1387 while (list)
1388 {
1389 if (elem == list)
1390 return list;
1391 list = DECL_CHAIN (list);
1392 }
1393 return NULL_TREE;
1394 }
1395
1396 int
1397 is_overloaded_fn (x)
1398 tree x;
1399 {
1400 if (TREE_CODE (x) == FUNCTION_DECL)
1401 return 1;
1402
1403 if (TREE_CODE (x) == TREE_LIST
1404 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1405 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1406 return 1;
1407
1408 return 0;
1409 }
1410
1411 int
1412 really_overloaded_fn (x)
1413 tree x;
1414 {
1415 if (TREE_CODE (x) == TREE_LIST
1416 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1417 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1418 return 1;
1419
1420 return 0;
1421 }
1422
1423 tree
1424 get_first_fn (from)
1425 tree from;
1426 {
1427 if (TREE_CODE (from) == FUNCTION_DECL)
1428 return from;
1429
1430 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1431
1432 return TREE_VALUE (from);
1433 }
1434
1435 tree
1436 fnaddr_from_vtable_entry (entry)
1437 tree entry;
1438 {
1439 if (flag_vtable_thunks)
1440 {
1441 tree func = entry;
1442 if (TREE_CODE (func) == ADDR_EXPR)
1443 func = TREE_OPERAND (func, 0);
1444 if (TREE_CODE (func) == THUNK_DECL)
1445 return DECL_INITIAL (func);
1446 else
1447 return entry;
1448 }
1449 else
1450 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1451 }
1452
1453 void
1454 set_fnaddr_from_vtable_entry (entry, value)
1455 tree entry, value;
1456 {
1457 if (flag_vtable_thunks)
1458 abort ();
1459 else
1460 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
1461 }
1462
1463 tree
1464 function_arg_chain (t)
1465 tree t;
1466 {
1467 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1468 }
1469
1470 int
1471 promotes_to_aggr_type (t, code)
1472 tree t;
1473 enum tree_code code;
1474 {
1475 if (TREE_CODE (t) == code)
1476 t = TREE_TYPE (t);
1477 return IS_AGGR_TYPE (t);
1478 }
1479
1480 int
1481 is_aggr_type_2 (t1, t2)
1482 tree t1, t2;
1483 {
1484 if (TREE_CODE (t1) != TREE_CODE (t2))
1485 return 0;
1486 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1487 }
1488
1489 /* Give message using types TYPE1 and TYPE2 as arguments.
1490 PFN is the function which will print the message;
1491 S is the format string for PFN to use. */
1492 void
1493 message_2_types (pfn, s, type1, type2)
1494 void (*pfn) ();
1495 char *s;
1496 tree type1, type2;
1497 {
1498 tree name1 = TYPE_NAME (type1);
1499 tree name2 = TYPE_NAME (type2);
1500 if (TREE_CODE (name1) == TYPE_DECL)
1501 name1 = DECL_NAME (name1);
1502 if (TREE_CODE (name2) == TYPE_DECL)
1503 name2 = DECL_NAME (name2);
1504 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1505 }
1506 \f
1507 #define PRINT_RING_SIZE 4
1508
1509 char *
1510 lang_printable_name (decl)
1511 tree decl;
1512 {
1513 static tree decl_ring[PRINT_RING_SIZE];
1514 static char *print_ring[PRINT_RING_SIZE];
1515 static int ring_counter;
1516 int i;
1517
1518 /* Only cache functions. */
1519 if (TREE_CODE (decl) != FUNCTION_DECL
1520 || DECL_LANG_SPECIFIC (decl) == 0)
1521 return decl_as_string (decl, 1);
1522
1523 /* See if this print name is lying around. */
1524 for (i = 0; i < PRINT_RING_SIZE; i++)
1525 if (decl_ring[i] == decl)
1526 /* yes, so return it. */
1527 return print_ring[i];
1528
1529 if (++ring_counter == PRINT_RING_SIZE)
1530 ring_counter = 0;
1531
1532 if (current_function_decl != NULL_TREE)
1533 {
1534 if (decl_ring[ring_counter] == current_function_decl)
1535 ring_counter += 1;
1536 if (ring_counter == PRINT_RING_SIZE)
1537 ring_counter = 0;
1538 if (decl_ring[ring_counter] == current_function_decl)
1539 my_friendly_abort (106);
1540 }
1541
1542 if (print_ring[ring_counter])
1543 free (print_ring[ring_counter]);
1544
1545 {
1546 int print_ret_type_p
1547 = (!DECL_CONSTRUCTOR_P (decl)
1548 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1549
1550 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1551 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1552 strcpy (print_ring[ring_counter], name);
1553 decl_ring[ring_counter] = decl;
1554 }
1555 return print_ring[ring_counter];
1556 }
1557 \f
1558 /* Comparison function for sorting identifiers in RAISES lists.
1559 Note that because IDENTIFIER_NODEs are unique, we can sort
1560 them by address, saving an indirection. */
1561 static int
1562 id_cmp (p1, p2)
1563 tree *p1, *p2;
1564 {
1565 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1566 }
1567
1568 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1569 listed in RAISES. */
1570 tree
1571 build_exception_variant (type, raises)
1572 tree type;
1573 tree raises;
1574 {
1575 int i;
1576 tree v = TYPE_MAIN_VARIANT (type);
1577 tree t, t2, cname;
1578 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1579 int constp = TYPE_READONLY (type);
1580 int volatilep = TYPE_VOLATILE (type);
1581
1582 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1583 {
1584 if (TYPE_READONLY (v) != constp
1585 || TYPE_VOLATILE (v) != volatilep)
1586 continue;
1587
1588 /* @@ This should do set equality, not exact match. */
1589 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1590 /* List of exceptions raised matches previously found list.
1591
1592 @@ Nice to free up storage used in consing up the
1593 @@ list of exceptions raised. */
1594 return v;
1595 }
1596
1597 /* Need to build a new variant. */
1598 v = copy_node (type);
1599 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1600 TYPE_NEXT_VARIANT (type) = v;
1601 if (raises && ! TREE_PERMANENT (raises))
1602 {
1603 push_obstacks_nochange ();
1604 end_temporary_allocation ();
1605 raises = copy_list (raises);
1606 pop_obstacks ();
1607 }
1608 TYPE_RAISES_EXCEPTIONS (v) = raises;
1609 return v;
1610 }
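/* For example (a sketch): if `void f () throw (X);' is declared twice,
   the second call to build_exception_variant finds the variant made by
   the first one -- same qualifiers, and simple_cst_list_equal accepts
   its TYPE_RAISES_EXCEPTIONS list -- and returns it instead of
   building a new variant.  */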
1611
1612 /* Subroutine of copy_to_permanent
1613
1614 Assuming T is a node built bottom-up, make it all exist on
1615 the permanent obstack, if it is not permanent already. */
1616
1617 tree
1618 mapcar (t, func)
1619 tree t;
1620 tree (*func)();
1621 {
1622 enum tree_code code;
1623 tree tmp;
1624
1625 if (t == NULL_TREE)
1626 return t;
1627
1628 if (tmp = func (t), tmp != NULL_TREE)
1629 return tmp;
1630
1631 switch (code = TREE_CODE (t))
1632 {
1633 case ERROR_MARK:
1634 return error_mark_node;
1635
1636 case VAR_DECL:
1637 case FUNCTION_DECL:
1638 case CONST_DECL:
1639 break;
1640
1641 case PARM_DECL:
1642 {
1643 tree chain = TREE_CHAIN (t);
1644 t = copy_node (t);
1645 TREE_CHAIN (t) = mapcar (chain, func);
1646 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1647 DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
1648 DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
1649 return t;
1650 }
1651
1652 case TREE_LIST:
1653 {
1654 tree chain = TREE_CHAIN (t);
1655 t = copy_node (t);
1656 TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
1657 TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
1658 TREE_CHAIN (t) = mapcar (chain, func);
1659 return t;
1660 }
1661
1662 case TREE_VEC:
1663 {
1664 int len = TREE_VEC_LENGTH (t);
1665
1666 t = copy_node (t);
1667 while (len--)
1668 TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
1669 return t;
1670 }
1671
1672 case INTEGER_CST:
1673 case REAL_CST:
1674 case STRING_CST:
1675 return copy_node (t);
1676
1677 case COND_EXPR:
1678 case TARGET_EXPR:
1679 case NEW_EXPR:
1680 t = copy_node (t);
1681 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1682 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1683 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1684 return t;
1685
1686 case SAVE_EXPR:
1687 t = copy_node (t);
1688 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1689 return t;
1690
1691 case MODIFY_EXPR:
1692 case PLUS_EXPR:
1693 case MINUS_EXPR:
1694 case MULT_EXPR:
1695 case TRUNC_DIV_EXPR:
1696 case TRUNC_MOD_EXPR:
1697 case MIN_EXPR:
1698 case MAX_EXPR:
1699 case LSHIFT_EXPR:
1700 case RSHIFT_EXPR:
1701 case BIT_IOR_EXPR:
1702 case BIT_XOR_EXPR:
1703 case BIT_AND_EXPR:
1704 case BIT_ANDTC_EXPR:
1705 case TRUTH_ANDIF_EXPR:
1706 case TRUTH_ORIF_EXPR:
1707 case LT_EXPR:
1708 case LE_EXPR:
1709 case GT_EXPR:
1710 case GE_EXPR:
1711 case EQ_EXPR:
1712 case NE_EXPR:
1713 case CEIL_DIV_EXPR:
1714 case FLOOR_DIV_EXPR:
1715 case ROUND_DIV_EXPR:
1716 case CEIL_MOD_EXPR:
1717 case FLOOR_MOD_EXPR:
1718 case ROUND_MOD_EXPR:
1719 case COMPOUND_EXPR:
1720 case PREDECREMENT_EXPR:
1721 case PREINCREMENT_EXPR:
1722 case POSTDECREMENT_EXPR:
1723 case POSTINCREMENT_EXPR:
1724 case CALL_EXPR:
1725 t = copy_node (t);
1726 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1727 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1728 return t;
1729
1730 case CONVERT_EXPR:
1731 case ADDR_EXPR:
1732 case INDIRECT_REF:
1733 case NEGATE_EXPR:
1734 case BIT_NOT_EXPR:
1735 case TRUTH_NOT_EXPR:
1736 case NOP_EXPR:
1737 case COMPONENT_REF:
1738 t = copy_node (t);
1739 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1740 return t;
1741
1742 case POINTER_TYPE:
1743 return build_pointer_type (mapcar (TREE_TYPE (t), func));
1744 case REFERENCE_TYPE:
1745 return build_reference_type (mapcar (TREE_TYPE (t), func));
1746 case FUNCTION_TYPE:
1747 return build_function_type (mapcar (TREE_TYPE (t), func),
1748 mapcar (TYPE_ARG_TYPES (t), func));
1749 case ARRAY_TYPE:
1750 return build_array_type (mapcar (TREE_TYPE (t), func),
1751 mapcar (TYPE_DOMAIN (t), func));
1752 case INTEGER_TYPE:
1753 return build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
1754
1755 case OFFSET_TYPE:
1756 return build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
1757 mapcar (TREE_TYPE (t), func));
1758 case METHOD_TYPE:
1759 return build_method_type
1760 (mapcar (TYPE_METHOD_BASETYPE (t), func),
1761 build_function_type
1762 (mapcar (TREE_TYPE (t), func),
1763 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func)));
1764
1765 case RECORD_TYPE:
1766 if (TYPE_PTRMEMFUNC_P (t))
1767 return build_ptrmemfunc_type
1768 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
1769 /* else fall through */
1770
1771 /* This list is incomplete, but should suffice for now.
1772 It is very important that `sorry' does not call
1773 `report_error_function'. That could cause an infinite loop. */
1774 default:
1775 sorry ("initializer contains unrecognized tree code");
1776 return error_mark_node;
1777
1778 }
1779 my_friendly_abort (107);
1780 /* NOTREACHED */
1781 return NULL_TREE;
1782 }
1783
1784 static tree
1785 perm_manip (t)
1786 tree t;
1787 {
1788 if (TREE_PERMANENT (t))
1789 return t;
1790 return NULL_TREE;
1791 }
1792
1793 /* Assuming T is a node built bottom-up, make it all exist on
1794 the permanent obstack, if it is not permanent already. */
1795 tree
1796 copy_to_permanent (t)
1797 tree t;
1798 {
1799 register struct obstack *ambient_obstack = current_obstack;
1800 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1801 int resume;
1802
1803 if (t == NULL_TREE || TREE_PERMANENT (t))
1804 return t;
1805
1806 saveable_obstack = &permanent_obstack;
1807 current_obstack = saveable_obstack;
1808 resume = suspend_momentary ();
1809
1810 t = mapcar (t, perm_manip);
1811
1812 resume_momentary (resume);
1813 current_obstack = ambient_obstack;
1814 saveable_obstack = ambient_saveable_obstack;
1815
1816 return t;
1817 }
1818
1819 void
1820 print_lang_statistics ()
1821 {
1822 extern struct obstack maybepermanent_obstack;
1823 print_obstack_statistics ("class_obstack", &class_obstack);
1824 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1825 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1826 print_search_statistics ();
1827 print_class_statistics ();
1828 }
1829
1830 /* This is used by the `assert' macro. It is provided in libgcc.a,
1831 which `cc' doesn't know how to link. Note that the C++ front-end
1832 no longer actually uses the `assert' macro (instead, it calls
1833 my_friendly_assert). But all of the back-end files still need this. */
1834 void
1835 __eprintf (string, expression, line, filename)
1836 #ifdef __STDC__
1837 const char *string;
1838 const char *expression;
1839 unsigned line;
1840 const char *filename;
1841 #else
1842 char *string;
1843 char *expression;
1844 unsigned line;
1845 char *filename;
1846 #endif
1847 {
1848 fprintf (stderr, string, expression, line, filename);
1849 fflush (stderr);
1850 abort ();
1851 }
1852
1853 /* Return, as an INTEGER_CST node, the number of elements for
1854 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1855
1856 tree
1857 array_type_nelts_top (type)
1858 tree type;
1859 {
1860 return fold (build (PLUS_EXPR, sizetype,
1861 array_type_nelts (type),
1862 integer_one_node));
1863 }
1864
1865 /* Return, as an INTEGER_CST node, the number of elements for
1866 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1867 ARRAY_TYPEs that are clumped together. */
1868
1869 tree
1870 array_type_nelts_total (type)
1871 tree type;
1872 {
1873 tree sz = array_type_nelts_top (type);
1874 type = TREE_TYPE (type);
1875 while (TREE_CODE (type) == ARRAY_TYPE)
1876 {
1877 tree n = array_type_nelts_top (type);
1878 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1879 type = TREE_TYPE (type);
1880 }
1881 return sz;
1882 }
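/* For instance, for a declaration like `int a[3][4][5]' (with constant
   bounds), array_type_nelts_top yields 3 while array_type_nelts_total
   yields 3 * 4 * 5 == 60, both as folded INTEGER_CST nodes.  */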
1883
1884 static
1885 tree
1886 bot_manip (t)
1887 tree t;
1888 {
1889 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1890 return t;
1891 else if (TREE_CODE (t) == TARGET_EXPR)
1892 return build_cplus_new (TREE_TYPE (t),
1893 break_out_target_exprs (TREE_OPERAND (t, 1)), 0);
1894 return NULL_TREE;
1895 }
1896
1897 /* Actually, we'll just clean out the target exprs for the moment. */
1898 tree
1899 break_out_target_exprs (t)
1900 tree t;
1901 {
1902 return mapcar (t, bot_manip);
1903 }
1904
1905 tree
1906 unsave_expr (expr)
1907 tree expr;
1908 {
1909 tree t;
1910
1911 t = build1 (UNSAVE_EXPR, TREE_TYPE (expr), expr);
1912 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (expr);
1913 return t;
1914 }
1915
1916 /* Modify a tree in place so that all the evaluate-only-once things
1917 are cleared out. Return the EXPR given. */
1918 tree
1919 unsave_expr_now (expr)
1920 tree expr;
1921 {
1922 enum tree_code code;
1923 register int i;
1924
1925 if (expr == NULL_TREE)
1926 return expr;
1927
1928 code = TREE_CODE (expr);
1929 switch (code)
1930 {
1931 case SAVE_EXPR:
1932 SAVE_EXPR_RTL (expr) = NULL_RTX;
1933 break;
1934
1935 case TARGET_EXPR:
1936 sorry ("TARGET_EXPR reused inside UNSAVE_EXPR");
1937 break;
1938
1939 case RTL_EXPR:
1940 warning ("RTL_EXPR reused inside UNSAVE_EXPR");
1941 RTL_EXPR_SEQUENCE (expr) = NULL_RTX;
1942 break;
1943
1944 case CALL_EXPR:
1945 CALL_EXPR_RTL (expr) = NULL_RTX;
1946 if (TREE_OPERAND (expr, 1)
1947 && TREE_CODE (TREE_OPERAND (expr, 1)) == TREE_LIST)
1948 {
1949 tree exp = TREE_OPERAND (expr, 1);
1950 while (exp)
1951 {
1952 unsave_expr_now (TREE_VALUE (exp));
1953 exp = TREE_CHAIN (exp);
1954 }
1955 }
1956 break;
1957
1958 case WITH_CLEANUP_EXPR:
1959 warning ("WITH_CLEANUP_EXPR reused inside UNSAVE_EXPR");
1960 RTL_EXPR_RTL (expr) = NULL_RTX;
1961 break;
1962 }
1963
1964 switch (TREE_CODE_CLASS (code))
1965 {
1966 case 'c': /* a constant */
1967 case 't': /* a type node */
1968 case 'x': /* something random, like an identifier or an ERROR_MARK. */
1969 case 'd': /* A decl node */
1970 case 'b': /* A block node */
1971 return expr;
1972
1973 case 'e': /* an expression */
1974 case 'r': /* a reference */
1975 case 's': /* an expression with side effects */
1976 case '<': /* a comparison expression */
1977 case '2': /* a binary arithmetic expression */
1978 case '1': /* a unary arithmetic expression */
1979 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
1980 unsave_expr_now (TREE_OPERAND (expr, i));
1981 return expr;
1982
1983 default:
1984 my_friendly_abort (999);
1985 }
1986 }
1987
1988 /* Since CLEANUP may have SAVE_EXPRs in it, we protect it with an
1989 UNSAVE_EXPR, as the backend cannot yet handle SAVE_EXPRs in cleanups
1990 by itself. */
1991 int
1992 cp_expand_decl_cleanup (decl, cleanup)
1993 tree decl, cleanup;
1994 {
1995 return expand_decl_cleanup (decl, unsave_expr (cleanup));
1996 }