]> gcc.gnu.org Git - gcc.git/blob - gcc/cp/tree.c
81st Cygnus<->FSF merge
[gcc.git] / gcc / cp / tree.c
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29
30 #define CEIL(x,y) (((x) + (y) - 1) / (y))
31
/* Return nonzero if REF is an lvalue valid for this language.
   Lvalues can be assigned, unless they have TREE_READONLY.
   Lvalues can have their address taken, unless they have DECL_REGISTER.

   This is the strict ("real") predicate: unlike lvalue_p below, it does
   NOT accept class rvalues (TARGET_EXPRs or calls returning a class).  */

int
real_lvalue_p (ref)
     tree ref;
{
  if (! language_lvalue_valid (ref))
    return 0;

  /* An expression of reference type always designates an object.  */
  if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
    return 1;

  /* `this' is not an lvalue unless -fthis-is-variable is in force.  */
  if (ref == current_class_decl && flag_this_is_variable <= 0)
    return 0;

  switch (TREE_CODE (ref))
    {
      /* preincrements and predecrements are valid lvals, provided
	 what they refer to are valid lvals.  */
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case COMPONENT_REF:
    case SAVE_EXPR:
      return real_lvalue_p (TREE_OPERAND (ref, 0));

    case STRING_CST:
      return 1;

    case VAR_DECL:
      /* A readonly non-static member initialized inside its class
	 (DECL_IN_AGGR_P) has no storage of its own, so it cannot be
	 an lvalue.  */
      if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	  && DECL_LANG_SPECIFIC (ref)
	  && DECL_IN_AGGR_P (ref))
	return 0;
      /* Fall through.  */
    case INDIRECT_REF:
    case ARRAY_REF:
    case PARM_DECL:
    case RESULT_DECL:
    case ERROR_MARK:
      /* Objects are lvalues; functions and methods are not.  */
      if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
	return 1;
      break;

      /* A currently unresolved scope ref.  */
    case SCOPE_REF:
      my_friendly_abort (103);
      /* Fall through (not reached; abort does not return).  */
    case OFFSET_REF:
      /* A pointer-to-member-function reference is an lvalue.  */
      if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
	return 1;
      return real_lvalue_p (TREE_OPERAND (ref, 0))
	&& real_lvalue_p (TREE_OPERAND (ref, 1));
      break;

    case COND_EXPR:
      /* A conditional is an lvalue only if both arms are.  */
      return (real_lvalue_p (TREE_OPERAND (ref, 1))
	      && real_lvalue_p (TREE_OPERAND (ref, 2)));

    case MODIFY_EXPR:
      return 1;

    case COMPOUND_EXPR:
      /* The value of a comma expression is its second operand.  */
      return real_lvalue_p (TREE_OPERAND (ref, 1));

    case MAX_EXPR:
    case MIN_EXPR:
      /* GNU <? and >? operators are lvalues when both operands are.  */
      return (real_lvalue_p (TREE_OPERAND (ref, 0))
	      && real_lvalue_p (TREE_OPERAND (ref, 1)));
    }

  return 0;
}
105
/* Return nonzero if REF is an lvalue in the extended sense used by
   the C++ front end.  This is the permissive counterpart of
   real_lvalue_p above: in addition to everything that function
   accepts, TARGET_EXPRs and calls returning an aggregate type are
   treated as lvalues here.  */

int
lvalue_p (ref)
     tree ref;
{
  if (! language_lvalue_valid (ref))
    return 0;

  /* An expression of reference type always designates an object.  */
  if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
    return 1;

  /* `this' is not an lvalue unless -fthis-is-variable is in force.  */
  if (ref == current_class_decl && flag_this_is_variable <= 0)
    return 0;

  switch (TREE_CODE (ref))
    {
      /* preincrements and predecrements are valid lvals, provided
	 what they refer to are valid lvals.  */
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case COMPONENT_REF:
    case SAVE_EXPR:
      return lvalue_p (TREE_OPERAND (ref, 0));

    case STRING_CST:
      return 1;

    case VAR_DECL:
      /* A readonly non-static member initialized inside its class
	 (DECL_IN_AGGR_P) has no storage of its own, so it cannot be
	 an lvalue.  */
      if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	  && DECL_LANG_SPECIFIC (ref)
	  && DECL_IN_AGGR_P (ref))
	return 0;
      /* Fall through.  */
    case INDIRECT_REF:
    case ARRAY_REF:
    case PARM_DECL:
    case RESULT_DECL:
    case ERROR_MARK:
      /* Objects are lvalues; functions and methods are not.  */
      if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
	return 1;
      break;

      /* Class rvalues count as lvalues for this predicate only.  */
    case TARGET_EXPR:
      return 1;

    case CALL_EXPR:
      if (IS_AGGR_TYPE (TREE_TYPE (ref)))
	return 1;
      break;

      /* A currently unresolved scope ref.  */
    case SCOPE_REF:
      my_friendly_abort (103);
      /* Fall through (not reached; abort does not return).  */
    case OFFSET_REF:
      /* A pointer-to-member-function reference is an lvalue.  */
      if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
	return 1;
      return lvalue_p (TREE_OPERAND (ref, 0))
	&& lvalue_p (TREE_OPERAND (ref, 1));
      break;

    case COND_EXPR:
      /* A conditional is an lvalue only if both arms are.  */
      return (lvalue_p (TREE_OPERAND (ref, 1))
	      && lvalue_p (TREE_OPERAND (ref, 2)));

    case MODIFY_EXPR:
      return 1;

    case COMPOUND_EXPR:
      /* The value of a comma expression is its second operand.  */
      return lvalue_p (TREE_OPERAND (ref, 1));

    case MAX_EXPR:
    case MIN_EXPR:
      /* GNU <? and >? operators are lvalues when both operands are.  */
      return (lvalue_p (TREE_OPERAND (ref, 0))
	      && lvalue_p (TREE_OPERAND (ref, 1)));
    }

  return 0;
}
183
184 /* Return nonzero if REF is an lvalue valid for this language;
185 otherwise, print an error message and return zero. */
186
187 int
188 lvalue_or_else (ref, string)
189 tree ref;
190 char *string;
191 {
192 int win = lvalue_p (ref);
193 if (! win)
194 error ("non-lvalue in %s", string);
195 return win;
196 }
197
/* INIT is a CALL_EXPR which needs info about its target.
   TYPE is the type that this initialization should appear to have.

   Build an encapsulation of the initialization to perform
   and return it so that it can be processed by language-independent
   and language-specific expression expanders.

   The result is a TARGET_EXPR whose first operand is a compiler-made
   VAR_DECL slot of TYPE and whose second operand is a NEW_EXPR that
   redirects INIT's call to construct directly into that slot.
   WITH_CLEANUP_P is not examined here; the TARGET_EXPR's cleanup
   operand is left 0.  */
tree
build_cplus_new (type, init, with_cleanup_p)
     tree type;
     tree init;
     int with_cleanup_p;
{
  tree slot;	/* Anonymous temporary that will hold the object.  */
  tree rval;

  slot = build (VAR_DECL, type);
  layout_decl (slot, 0);
  /* Rebuild INIT's callee and argument list into a NEW_EXPR that
     also records the target SLOT.  */
  rval = build (NEW_EXPR, type,
		TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
  TREE_SIDE_EFFECTS (rval) = 1;
  TREE_ADDRESSABLE (rval) = 1;
  rval = build (TARGET_EXPR, type, slot, rval, 0);
  TREE_SIDE_EFFECTS (rval) = 1;
  TREE_ADDRESSABLE (rval) = 1;

  return rval;
}
225
/* Recursively search EXP for CALL_EXPRs that need cleanups and replace
   these CALL_EXPRs with tree nodes that will perform the cleanups.

   Despite the word "recursively", only EXP itself and calls found
   under a chain of NOP_EXPR / CONVERT_EXPR / NON_LVALUE_EXPR wrappers
   are examined, and the walk stops at the first such call.  */

tree
break_out_cleanups (exp)
     tree exp;
{
  tree tmp = exp;

  /* A bare call returning a type with a destructor: wrap the whole
     expression in a TARGET_EXPR via build_cplus_new.  */
  if (TREE_CODE (tmp) == CALL_EXPR
      && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
    return build_cplus_new (TREE_TYPE (tmp), tmp, 1);

  /* Otherwise look through value-preserving conversions for such a
     call, and splice the wrapper in underneath the conversion.  */
  while (TREE_CODE (tmp) == NOP_EXPR
	 || TREE_CODE (tmp) == CONVERT_EXPR
	 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
	  && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
	{
	  TREE_OPERAND (tmp, 0)
	    = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
			       TREE_OPERAND (tmp, 0), 1);
	  break;
	}
      else
	tmp = TREE_OPERAND (tmp, 0);
    }
  /* EXP is returned (possibly modified in place), not TMP.  */
  return exp;
}
256
/* Recursively perform a preorder search EXP for CALL_EXPRs, making
   copies where they are found.  Returns a deep copy all nodes transitively
   containing CALL_EXPRs.

   Nodes that contain no CALL_EXPR are returned unchanged (shared);
   SAVE_EXPRs are never looked through, since they must only be
   evaluated once.  */

tree
break_out_calls (exp)
     tree exp;
{
  register tree t1, t2;
  register enum tree_code code;
  register int changed = 0;
  register int i;

  if (exp == NULL_TREE)
    return exp;

  code = TREE_CODE (exp);

  if (code == CALL_EXPR)
    return copy_node (exp);

  /* Don't try and defeat a save_expr, as it should only be done once.  */
  if (code == SAVE_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (code))
    {
    default:
      abort ();

    case 'c':  /* a constant */
    case 't':  /* a type node */
    case 'x':  /* something random, like an identifier or an ERROR_MARK.  */
      /* Leaves cannot contain calls; share them.  */
      return exp;

    case 'd':  /* A decl node */
#if 0               /* This is bogus.  jason 9/21/94 */

      t1 = break_out_calls (DECL_INITIAL (exp));
      if (t1 != DECL_INITIAL (exp))
	{
	  exp = copy_node (exp);
	  DECL_INITIAL (exp) = t1;
	}
#endif
      return exp;

    case 'b':  /* A block node */
      {
	/* Don't know how to handle these correctly yet.   Must do a
	   break_out_calls on all DECL_INITIAL values for local variables,
	   and also break_out_calls on all sub-blocks and sub-statements.  */
	abort ();
      }
      return exp;

    case 'e':  /* an expression */
    case 'r':  /* a reference */
    case 's':  /* an expression with side effects */
      /* Walk every operand; copy EXP lazily, only once a changed
	 operand is found.  */
      for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
	{
	  t1 = break_out_calls (TREE_OPERAND (exp, i));
	  if (t1 != TREE_OPERAND (exp, i))
	    {
	      exp = copy_node (exp);
	      TREE_OPERAND (exp, i) = t1;
	    }
	}
      return exp;

    case '<':  /* a comparison expression */
    case '2':  /* a binary arithmetic expression */
      t2 = break_out_calls (TREE_OPERAND (exp, 1));
      if (t2 != TREE_OPERAND (exp, 1))
	changed = 1;
      /* Fall through: operand 0 is handled in common with unary ops.  */
    case '1':  /* a unary arithmetic expression */
      t1 = break_out_calls (TREE_OPERAND (exp, 0));
      if (t1 != TREE_OPERAND (exp, 0))
	changed = 1;
      if (changed)
	{
	  if (tree_code_length[(int) code] == 1)
	    return build1 (code, TREE_TYPE (exp), t1);
	  else
	    return build (code, TREE_TYPE (exp), t1, t2);
	}
      return exp;
    }

}
347 \f
/* Obstacks owned by the garbage-collection-free allocator in tree.c
   and class.c; used below to control where new nodes are placed.  */
extern struct obstack *current_obstack;
extern struct obstack permanent_obstack, class_obstack;
extern struct obstack *saveable_obstack;

/* Here is how primitive or already-canonicalized types' hash
   codes are made.  MUST BE CONSISTENT WITH tree.c !!! */
#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)

/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments are described by ARGTYPES and whose values
   are described by RETTYPE.  If each type exists already, reuse it.  */
tree
build_cplus_method_type (basetype, rettype, argtypes)
     tree basetype, rettype, argtypes;
{
  register tree t;
  tree ptype;	/* Type of the implicit `this' argument.  */
  int hashcode;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  /* Signatures use their own fat-pointer representation for `this'.  */
  if (IS_SIGNATURE (basetype))
    ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
					  TYPE_READONLY (basetype),
					  TYPE_VOLATILE (basetype));
  else
    ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */

  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;
  TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial".  */

  /* If we already have such a type, use the old one and free this one.
     Note that it also frees up the above cons cell if found.  */
  hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
  t = type_hash_canon (hashcode, t);

  if (TYPE_SIZE (t) == 0)
    layout_type (t);

  return t;
}
396
/* Build an array type with element type ELT_TYPE and domain INDEX_TYPE
   (which may be 0 for an incomplete array), like build_array_type, but
   additionally propagate the C++ needs-constructing / needs-destructor
   flags from the element type, and allocate the new type permanently
   when both inputs are permanent.  */
tree
build_cplus_array_type (elt_type, index_type)
     tree elt_type;
     tree index_type;
{
  /* Remember the ambient obstacks so they can be restored on exit.  */
  register struct obstack *ambient_obstack = current_obstack;
  register struct obstack *ambient_saveable_obstack = saveable_obstack;
  tree t;

  /* We need a new one.  If both ELT_TYPE and INDEX_TYPE are permanent,
     make this permanent too.  */
  if (TREE_PERMANENT (elt_type)
      && (index_type == 0 || TREE_PERMANENT (index_type)))
    {
      current_obstack = &permanent_obstack;
      saveable_obstack = &permanent_obstack;
    }

  t = build_array_type (elt_type, index_type);

  /* Push these needs up so that initialization takes place
     more easily.  */
  TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
  TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
  current_obstack = ambient_obstack;
  saveable_obstack = ambient_saveable_obstack;
  return t;
}
425 \f
/* Make a variant type in the proper way for C/C++, propagating qualifiers
   down to the element type of an array.  CONSTP and VOLATILEP are the
   desired qualifiers; the unqualified main variant is preserved so
   type identity still works.  */

tree
cp_build_type_variant (type, constp, volatilep)
     tree type;
     int constp, volatilep;
{
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree real_main_variant = TYPE_MAIN_VARIANT (type);

      /* Build the qualified array on the same obstack as its main
	 variant so the two have the same lifetime.  */
      push_obstacks (TYPE_OBSTACK (real_main_variant),
		     TYPE_OBSTACK (real_main_variant));
      /* Qualify the element type, not the array itself.  */
      type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
							    constp, volatilep),
				     TYPE_DOMAIN (type));

      /* TYPE must be on same obstack as REAL_MAIN_VARIANT.  If not,
	 make a copy.  (TYPE might have come from the hash table and
	 REAL_MAIN_VARIANT might be in some function's obstack.)  */

      if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
	{
	  type = copy_node (type);
	  /* Cached pointer/reference types would point at the old copy.  */
	  TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
	}

      TYPE_MAIN_VARIANT (type) = real_main_variant;
      pop_obstacks ();
    }
  return build_type_variant (type, constp, volatilep);
}
459 \f
/* Add OFFSET to all base types of BINFO.

   OFFSET, which is a type offset, is number of bytes.

   Note that we don't have to worry about having two paths to the
   same base type, since this type owns its association list.
   Virtual bases are skipped: their offsets are assigned globally
   by layout_vbasetypes.  */
void
propagate_binfo_offsets (binfo, offset)
     tree binfo;
     tree offset;
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  for (i = 0; i < n_baselinks; /* note increment is done in the loop.  */)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);

      if (TREE_VIA_VIRTUAL (base_binfo))
	i += 1;
      else
	{
	  int j;
	  tree base_binfos = BINFO_BASETYPES (base_binfo);
	  /* Distance to the next non-virtual base; only set (and only
	     used) when such a base exists at index J below.  */
	  tree delta;

	  for (j = i+1; j < n_baselinks; j++)
	    if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
	      {
		/* The next basetype offset must take into account the space
		   between the classes, not just the size of each class.  */
		delta = size_binop (MINUS_EXPR,
				    BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
				    BINFO_OFFSET (base_binfo));
		break;
	      }

#if 0
	  if (BINFO_OFFSET_ZEROP (base_binfo))
	    BINFO_OFFSET (base_binfo) = offset;
	  else
	    BINFO_OFFSET (base_binfo)
	      = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
#else
	  BINFO_OFFSET (base_binfo) = offset;
#endif
	  if (base_binfos)
	    {
	      int k;
	      tree chain = NULL_TREE;

	      /* Now unshare the structure beneath BASE_BINFO.  */
	      for (k = TREE_VEC_LENGTH (base_binfos)-1;
		   k >= 0; k--)
		{
		  tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
		  /* Non-virtual sub-binfos get private copies; virtual
		     ones stay shared.  */
		  if (! TREE_VIA_VIRTUAL (base_base_binfo))
		    TREE_VEC_ELT (base_binfos, k)
		      = make_binfo (BINFO_OFFSET (base_base_binfo),
				    base_base_binfo,
				    BINFO_VTABLE (base_base_binfo),
				    BINFO_VIRTUALS (base_base_binfo),
				    chain);
		  chain = TREE_VEC_ELT (base_binfos, k);
		  TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
		  TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
		  BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
		}
	      /* Now propagate the offset to the base types.  */
	      propagate_binfo_offsets (base_binfo, offset);
	    }

	  /* Go to our next class that counts for offset propagation.  */
	  i = j;
	  /* DELTA was set above whenever I is still in range here.  */
	  if (i < n_baselinks)
	    offset = size_binop (PLUS_EXPR, offset, delta);
	}
    }
}
539
/* Compute the actual offsets that our virtual base classes
   will have *for this type*.  This must be performed after
   the fields are laid out, since virtual baseclasses must
   lay down at the end of the record.

   MAX is the largest virtual-function-table size seen so far.
   Returns the maximum number of virtual functions any of the virtual
   baseclasses provide.  */
int
layout_vbasetypes (rec, max)
     tree rec;
     int max;
{
  /* Get all the virtual base types that this type uses.
     The TREE_VALUE slot holds the virtual baseclass type.  */
  tree vbase_types = get_vbase_types (rec);

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif
  int desired_align;

  /* Record size so far is CONST_SIZE + VAR_SIZE bits,
     where CONST_SIZE is an integer
     and VAR_SIZE is a tree expression.
     If VAR_SIZE is null, the size is just CONST_SIZE.
     Naturally we try to avoid using VAR_SIZE.  */
  register unsigned const_size = 0;
  register tree var_size = 0;
  /* Size of the record before any virtual bases were appended.  */
  int nonvirtual_const_size;
  tree nonvirtual_var_size;

  CLASSTYPE_VBASECLASSES (rec) = vbase_types;

  if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
    const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
  else
    var_size = TYPE_SIZE (rec);

  nonvirtual_const_size = const_size;
  nonvirtual_var_size = var_size;

  /* Append each virtual base at the end of the record, suitably
     aligned, recording its offset in its binfo.  */
  while (vbase_types)
    {
      tree basetype = BINFO_TYPE (vbase_types);
      tree offset;

      desired_align = TYPE_ALIGN (basetype);
      record_align = MAX (record_align, desired_align);

      if (const_size == 0)
	offset = integer_zero_node;
      else
	{
	  /* Give each virtual base type the alignment it wants.  */
	  const_size = CEIL (const_size, TYPE_ALIGN (basetype))
	    * TYPE_ALIGN (basetype);
	  offset = size_int (CEIL (const_size, BITS_PER_UNIT));
	}

      if (CLASSTYPE_VSIZE (basetype) > max)
	max = CLASSTYPE_VSIZE (basetype);
      BINFO_OFFSET (vbase_types) = offset;

      if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
	{
	  /* Every virtual baseclass takes at least a UNIT, so that we can
	     take its address and get something different for each base.
	     Only the part of BASETYPE not itself made of virtual bases
	     is counted here.  */
	  const_size += MAX (BITS_PER_UNIT,
			     TREE_INT_CST_LOW (TYPE_SIZE (basetype))
			     - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
	}
      else if (var_size == 0)
	var_size = TYPE_SIZE (basetype);
      else
	var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));

      vbase_types = TREE_CHAIN (vbase_types);
    }

  if (const_size)
    {
      /* Because a virtual base might take a single byte above,
	 we have to re-adjust the total size to make sure it is
	 a multiple of the alignment.  */
      /* Give the whole object the alignment it wants.  */
      const_size = CEIL (const_size, record_align) * record_align;
    }

  /* Set the alignment in the complete type.  We don't set CLASSTYPE_ALIGN
     here, as that is for this class, without any virtual base classes.  */
  TYPE_ALIGN (rec) = record_align;
  if (const_size != nonvirtual_const_size)
    {
      CLASSTYPE_VBASE_SIZE (rec)
	= size_int (const_size - nonvirtual_const_size);
      TYPE_SIZE (rec) = size_int (const_size);
    }

  /* Now propagate offset information throughout the lattice
     under the vbase type.  */
  for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
       vbase_types = TREE_CHAIN (vbase_types))
    {
      tree base_binfos = BINFO_BASETYPES (vbase_types);

      BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);

      if (base_binfos)
	{
	  tree chain = NULL_TREE;
	  int j;
	  /* Now unshare the structure beneath BASE_BINFO.  */

	  for (j = TREE_VEC_LENGTH (base_binfos)-1;
	       j >= 0; j--)
	    {
	      tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
	      /* Non-virtual sub-binfos get private copies; virtual
		 ones stay shared.  */
	      if (! TREE_VIA_VIRTUAL (base_base_binfo))
		TREE_VEC_ELT (base_binfos, j)
		  = make_binfo (BINFO_OFFSET (base_base_binfo),
				base_base_binfo,
				BINFO_VTABLE (base_base_binfo),
				BINFO_VIRTUALS (base_base_binfo),
				chain);
	      chain = TREE_VEC_ELT (base_binfos, j);
	      TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
	      TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
	      BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
	    }

	  propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
	}
    }

  return max;
}
678
/* Lay out the base types of a record type, REC.
   Tentatively set the size and alignment of REC
   according to the base types alone.

   Offsets for immediate nonvirtual baseclasses are also computed here.

   TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
   creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.

   Returns list of virtual base classes in a FIELD_DECL chain.  */
tree
layout_basetypes (rec, binfos)
     tree rec, binfos;
{
  /* Chain to hold all the new FIELD_DECLs which point at virtual
     base classes.  */
  tree vbase_decls = NULL_TREE;

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif

  /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
     an integer and VAR_SIZE is a tree expression.  If VAR_SIZE is null,
     the size is just CONST_SIZE.  Naturally we try to avoid using
     VAR_SIZE.  And so far, we've been successful.  */
#if 0
  register tree var_size = 0;
#endif

  register unsigned const_size = 0;
  int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  /* Handle basetypes almost like fields, but record their
     offsets differently.  */

  for (i = 0; i < n_baseclasses; i++)
    {
      int inc, desired_align, int_vbase_size;
      register tree base_binfo = TREE_VEC_ELT (binfos, i);
      register tree basetype = BINFO_TYPE (base_binfo);
      tree decl, offset;

      /* Skip incomplete base classes; the error was already reported.  */
      if (TYPE_SIZE (basetype) == 0)
	{
#if 0
	  /* This error is now reported in xref_tag, thus giving better
	     location information.  */
	  error_with_aggr_type (base_binfo,
				"base class `%s' has incomplete type");

	  TREE_VIA_PUBLIC (base_binfo) = 1;
	  TREE_VIA_PROTECTED (base_binfo) = 0;
	  TREE_VIA_VIRTUAL (base_binfo) = 0;

	  /* Should handle this better so that

	     class A;
	     class B: private A { virtual void F(); };

	     does not dump core when compiled.  */
	  my_friendly_abort (121);
#endif
	  continue;
	}

      /* All basetypes are recorded in the association list of the
	 derived type.  */

      if (TREE_VIA_VIRTUAL (base_binfo))
	{
	  int j;
	  /* Stack buffer for the vbase pointer field's name.  */
	  char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
				       + sizeof (VBASE_NAME) + 1);

	  /* The offset for a virtual base class is only used in computing
	     virtual function tables and for initializing virtual base
	     pointers.  It is built once `get_vbase_types' is called.  */

	  /* If this basetype can come from another vbase pointer
	     without an additional indirection, we will share
	     that pointer.  If an indirection is involved, we
	     make our own pointer.  */
	  for (j = 0; j < n_baseclasses; j++)
	    {
	      tree other_base_binfo = TREE_VEC_ELT (binfos, j);
	      if (! TREE_VIA_VIRTUAL (other_base_binfo)
		  && binfo_member (basetype,
				   CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
		goto got_it;
	    }
	  sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
	  decl = build_lang_field_decl (FIELD_DECL, get_identifier (name),
					build_pointer_type (basetype));
	  /* If you change any of the below, take a look at all the
	     other VFIELD_BASEs and VTABLE_BASEs in the code, and change
	     them too.  */
	  DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
	  DECL_VIRTUAL_P (decl) = 1;
	  DECL_FIELD_CONTEXT (decl) = rec;
	  DECL_CLASS_CONTEXT (decl) = rec;
	  DECL_FCONTEXT (decl) = basetype;
	  DECL_SAVED_INSNS (decl) = NULL_RTX;
	  DECL_FIELD_SIZE (decl) = 0;
	  DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
	  TREE_CHAIN (decl) = vbase_decls;
	  BINFO_VPTR_FIELD (base_binfo) = decl;
	  vbase_decls = decl;

	  /* Warn about a virtual base with a non-virtual destructor.
	     Method 0 of CLASSTYPE_METHOD_VEC is the destructor slot.  */
	  if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
	      && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
	    {
	      warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
				 "destructor `%s' non-virtual");
	      warning ("in inheritance relationship `%s: virtual %s'",
		       TYPE_NAME_STRING (rec),
		       TYPE_NAME_STRING (basetype));
	    }
	got_it:
	  /* The space this decl occupies has already been accounted for.  */
	  continue;
	}

      if (const_size == 0)
	offset = integer_zero_node;
      else
	{
	  /* Give each base type the alignment it wants.  */
	  const_size = CEIL (const_size, TYPE_ALIGN (basetype))
	    * TYPE_ALIGN (basetype);
	  offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

#if 0
	  /* bpk: Disabled this check until someone is willing to
	     claim it as theirs and explain exactly what circumstances
	     warrant the warning.  */
	  if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
	      && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
	    {
	      warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
				 "destructor `%s' non-virtual");
	      warning ("in inheritance relationship `%s:%s %s'",
		       TYPE_NAME_STRING (rec),
		       TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
		       TYPE_NAME_STRING (basetype));
	    }
#endif
	}
      BINFO_OFFSET (base_binfo) = offset;
      if (CLASSTYPE_VSIZE (basetype))
	{
	  BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
	  BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
	}
      /* Push this binfo onto TYPE_BINFO (REC)'s chain.  */
      TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
      TYPE_BINFO (rec) = base_binfo;

      /* Add only the amount of storage not present in
	 the virtual baseclasses.  */

      int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
      if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
	{
	  inc = MAX (record_align,
		     (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
		      - int_vbase_size));

	  /* Record must have at least as much alignment as any field.  */
	  desired_align = TYPE_ALIGN (basetype);
	  record_align = MAX (record_align, desired_align);

	  const_size += inc;
	}
    }

  if (const_size)
    CLASSTYPE_SIZE (rec) = size_int (const_size);
  else
    CLASSTYPE_SIZE (rec) = integer_zero_node;
  CLASSTYPE_ALIGN (rec) = record_align;

  return vbase_decls;
}
864 \f
/* Hashing of lists so that we don't make duplicates.
   The entry point is `list_hash_canon'.  */

/* Each hash table slot is a bucket containing a chain
   of these structures.  */

struct list_hash
{
  struct list_hash *next;	/* Next structure in the bucket.  */
  int hashcode;			/* Hash code of this list.  */
  tree list;			/* The list recorded here.  */
};

/* Now here is the hash table.  When recording a list, it is added
   to the slot whose index is the hash code mod the table size.
   Note that the hash table is used for several kinds of lists.
   While all these live in the same table, they are completely independent,
   and the hash code is computed differently for each of these.  */

#define TYPE_HASH_SIZE 59
struct list_hash *list_hash_table[TYPE_HASH_SIZE];
886
887 /* Compute a hash code for a list (chain of TREE_LIST nodes
888 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
889 TREE_COMMON slots), by adding the hash codes of the individual entries. */
890
891 int
892 list_hash (list)
893 tree list;
894 {
895 register int hashcode = 0;
896
897 if (TREE_CHAIN (list))
898 hashcode += TYPE_HASH (TREE_CHAIN (list));
899
900 if (TREE_VALUE (list))
901 hashcode += TYPE_HASH (TREE_VALUE (list));
902 else
903 hashcode += 1007;
904 if (TREE_PURPOSE (list))
905 hashcode += TYPE_HASH (TREE_PURPOSE (list));
906 else
907 hashcode += 1009;
908 return hashcode;
909 }
910
911 /* Look in the type hash table for a type isomorphic to TYPE.
912 If one is found, return it. Otherwise return 0. */
913
914 tree
915 list_hash_lookup (hashcode, list)
916 int hashcode;
917 tree list;
918 {
919 register struct list_hash *h;
920 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
921 if (h->hashcode == hashcode
922 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
923 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
924 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
925 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
926 && TREE_VALUE (h->list) == TREE_VALUE (list)
927 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
928 {
929 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
930 return h->list;
931 }
932 return 0;
933 }
934
935 /* Add an entry to the list-hash-table
936 for a list TYPE whose hash code is HASHCODE. */
937
938 void
939 list_hash_add (hashcode, list)
940 int hashcode;
941 tree list;
942 {
943 register struct list_hash *h;
944
945 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
946 h->hashcode = hashcode;
947 h->list = list;
948 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
949 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
950 }
951
/* Given LIST, and HASHCODE its hash code, return the canonical
   object for an identical list if one already exists.
   Otherwise, return LIST, and record it as the canonical object
   if it is a permanent object.

   To use this function, first create a list of the sort you want.
   Then compute its hash code from the fields of the list that
   make it different from other similar lists.
   Then call this function and use the value.
   This function frees the list you pass in if it is a duplicate.  */

/* Set to 1 to debug without canonicalization.  Never set by program.  */
static int debug_no_list_hash = 0;

tree
list_hash_canon (hashcode, list)
     int hashcode;
     tree list;
{
  tree t1;

  if (debug_no_list_hash)
    return list;

  t1 = list_hash_lookup (hashcode, list);
  if (t1 != 0)
    {
      /* A duplicate: free the newly-built LIST (it is the most recent
	 allocation on the class obstack) and share the canonical one.  */
      obstack_free (&class_obstack, list);
      return t1;
    }

  /* If this is a new list, record it for later reuse.  */
  list_hash_add (hashcode, list);

  return list;
}
988
/* Build a canonicalized TREE_LIST node with the given PURPOSE, VALUE
   and CHAIN, and with the TREE_VIA_PUBLIC / TREE_VIA_PROTECTED /
   TREE_VIA_VIRTUAL bits set from VIA_PUBLIC, VIA_PROTECTED and
   VIA_VIRTUAL.  The node is allocated on the class obstack and shared
   through the list hash table.  */
tree
hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
     int via_public, via_virtual, via_protected;
     tree purpose, value, chain;
{
  struct obstack *ambient_obstack = current_obstack;
  tree t;
  int hashcode;

  /* Allocate on the class obstack so list_hash_canon may free it.  */
  current_obstack = &class_obstack;
  t = tree_cons (purpose, value, chain);
  TREE_VIA_PUBLIC (t) = via_public;
  TREE_VIA_PROTECTED (t) = via_protected;
  TREE_VIA_VIRTUAL (t) = via_virtual;
  hashcode = list_hash (t);
  t = list_hash_canon (hashcode, t);
  current_obstack = ambient_obstack;
  return t;
}
1008
1009 /* Constructor for hashed lists. */
1010 tree
1011 hash_tree_chain (value, chain)
1012 tree value, chain;
1013 {
1014 struct obstack *ambient_obstack = current_obstack;
1015 tree t;
1016 int hashcode;
1017
1018 current_obstack = &class_obstack;
1019 t = tree_cons (NULL_TREE, value, chain);
1020 hashcode = list_hash (t);
1021 t = list_hash_canon (hashcode, t);
1022 current_obstack = ambient_obstack;
1023 return t;
1024 }
1025
1026 /* Similar, but used for concatenating two lists. */
1027 tree
1028 hash_chainon (list1, list2)
1029 tree list1, list2;
1030 {
1031 if (list2 == 0)
1032 return list1;
1033 if (list1 == 0)
1034 return list2;
1035 if (TREE_CHAIN (list1) == NULL_TREE)
1036 return hash_tree_chain (TREE_VALUE (list1), list2);
1037 return hash_tree_chain (TREE_VALUE (list1),
1038 hash_chainon (TREE_CHAIN (list1), list2));
1039 }
1040
/* Return a TREE_LIST representing the IDENTIFIER_NODE VALUE, or
   NULL_TREE when no suitable cached list exists.  For identifiers
   naming a main-variant RECORD_TYPE, the class's shared
   CLASSTYPE_ID_AS_LIST is created on demand and returned (except for
   pointer-to-member-function records, and except inside the class
   itself, where a fresh list is built to keep constructor naming
   intact).  */
static tree
get_identifier_list (value)
     tree value;
{
  tree list = IDENTIFIER_AS_LIST (value);
  /* Reject a cached list that does not actually hold VALUE.  */
  if (list != NULL_TREE
      && (TREE_CODE (list) != TREE_LIST
	  || TREE_VALUE (list) != value))
    list = NULL_TREE;
  else if (IDENTIFIER_HAS_TYPE_VALUE (value)
	   && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
	   && IDENTIFIER_TYPE_VALUE (value)
	       == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
    {
      tree type = IDENTIFIER_TYPE_VALUE (value);

      if (TYPE_PTRMEMFUNC_P (type))
	list = NULL_TREE;
      else if (type == current_class_type)
	/* Don't mess up the constructor name.  */
	list = tree_cons (NULL_TREE, value, NULL_TREE);
      else
	{
	  register tree id;
	  /* This will return the correct thing for regular types,
	     nested types, and templates.  Yay! */
	  if (TYPE_NESTED_NAME (type))
	    id = TYPE_NESTED_NAME (type);
	  else
	    id = TYPE_IDENTIFIER (type);

	  /* Lazily create the class's permanent one-element list.  */
	  if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
	    CLASSTYPE_ID_AS_LIST (type)
	      = perm_tree_cons (NULL_TREE, id, NULL_TREE);
	  list = CLASSTYPE_ID_AS_LIST (type);
	}
    }
  return list;
}
1080
1081 tree
1082 get_decl_list (value)
1083 tree value;
1084 {
1085 tree list = NULL_TREE;
1086
1087 if (TREE_CODE (value) == IDENTIFIER_NODE)
1088 list = get_identifier_list (value);
1089 else if (TREE_CODE (value) == RECORD_TYPE
1090 && TYPE_LANG_SPECIFIC (value)
1091 && value == TYPE_MAIN_VARIANT (value))
1092 list = CLASSTYPE_AS_LIST (value);
1093
1094 if (list != NULL_TREE)
1095 {
1096 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1097 return list;
1098 }
1099
1100 return build_decl_list (NULL_TREE, value);
1101 }
1102 \f
1103 /* Build an association between TYPE and some parameters:
1104
1105 OFFSET is the offset added to `this' to convert it to a pointer
1106 of type `TYPE *'
1107
1108 BINFO is the base binfo to use, if we are deriving from one. This
1109 is necessary, as we want specialized parent binfos from base
1110 classes, so that the VTABLE_NAMEs of bases are for the most derived
1111 type, instead of of the simple type.
1112
1113 VTABLE is the virtual function table with which to initialize
1114 sub-objects of type TYPE.
1115
1116 VIRTUALS are the virtual functions sitting in VTABLE.
1117
1118 CHAIN are more associations we must retain. */
1119
1120 tree
1121 make_binfo (offset, binfo, vtable, virtuals, chain)
1122 tree offset, binfo;
1123 tree vtable, virtuals;
1124 tree chain;
1125 {
1126 tree new_binfo = make_tree_vec (6);
1127 tree type;
1128
1129 if (TREE_CODE (binfo) == TREE_VEC)
1130 type = BINFO_TYPE (binfo);
1131 else
1132 {
1133 type = binfo;
1134 binfo = TYPE_BINFO (binfo);
1135 }
1136
1137 TREE_CHAIN (new_binfo) = chain;
1138 if (chain)
1139 TREE_USED (new_binfo) = TREE_USED (chain);
1140
1141 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1142 BINFO_OFFSET (new_binfo) = offset;
1143 BINFO_VTABLE (new_binfo) = vtable;
1144 BINFO_VIRTUALS (new_binfo) = virtuals;
1145 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1146
1147 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1148 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1149 return new_binfo;
1150 }
1151
/* Return the binfo value for ELEM in TYPE. */

tree
binfo_value (elem, type)
     tree elem;
     tree type;
{
  /* get_base_distance returns -2 when ELEM is an ambiguous base.  */
  if (get_base_distance (elem, type, 0, (tree *)0) == -2)
    compiler_error ("base class `%s' ambiguous in binfo_value",
		    TYPE_NAME_STRING (elem));
  if (elem == type)
    return TYPE_BINFO (type);
  /* TYPE may itself already be the binfo for ELEM.  */
  if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
    return type;
  return get_binfo (elem, type, 0);
}
1168
1169 tree
1170 reverse_path (path)
1171 tree path;
1172 {
1173 register tree prev = 0, tmp, next;
1174 for (tmp = path; tmp; tmp = next)
1175 {
1176 next = BINFO_INHERITANCE_CHAIN (tmp);
1177 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1178 prev = tmp;
1179 }
1180 return prev;
1181 }
1182
1183 void
1184 debug_binfo (elem)
1185 tree elem;
1186 {
1187 unsigned HOST_WIDE_INT n;
1188 tree virtuals;
1189
1190 fprintf (stderr, "type \"%s\"; offset = %d\n",
1191 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1192 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1193 fprintf (stderr, "vtable type:\n");
1194 debug_tree (BINFO_TYPE (elem));
1195 if (BINFO_VTABLE (elem))
1196 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1197 else
1198 fprintf (stderr, "no vtable decl yet\n");
1199 fprintf (stderr, "virtuals:\n");
1200 virtuals = BINFO_VIRTUALS (elem);
1201
1202 n = skip_rtti_stuff (&virtuals);
1203
1204 while (virtuals)
1205 {
1206 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1207 fprintf (stderr, "%s [%d =? %d]\n",
1208 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1209 n, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1210 ++n;
1211 virtuals = TREE_CHAIN (virtuals);
1212 }
1213 }
1214
1215 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1216 We expect a null pointer to mark the end of the chain.
1217 This is the Lisp primitive `length'. */
1218
1219 int
1220 decl_list_length (t)
1221 tree t;
1222 {
1223 register tree tail;
1224 register int len = 0;
1225
1226 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1227 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1228 for (tail = t; tail; tail = DECL_CHAIN (tail))
1229 len++;
1230
1231 return len;
1232 }
1233
1234 int
1235 count_functions (t)
1236 tree t;
1237 {
1238 if (TREE_CODE (t) == FUNCTION_DECL)
1239 return 1;
1240 else if (TREE_CODE (t) == TREE_LIST)
1241 return decl_list_length (TREE_VALUE (t));
1242
1243 my_friendly_abort (359);
1244 return 0;
1245 }
1246
1247 int
1248 is_overloaded_fn (x)
1249 tree x;
1250 {
1251 if (TREE_CODE (x) == FUNCTION_DECL)
1252 return 1;
1253
1254 if (TREE_CODE (x) == TREE_LIST
1255 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1256 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1257 return 1;
1258
1259 return 0;
1260 }
1261
1262 int
1263 really_overloaded_fn (x)
1264 tree x;
1265 {
1266 if (TREE_CODE (x) == TREE_LIST
1267 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1268 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1269 return 1;
1270
1271 return 0;
1272 }
1273
1274 tree
1275 get_first_fn (from)
1276 tree from;
1277 {
1278 if (TREE_CODE (from) == FUNCTION_DECL)
1279 return from;
1280
1281 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1282
1283 return TREE_VALUE (from);
1284 }
1285
/* Return the function address held in vtable entry ENTRY, looking
   through a thunk when -fvtable-thunks is in effect.  */
tree
fnaddr_from_vtable_entry (entry)
     tree entry;
{
  if (flag_vtable_thunks)
    {
      tree func = entry;
      /* Strip a possible ADDR_EXPR wrapper to see the decl itself.  */
      if (TREE_CODE (func) == ADDR_EXPR)
	func = TREE_OPERAND (func, 0);
      /* A THUNK_DECL stores the real function in its DECL_INITIAL.  */
      if (TREE_CODE (func) == THUNK_DECL)
	return DECL_INITIAL (func);
      else
	return entry;
    }
  else
    /* Without thunks, ENTRY is a CONSTRUCTOR and the function address
       is its third element.  */
    return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
}
1303
1304 tree
1305 function_arg_chain (t)
1306 tree t;
1307 {
1308 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1309 }
1310
1311 int
1312 promotes_to_aggr_type (t, code)
1313 tree t;
1314 enum tree_code code;
1315 {
1316 if (TREE_CODE (t) == code)
1317 t = TREE_TYPE (t);
1318 return IS_AGGR_TYPE (t);
1319 }
1320
1321 int
1322 is_aggr_type_2 (t1, t2)
1323 tree t1, t2;
1324 {
1325 if (TREE_CODE (t1) != TREE_CODE (t2))
1326 return 0;
1327 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1328 }
1329
1330 /* Give message using types TYPE1 and TYPE2 as arguments.
1331 PFN is the function which will print the message;
1332 S is the format string for PFN to use. */
1333 void
1334 message_2_types (pfn, s, type1, type2)
1335 void (*pfn) ();
1336 char *s;
1337 tree type1, type2;
1338 {
1339 tree name1 = TYPE_NAME (type1);
1340 tree name2 = TYPE_NAME (type2);
1341 if (TREE_CODE (name1) == TYPE_DECL)
1342 name1 = DECL_NAME (name1);
1343 if (TREE_CODE (name2) == TYPE_DECL)
1344 name2 = DECL_NAME (name2);
1345 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1346 }
1347 \f
1348 #define PRINT_RING_SIZE 4
1349
1350 char *
1351 lang_printable_name (decl)
1352 tree decl;
1353 {
1354 static tree decl_ring[PRINT_RING_SIZE];
1355 static char *print_ring[PRINT_RING_SIZE];
1356 static int ring_counter;
1357 int i;
1358
1359 /* Only cache functions. */
1360 if (TREE_CODE (decl) != FUNCTION_DECL
1361 || DECL_LANG_SPECIFIC (decl) == 0)
1362 return decl_as_string (decl, 1);
1363
1364 /* See if this print name is lying around. */
1365 for (i = 0; i < PRINT_RING_SIZE; i++)
1366 if (decl_ring[i] == decl)
1367 /* yes, so return it. */
1368 return print_ring[i];
1369
1370 if (++ring_counter == PRINT_RING_SIZE)
1371 ring_counter = 0;
1372
1373 if (current_function_decl != NULL_TREE)
1374 {
1375 if (decl_ring[ring_counter] == current_function_decl)
1376 ring_counter += 1;
1377 if (ring_counter == PRINT_RING_SIZE)
1378 ring_counter = 0;
1379 if (decl_ring[ring_counter] == current_function_decl)
1380 my_friendly_abort (106);
1381 }
1382
1383 if (print_ring[ring_counter])
1384 free (print_ring[ring_counter]);
1385
1386 {
1387 int print_ret_type_p
1388 = (!DECL_CONSTRUCTOR_P (decl)
1389 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1390
1391 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1392 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1393 strcpy (print_ring[ring_counter], name);
1394 decl_ring[ring_counter] = decl;
1395 }
1396 return print_ring[ring_counter];
1397 }
1398 \f
1399 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1400 listed in RAISES. */
1401 tree
1402 build_exception_variant (type, raises)
1403 tree type;
1404 tree raises;
1405 {
1406 int i;
1407 tree v = TYPE_MAIN_VARIANT (type);
1408 tree t, t2, cname;
1409 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1410 int constp = TYPE_READONLY (type);
1411 int volatilep = TYPE_VOLATILE (type);
1412
1413 for (; v; v = TYPE_NEXT_VARIANT (v))
1414 {
1415 if (TYPE_READONLY (v) != constp
1416 || TYPE_VOLATILE (v) != volatilep)
1417 continue;
1418
1419 /* @@ This should do set equality, not exact match. */
1420 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1421 /* List of exceptions raised matches previously found list.
1422
1423 @@ Nice to free up storage used in consing up the
1424 @@ list of exceptions raised. */
1425 return v;
1426 }
1427
1428 /* Need to build a new variant. */
1429 v = build_type_copy (type);
1430
1431 if (raises && ! TREE_PERMANENT (raises))
1432 {
1433 push_obstacks_nochange ();
1434 end_temporary_allocation ();
1435 raises = copy_list (raises);
1436 pop_obstacks ();
1437 }
1438 TYPE_RAISES_EXCEPTIONS (v) = raises;
1439 return v;
1440 }
1441
/* Subroutine of copy_to_permanent

   Assuming T is a node build bottom-up, make it all exist on
   permanent obstack, if it is not permanent already.

   FUNC is applied to each node first; a non-null result is returned
   as-is (short-circuiting the copy).  Otherwise the node is copied
   with copy_node and mapcar is applied recursively to its operands.
   Codes this function does not know how to copy produce a sorry ()
   or an abort.  */

tree
mapcar (t, func)
     tree t;
     tree (*func)();
{
  enum tree_code code;
  tree tmp;

  if (t == NULL_TREE)
    return t;

  /* Give FUNC first crack at T.  */
  if (tmp = func (t), tmp != NULL_TREE)
    return tmp;

  switch (code = TREE_CODE (t))
    {
    case ERROR_MARK:
      return error_mark_node;

    case VAR_DECL:
    case FUNCTION_DECL:
    case CONST_DECL:
      /* These must be handled by FUNC; falling out of the switch
	 reaches the abort below.  */
      break;

    case PARM_DECL:
      {
	tree chain = TREE_CHAIN (t);
	t = copy_node (t);
	TREE_CHAIN (t) = mapcar (chain, func);
	TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
	DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
	DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
	return t;
      }

    case TREE_LIST:
      {
	tree chain = TREE_CHAIN (t);
	t = copy_node (t);
	TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
	TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
	TREE_CHAIN (t) = mapcar (chain, func);
	return t;
      }

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (t);

	t = copy_node (t);
	while (len--)
	  TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
	return t;
      }

    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
      return copy_node (t);

    /* Nodes with three operands.  */
    case COND_EXPR:
    case TARGET_EXPR:
    case NEW_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
      TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
      return t;

    case SAVE_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      return t;

    /* Nodes with two operands.  */
    case MODIFY_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_ANDTC_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case COMPOUND_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case CALL_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
      return t;

    /* Nodes with one operand.  */
    case CONVERT_EXPR:
    case ADDR_EXPR:
    case INDIRECT_REF:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case NOP_EXPR:
    case COMPONENT_REF:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      return t;

    /* Type nodes are rebuilt from their mapped components rather
       than copied, so type sharing is preserved.  */
    case POINTER_TYPE:
      return build_pointer_type (mapcar (TREE_TYPE (t), func));
    case REFERENCE_TYPE:
      return build_reference_type (mapcar (TREE_TYPE (t), func));
    case FUNCTION_TYPE:
      return build_function_type (mapcar (TREE_TYPE (t), func),
				  mapcar (TYPE_ARG_TYPES (t), func));
    case ARRAY_TYPE:
      return build_array_type (mapcar (TREE_TYPE (t), func),
			       mapcar (TYPE_DOMAIN (t), func));
    case INTEGER_TYPE:
      return build_index_type (mapcar (TYPE_MAX_VALUE (t), func));

    case OFFSET_TYPE:
      return build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
				mapcar (TREE_TYPE (t), func));
    case METHOD_TYPE:
      return build_method_type
	(mapcar (TYPE_METHOD_BASETYPE (t), func),
	 build_function_type
	 (mapcar (TREE_TYPE (t), func),
	  mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func)));

    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (t))
	return build_ptrmemfunc_type
	  (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
      /* else fall through */

      /* This list is incomplete, but should suffice for now.
	 It is very important that `sorry' does not call
	 `report_error_function'.  That could cause an infinite loop.  */
    default:
      sorry ("initializer contains unrecognized tree code");
      return error_mark_node;

    }
  my_friendly_abort (107);
  /* NOTREACHED */
  return NULL_TREE;
}
1613
1614 static tree
1615 perm_manip (t)
1616 tree t;
1617 {
1618 if (TREE_PERMANENT (t))
1619 return t;
1620 return NULL_TREE;
1621 }
1622
/* Assuming T is a node built bottom-up, make it all exist on
   permanent obstack, if it is not permanent already. */
tree
copy_to_permanent (t)
     tree t;
{
  register struct obstack *ambient_obstack = current_obstack;
  register struct obstack *ambient_saveable_obstack = saveable_obstack;
  int resume;

  if (t == NULL_TREE || TREE_PERMANENT (t))
    return t;

  /* Redirect all allocation to the permanent obstack for the
     duration of the copy, and suspend momentary allocation.  */
  saveable_obstack = &permanent_obstack;
  current_obstack = saveable_obstack;
  resume = suspend_momentary ();

  /* perm_manip shares nodes that are already permanent; everything
     else gets copied onto the permanent obstack.  */
  t = mapcar (t, perm_manip);

  /* Restore the ambient allocation state.  */
  resume_momentary (resume);
  current_obstack = ambient_obstack;
  saveable_obstack = ambient_saveable_obstack;

  return t;
}
1648
/* Dump allocation statistics for the C++ front end's obstacks, plus
   search and class statistics.  */
void
print_lang_statistics ()
{
  extern struct obstack maybepermanent_obstack;
  print_obstack_statistics ("class_obstack", &class_obstack);
  print_obstack_statistics ("permanent_obstack", &permanent_obstack);
  print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
  print_search_statistics ();
  print_class_statistics ();
}
1659
/* This is used by the `assert' macro.  It is provided in libgcc.a,
   which `cc' doesn't know how to link.  Note that the C++ front-end
   no longer actually uses the `assert' macro (instead, it calls
   my_friendly_assert).  But all of the back-end files still need this. */
void
__eprintf (string, expression, line, filename)
#ifdef __STDC__
     const char *string;
     const char *expression;
     unsigned line;
     const char *filename;
#else
     char *string;
     char *expression;
     unsigned line;
     char *filename;
#endif
{
  /* STRING is the assert macro's fixed format, expecting exactly
     (expression, line, filename) as arguments — not user input.  */
  fprintf (stderr, string, expression, line, filename);
  fflush (stderr);
  abort ();
}
1682
1683 /* Return, as an INTEGER_CST node, the number of elements for
1684 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1685
1686 tree
1687 array_type_nelts_top (type)
1688 tree type;
1689 {
1690 return fold (build (PLUS_EXPR, sizetype,
1691 array_type_nelts (type),
1692 integer_one_node));
1693 }
1694
1695 /* Return, as an INTEGER_CST node, the number of elements for
1696 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1697 ARRAY_TYPEs that are clumped together. */
1698
1699 tree
1700 array_type_nelts_total (type)
1701 tree type;
1702 {
1703 tree sz = array_type_nelts_top (type);
1704 type = TREE_TYPE (type);
1705 while (TREE_CODE (type) == ARRAY_TYPE)
1706 {
1707 tree n = array_type_nelts_top (type);
1708 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1709 type = TREE_TYPE (type);
1710 }
1711 return sz;
1712 }
1713
1714 static
1715 tree
1716 bot_manip (t)
1717 tree t;
1718 {
1719 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1720 return t;
1721 else if (TREE_CODE (t) == TARGET_EXPR)
1722 return build_cplus_new (TREE_TYPE (t),
1723 break_out_target_exprs (TREE_OPERAND (t, 1)), 0);
1724 return NULL_TREE;
1725 }
1726
/* Actually, we'll just clean out the target exprs for the moment. */
/* Copy T, replacing each contained TARGET_EXPR via bot_manip.  */
tree
break_out_target_exprs (t)
     tree t;
{
  return mapcar (t, bot_manip);
}
1734
1735 tree
1736 unsave_expr (expr)
1737 tree expr;
1738 {
1739 tree t;
1740
1741 t = build1 (UNSAVE_EXPR, TREE_TYPE (expr), expr);
1742 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (expr);
1743 return t;
1744 }
1745
1746 /* Modify a tree in place so that all the evaluate only once things
1747 are cleared out. Return the EXPR given. */
1748 tree
1749 unsave_expr_now (expr)
1750 tree expr;
1751 {
1752 enum tree_code code;
1753 register int i;
1754
1755 if (expr == NULL_TREE)
1756 return expr;
1757
1758 code = TREE_CODE (expr);
1759 switch (code)
1760 {
1761 case SAVE_EXPR:
1762 SAVE_EXPR_RTL (expr) = NULL_RTX;
1763 break;
1764
1765 case TARGET_EXPR:
1766 sorry ("TARGET_EXPR reused inside UNSAVE_EXPR");
1767 break;
1768
1769 case RTL_EXPR:
1770 warning ("RTL_EXPR reused inside UNSAVE_EXPR");
1771 RTL_EXPR_SEQUENCE (expr) = NULL_RTX;
1772 break;
1773
1774 case CALL_EXPR:
1775 CALL_EXPR_RTL (expr) = NULL_RTX;
1776 if (TREE_OPERAND (expr, 1)
1777 && TREE_CODE (TREE_OPERAND (expr, 1)) == TREE_LIST)
1778 {
1779 tree exp = TREE_OPERAND (expr, 1);
1780 while (exp)
1781 {
1782 unsave_expr_now (TREE_VALUE (exp));
1783 exp = TREE_CHAIN (exp);
1784 }
1785 }
1786 break;
1787 }
1788
1789 switch (TREE_CODE_CLASS (code))
1790 {
1791 case 'c': /* a constant */
1792 case 't': /* a type node */
1793 case 'x': /* something random, like an identifier or an ERROR_MARK. */
1794 case 'd': /* A decl node */
1795 case 'b': /* A block node */
1796 return expr;
1797
1798 case 'e': /* an expression */
1799 case 'r': /* a reference */
1800 case 's': /* an expression with side effects */
1801 case '<': /* a comparison expression */
1802 case '2': /* a binary arithmetic expression */
1803 case '1': /* a unary arithmetic expression */
1804 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
1805 unsave_expr_now (TREE_OPERAND (expr, i));
1806 return expr;
1807
1808 default:
1809 my_friendly_abort (999);
1810 }
1811 }
1812
1813 /* Since cleanup may have SAVE_EXPRs in it, we protect it with an
1814 UNSAVE_EXPR as the backend cannot yet handle SAVE_EXPRs in cleanups
1815 by itself. */
1816 int
1817 cp_expand_decl_cleanup (decl, cleanup)
1818 tree decl, cleanup;
1819 {
1820 return expand_decl_cleanup (decl, unsave_expr (cleanup));
1821 }
This page took 0.110659 seconds and 6 git commands to generate.