/* Language-dependent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
   Hacked by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #ifdef __STDC__
30 #include <stdarg.h>
31 #else
32 #include <varargs.h>
33 #endif
34
/* CEIL (X, Y): X divided by Y, rounded up to the next whole unit.
   Note X + Y - 1 must not overflow the operands' type.  */
#define CEIL(x,y) (((x) + (y) - 1) / (y))
36
37 /* Return nonzero if REF is an lvalue valid for this language.
38 Lvalues can be assigned, unless they have TREE_READONLY.
39 Lvalues can have their address taken, unless they have DECL_REGISTER. */
40
41 int
42 real_lvalue_p (ref)
43 tree ref;
44 {
45 if (! language_lvalue_valid (ref))
46 return 0;
47
48 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
49 return 1;
50
51 if (ref == current_class_decl && flag_this_is_variable <= 0)
52 return 0;
53
54 switch (TREE_CODE (ref))
55 {
56 /* preincrements and predecrements are valid lvals, provided
57 what they refer to are valid lvals. */
58 case PREINCREMENT_EXPR:
59 case PREDECREMENT_EXPR:
60 case COMPONENT_REF:
61 case SAVE_EXPR:
62 return real_lvalue_p (TREE_OPERAND (ref, 0));
63
64 case STRING_CST:
65 return 1;
66
67 case VAR_DECL:
68 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
69 && DECL_LANG_SPECIFIC (ref)
70 && DECL_IN_AGGR_P (ref))
71 return 0;
72 case INDIRECT_REF:
73 case ARRAY_REF:
74 case PARM_DECL:
75 case RESULT_DECL:
76 case ERROR_MARK:
77 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
78 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
79 return 1;
80 break;
81
82 /* A currently unresolved scope ref. */
83 case SCOPE_REF:
84 my_friendly_abort (103);
85 case OFFSET_REF:
86 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
87 return 1;
88 return real_lvalue_p (TREE_OPERAND (ref, 0))
89 && real_lvalue_p (TREE_OPERAND (ref, 1));
90 break;
91
92 case COND_EXPR:
93 return (real_lvalue_p (TREE_OPERAND (ref, 1))
94 && real_lvalue_p (TREE_OPERAND (ref, 2)));
95
96 case MODIFY_EXPR:
97 return 1;
98
99 case COMPOUND_EXPR:
100 return real_lvalue_p (TREE_OPERAND (ref, 1));
101
102 case MAX_EXPR:
103 case MIN_EXPR:
104 return (real_lvalue_p (TREE_OPERAND (ref, 0))
105 && real_lvalue_p (TREE_OPERAND (ref, 1)));
106 }
107
108 return 0;
109 }
110
111 int
112 lvalue_p (ref)
113 tree ref;
114 {
115 if (! language_lvalue_valid (ref))
116 return 0;
117
118 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
119 return 1;
120
121 if (ref == current_class_decl && flag_this_is_variable <= 0)
122 return 0;
123
124 switch (TREE_CODE (ref))
125 {
126 /* preincrements and predecrements are valid lvals, provided
127 what they refer to are valid lvals. */
128 case PREINCREMENT_EXPR:
129 case PREDECREMENT_EXPR:
130 case COMPONENT_REF:
131 case SAVE_EXPR:
132 return lvalue_p (TREE_OPERAND (ref, 0));
133
134 case STRING_CST:
135 return 1;
136
137 case VAR_DECL:
138 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
139 && DECL_LANG_SPECIFIC (ref)
140 && DECL_IN_AGGR_P (ref))
141 return 0;
142 case INDIRECT_REF:
143 case ARRAY_REF:
144 case PARM_DECL:
145 case RESULT_DECL:
146 case ERROR_MARK:
147 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
148 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
149 return 1;
150 break;
151
152 case TARGET_EXPR:
153 return 1;
154
155 case CALL_EXPR:
156 if (IS_AGGR_TYPE (TREE_TYPE (ref)))
157 return 1;
158 break;
159
160 /* A currently unresolved scope ref. */
161 case SCOPE_REF:
162 my_friendly_abort (103);
163 case OFFSET_REF:
164 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
165 return 1;
166 return lvalue_p (TREE_OPERAND (ref, 0))
167 && lvalue_p (TREE_OPERAND (ref, 1));
168 break;
169
170 case COND_EXPR:
171 return (lvalue_p (TREE_OPERAND (ref, 1))
172 && lvalue_p (TREE_OPERAND (ref, 2)));
173
174 case MODIFY_EXPR:
175 return 1;
176
177 case COMPOUND_EXPR:
178 return lvalue_p (TREE_OPERAND (ref, 1));
179
180 case MAX_EXPR:
181 case MIN_EXPR:
182 return (lvalue_p (TREE_OPERAND (ref, 0))
183 && lvalue_p (TREE_OPERAND (ref, 1)));
184 }
185
186 return 0;
187 }
188
189 /* Return nonzero if REF is an lvalue valid for this language;
190 otherwise, print an error message and return zero. */
191
192 int
193 lvalue_or_else (ref, string)
194 tree ref;
195 char *string;
196 {
197 int win = lvalue_p (ref);
198 if (! win)
199 error ("non-lvalue in %s", string);
200 return win;
201 }
202
203 /* INIT is a CALL_EXPR which needs info about its target.
204 TYPE is the type that this initialization should appear to have.
205
206 Build an encapsulation of the initialization to perform
207 and return it so that it can be processed by language-independent
208 and language-specific expression expanders. */
209 tree
210 build_cplus_new (type, init)
211 tree type;
212 tree init;
213 {
214 tree slot;
215 tree rval;
216
217 slot = build (VAR_DECL, type);
218 layout_decl (slot, 0);
219 rval = build (NEW_EXPR, type,
220 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
221 TREE_SIDE_EFFECTS (rval) = 1;
222 TREE_ADDRESSABLE (rval) = 1;
223 rval = build (TARGET_EXPR, type, slot, rval, 0);
224 TREE_SIDE_EFFECTS (rval) = 1;
225 TREE_ADDRESSABLE (rval) = 1;
226
227 return rval;
228 }
229
230 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
231 these CALL_EXPRs with tree nodes that will perform the cleanups. */
232
233 tree
234 break_out_cleanups (exp)
235 tree exp;
236 {
237 tree tmp = exp;
238
239 if (TREE_CODE (tmp) == CALL_EXPR
240 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
241 return build_cplus_new (TREE_TYPE (tmp), tmp);
242
243 while (TREE_CODE (tmp) == NOP_EXPR
244 || TREE_CODE (tmp) == CONVERT_EXPR
245 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
246 {
247 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
248 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
249 {
250 TREE_OPERAND (tmp, 0)
251 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
252 TREE_OPERAND (tmp, 0));
253 break;
254 }
255 else
256 tmp = TREE_OPERAND (tmp, 0);
257 }
258 return exp;
259 }
260
/* Recursively perform a preorder search EXP for CALL_EXPRs, making
   copies where they are found.  Returns a deep copy all nodes transitively
   containing CALL_EXPRs.

   Unchanged subtrees are shared, not copied; a node is copied only
   if one of its operands came back different.  */

tree
break_out_calls (exp)
     tree exp;
{
  register tree t1, t2;
  register enum tree_code code;
  register int changed = 0;
  register int i;

  if (exp == NULL_TREE)
    return exp;

  code = TREE_CODE (exp);

  if (code == CALL_EXPR)
    return copy_node (exp);

  /* Don't try and defeat a save_expr, as it should only be done once.  */
  if (code == SAVE_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (code))
    {
    default:
      abort ();

    case 'c':  /* a constant */
    case 't':  /* a type node */
    case 'x':  /* something random, like an identifier or an ERROR_MARK.  */
      return exp;

    case 'd':  /* A decl node */
#if 0   /* This is bogus.  jason 9/21/94 */

      t1 = break_out_calls (DECL_INITIAL (exp));
      if (t1 != DECL_INITIAL (exp))
	{
	  exp = copy_node (exp);
	  DECL_INITIAL (exp) = t1;
	}
#endif
      return exp;

    case 'b':  /* A block node */
      {
	/* Don't know how to handle these correctly yet.   Must do a
	   break_out_calls on all DECL_INITIAL values for local variables,
	   and also break_out_calls on all sub-blocks and sub-statements.  */
	abort ();
      }
      return exp;

    case 'e':  /* an expression */
    case 'r':  /* a reference */
    case 's':  /* an expression with side effects */
      /* Walk every operand; copy EXP the first time an operand changes
	 (subsequent changed operands then mutate the fresh copy).  */
      for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
	{
	  t1 = break_out_calls (TREE_OPERAND (exp, i));
	  if (t1 != TREE_OPERAND (exp, i))
	    {
	      exp = copy_node (exp);
	      TREE_OPERAND (exp, i) = t1;
	    }
	}
      return exp;

    case '<':  /* a comparison expression */
    case '2':  /* a binary arithmetic expression */
      t2 = break_out_calls (TREE_OPERAND (exp, 1));
      if (t2 != TREE_OPERAND (exp, 1))
	changed = 1;
      /* Falls through to handle operand 0; T2 is set only on this
	 path, but it is read below only when the code is binary.  */
    case '1':  /* a unary arithmetic expression */
      t1 = break_out_calls (TREE_OPERAND (exp, 0));
      if (t1 != TREE_OPERAND (exp, 0))
	changed = 1;
      if (changed)
	{
	  if (tree_code_length[(int) code] == 1)
	    return build1 (code, TREE_TYPE (exp), t1);
	  else
	    return build (code, TREE_TYPE (exp), t1, t2);
	}
      return exp;
    }

}
351 \f
/* Obstacks defined in the language-independent part of the compiler.  */
extern struct obstack *current_obstack;
extern struct obstack permanent_obstack, class_obstack;
extern struct obstack *saveable_obstack;

/* Here is how primitive or already-canonicalized types' hash
   codes are made.  MUST BE CONSISTENT WITH tree.c !!!
   (Hashes the pointer value itself, masked to 18 low-order bits.)  */
#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
360 /* Construct, lay out and return the type of methods belonging to class
361 BASETYPE and whose arguments are described by ARGTYPES and whose values
362 are described by RETTYPE. If each type exists already, reuse it. */
363 tree
364 build_cplus_method_type (basetype, rettype, argtypes)
365 tree basetype, rettype, argtypes;
366 {
367 register tree t;
368 tree ptype;
369 int hashcode;
370
371 /* Make a node of the sort we want. */
372 t = make_node (METHOD_TYPE);
373
374 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
375 TREE_TYPE (t) = rettype;
376 if (IS_SIGNATURE (basetype))
377 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
378 TYPE_READONLY (basetype),
379 TYPE_VOLATILE (basetype));
380 else
381 ptype = build_pointer_type (basetype);
382
383 /* The actual arglist for this function includes a "hidden" argument
384 which is "this". Put it into the list of argument types. */
385
386 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
387 TYPE_ARG_TYPES (t) = argtypes;
388 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
389
390 /* If we already have such a type, use the old one and free this one.
391 Note that it also frees up the above cons cell if found. */
392 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
393 t = type_hash_canon (hashcode, t);
394
395 if (TYPE_SIZE (t) == 0)
396 layout_type (t);
397
398 return t;
399 }
400
401 tree
402 build_cplus_array_type (elt_type, index_type)
403 tree elt_type;
404 tree index_type;
405 {
406 register struct obstack *ambient_obstack = current_obstack;
407 register struct obstack *ambient_saveable_obstack = saveable_obstack;
408 tree t;
409
410 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
411 make this permanent too. */
412 if (TREE_PERMANENT (elt_type)
413 && (index_type == 0 || TREE_PERMANENT (index_type)))
414 {
415 current_obstack = &permanent_obstack;
416 saveable_obstack = &permanent_obstack;
417 }
418
419 if (current_template_parms)
420 {
421 t = make_node (ARRAY_TYPE);
422 TREE_TYPE (t) = elt_type;
423 TYPE_DOMAIN (t) = index_type;
424 }
425 else
426 t = build_array_type (elt_type, index_type);
427
428 /* Push these needs up so that initialization takes place
429 more easily. */
430 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
431 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
432 current_obstack = ambient_obstack;
433 saveable_obstack = ambient_saveable_obstack;
434 return t;
435 }
436 \f
437 /* Make a variant type in the proper way for C/C++, propagating qualifiers
438 down to the element type of an array. */
439
440 tree
441 cp_build_type_variant (type, constp, volatilep)
442 tree type;
443 int constp, volatilep;
444 {
445 if (type == error_mark_node)
446 return type;
447
448 if (TREE_CODE (type) == ARRAY_TYPE)
449 {
450 tree real_main_variant = TYPE_MAIN_VARIANT (type);
451
452 push_obstacks (TYPE_OBSTACK (real_main_variant),
453 TYPE_OBSTACK (real_main_variant));
454 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
455 constp, volatilep),
456 TYPE_DOMAIN (type));
457
458 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
459 make a copy. (TYPE might have come from the hash table and
460 REAL_MAIN_VARIANT might be in some function's obstack.) */
461
462 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
463 {
464 type = copy_node (type);
465 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
466 }
467
468 TYPE_MAIN_VARIANT (type) = real_main_variant;
469 pop_obstacks ();
470 }
471 return build_type_variant (type, constp, volatilep);
472 }
473 \f
/* Add OFFSET to all base types reachable from BINFO.

   OFFSET, which is a type offset, is number of bytes.

   Note that we don't have to worry about having two paths to the
   same base type, since this type owns its association list.  */

void
propagate_binfo_offsets (binfo, offset)
     tree binfo;
     tree offset;
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  for (i = 0; i < n_baselinks; /* note increment is done in the loop.  */)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);

      if (TREE_VIA_VIRTUAL (base_binfo))
	/* Virtual bases are laid out elsewhere; skip this slot.  */
	i += 1;
      else
	{
	  int j;
	  tree base_binfos = BINFO_BASETYPES (base_binfo);
	  tree delta;

	  /* Find the next non-virtual base after this one, so we know
	     how far apart consecutive bases sit.  DELTA stays unset
	     when there is none, but then it is never read below.  */
	  for (j = i+1; j < n_baselinks; j++)
	    if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
	      {
		/* The next basetype offset must take into account the space
		   between the classes, not just the size of each class.  */
		delta = size_binop (MINUS_EXPR,
				    BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
				    BINFO_OFFSET (base_binfo));
		break;
	      }

#if 0
	  if (BINFO_OFFSET_ZEROP (base_binfo))
	    BINFO_OFFSET (base_binfo) = offset;
	  else
	    BINFO_OFFSET (base_binfo)
	      = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
#else
	  BINFO_OFFSET (base_binfo) = offset;
#endif
	  if (base_binfos)
	    {
	      int k;
	      tree chain = NULL_TREE;

	      /* Now unshare the structure beneath BASE_BINFO: replace
		 each shared non-virtual binfo with a private copy so
		 we can adjust its offset safely.  */
	      for (k = TREE_VEC_LENGTH (base_binfos)-1;
		   k >= 0; k--)
		{
		  tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
		  if (! TREE_VIA_VIRTUAL (base_base_binfo))
		    TREE_VEC_ELT (base_binfos, k)
		      = make_binfo (BINFO_OFFSET (base_base_binfo),
				    base_base_binfo,
				    BINFO_VTABLE (base_base_binfo),
				    BINFO_VIRTUALS (base_base_binfo),
				    chain);
		  chain = TREE_VEC_ELT (base_binfos, k);
		  TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
		  TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
		  BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
		}
	      /* Now propagate the offset to the base types.  */
	      propagate_binfo_offsets (base_binfo, offset);
	    }

	  /* Go to our next class that counts for offset propagation.  */
	  i = j;
	  if (i < n_baselinks)
	    offset = size_binop (PLUS_EXPR, offset, delta);
	}
    }
}
553
/* Compute the actual offsets that our virtual base classes
   will have *for this type*.  This must be performed after
   the fields are laid out, since virtual baseclasses must
   lay down at the end of the record.

   MAX is the running maximum of virtual-function-table sizes seen
   so far; returns the maximum number of virtual functions any of
   the virtual baseclasses provide.  */

int
layout_vbasetypes (rec, max)
     tree rec;
     int max;
{
  /* Get all the virtual base types that this type uses.
     The TREE_VALUE slot holds the virtual baseclass type.  */
  tree vbase_types = get_vbase_types (rec);

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif
  int desired_align;

  /* Record size so far is CONST_SIZE + VAR_SIZE bits,
     where CONST_SIZE is an integer
     and VAR_SIZE is a tree expression.
     If VAR_SIZE is null, the size is just CONST_SIZE.
     Naturally we try to avoid using VAR_SIZE.  */
  register unsigned const_size = 0;
  register tree var_size = 0;
  int nonvirtual_const_size;

  CLASSTYPE_VBASECLASSES (rec) = vbase_types;

  if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
    const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
  else
    var_size = TYPE_SIZE (rec);

  /* Remember the size without virtual bases, so we can compute
     CLASSTYPE_VBASE_SIZE at the end.  */
  nonvirtual_const_size = const_size;

  /* Append each virtual base at the (suitably aligned) end of the
     record, recording its offset in its binfo.  */
  while (vbase_types)
    {
      tree basetype = BINFO_TYPE (vbase_types);
      tree offset;

      desired_align = TYPE_ALIGN (basetype);
      record_align = MAX (record_align, desired_align);

      if (const_size == 0)
	offset = integer_zero_node;
      else
	{
	  /* Give each virtual base type the alignment it wants.  */
	  const_size = CEIL (const_size, TYPE_ALIGN (basetype))
	    * TYPE_ALIGN (basetype);
	  offset = size_int (CEIL (const_size, BITS_PER_UNIT));
	}

      if (CLASSTYPE_VSIZE (basetype) > max)
	max = CLASSTYPE_VSIZE (basetype);
      BINFO_OFFSET (vbase_types) = offset;

      if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
	{
	  /* Every virtual baseclass takes at least a UNIT, so that we can
	     take its address and get something different for each base.  */
	  const_size += MAX (BITS_PER_UNIT,
			     TREE_INT_CST_LOW (TYPE_SIZE (basetype))
			     - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
	}
      else if (var_size == 0)
	var_size = TYPE_SIZE (basetype);
      else
	var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));

      vbase_types = TREE_CHAIN (vbase_types);
    }

  if (const_size)
    {
      /* Because a virtual base might take a single byte above,
	 we have to re-adjust the total size to make sure it is
	 a multiple of the alignment.  */
      /* Give the whole object the alignment it wants.  */
      const_size = CEIL (const_size, record_align) * record_align;
    }

  /* Set the alignment in the complete type.  We don't set CLASSTYPE_ALIGN
     here, as that is for this class, without any virtual base classes.  */
  TYPE_ALIGN (rec) = record_align;
  if (const_size != nonvirtual_const_size)
    {
      CLASSTYPE_VBASE_SIZE (rec)
	= size_int (const_size - nonvirtual_const_size);
      TYPE_SIZE (rec) = size_int (const_size);
    }

  /* Now propagate offset information throughout the lattice
     under the vbase type.  */
  for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
       vbase_types = TREE_CHAIN (vbase_types))
    {
      tree base_binfos = BINFO_BASETYPES (vbase_types);

      BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);

      if (base_binfos)
	{
	  tree chain = NULL_TREE;
	  int j;
	  /* Now unshare the structure beneath BASE_BINFO, so each
	     non-virtual sub-binfo gets a private copy whose offset
	     can be adjusted.  */

	  for (j = TREE_VEC_LENGTH (base_binfos)-1;
	       j >= 0; j--)
	    {
	      tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
	      if (! TREE_VIA_VIRTUAL (base_base_binfo))
		TREE_VEC_ELT (base_binfos, j)
		  = make_binfo (BINFO_OFFSET (base_base_binfo),
				base_base_binfo,
				BINFO_VTABLE (base_base_binfo),
				BINFO_VIRTUALS (base_base_binfo),
				chain);
	      chain = TREE_VEC_ELT (base_binfos, j);
	      TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
	      TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
	      BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
	    }

	  propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
	}
    }

  return max;
}
690
/* Lay out the base types of a record type, REC.
   Tentatively set the size and alignment of REC
   according to the base types alone.

   Offsets for immediate nonvirtual baseclasses are also computed here.

   TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
   creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.

   Returns list of virtual base classes in a FIELD_DECL chain.  */

tree
layout_basetypes (rec, binfos)
     tree rec, binfos;
{
  /* Chain to hold all the new FIELD_DECLs which point at virtual
     base classes.  */
  tree vbase_decls = NULL_TREE;

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif

  /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
     an integer and VAR_SIZE is a tree expression.  If VAR_SIZE is null,
     the size is just CONST_SIZE.  Naturally we try to avoid using
     VAR_SIZE.  And so far, we've been successful.  */
#if 0
  register tree var_size = 0;
#endif

  register unsigned const_size = 0;
  int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  /* Handle basetypes almost like fields, but record their
     offsets differently.  */

  for (i = 0; i < n_baseclasses; i++)
    {
      int inc, desired_align, int_vbase_size;
      register tree base_binfo = TREE_VEC_ELT (binfos, i);
      register tree basetype = BINFO_TYPE (base_binfo);
      tree decl, offset;

      /* Skip incomplete bases; the error is reported elsewhere.  */
      if (TYPE_SIZE (basetype) == 0)
	{
#if 0
	  /* This error is now reported in xref_tag, thus giving better
	     location information.  */
	  error_with_aggr_type (base_binfo,
				"base class `%s' has incomplete type");

	  TREE_VIA_PUBLIC (base_binfo) = 1;
	  TREE_VIA_PROTECTED (base_binfo) = 0;
	  TREE_VIA_VIRTUAL (base_binfo) = 0;

	  /* Should handle this better so that

	     class A;
	     class B: private A { virtual void F(); };

	     does not dump core when compiled.  */
	  my_friendly_abort (121);
#endif
	  continue;
	}

      /* All basetypes are recorded in the association list of the
	 derived type.  */

      if (TREE_VIA_VIRTUAL (base_binfo))
	{
	  int j;
	  char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
				       + sizeof (VBASE_NAME) + 1);

	  /* The offset for a virtual base class is only used in computing
	     virtual function tables and for initializing virtual base
	     pointers.  It is built once `get_vbase_types' is called.  */

	  /* If this basetype can come from another vbase pointer
	     without an additional indirection, we will share
	     that pointer.  If an indirection is involved, we
	     make our own pointer.  */
	  for (j = 0; j < n_baseclasses; j++)
	    {
	      tree other_base_binfo = TREE_VEC_ELT (binfos, j);
	      if (! TREE_VIA_VIRTUAL (other_base_binfo)
		  && binfo_member (basetype,
				   CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
		goto got_it;
	    }

	  /* No sharing possible: build our own vbase pointer field.  */
	  sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
	  decl = build_lang_field_decl (FIELD_DECL, get_identifier (name),
					build_pointer_type (basetype));
	  /* If you change any of the below, take a look at all the
	     other VFIELD_BASEs and VTABLE_BASEs in the code, and change
	     them too.  */
	  DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
	  DECL_VIRTUAL_P (decl) = 1;
	  DECL_FIELD_CONTEXT (decl) = rec;
	  DECL_CLASS_CONTEXT (decl) = rec;
	  DECL_FCONTEXT (decl) = basetype;
	  DECL_SAVED_INSNS (decl) = NULL_RTX;
	  DECL_FIELD_SIZE (decl) = 0;
	  DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
	  TREE_CHAIN (decl) = vbase_decls;
	  BINFO_VPTR_FIELD (base_binfo) = decl;
	  vbase_decls = decl;

	  /* Warn about a virtual base whose destructor is non-virtual.
	     (Slot 1 of CLASSTYPE_METHOD_VEC is presumably the destructor
	     here -- verify against the method-vector layout.)  */
	  if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
	      && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1)) == NULL_TREE)
	    {
	      warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1),
				 "destructor `%s' non-virtual");
	      warning ("in inheritance relationship `%s: virtual %s'",
		       TYPE_NAME_STRING (rec),
		       TYPE_NAME_STRING (basetype));
	    }
	got_it:
	  /* The space this decl occupies has already been accounted for.  */
	  continue;
	}

      if (const_size == 0)
	offset = integer_zero_node;
      else
	{
	  /* Give each base type the alignment it wants.  */
	  const_size = CEIL (const_size, TYPE_ALIGN (basetype))
	    * TYPE_ALIGN (basetype);
	  offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

#if 0
	  /* bpk: Disabled this check until someone is willing to
	     claim it as theirs and explain exactly what circumstances
	     warrant the warning.  */
	  if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
	      && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1)) == NULL_TREE)
	    {
	      warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1),
				 "destructor `%s' non-virtual");
	      warning ("in inheritance relationship `%s:%s %s'",
		       TYPE_NAME_STRING (rec),
		       TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
		       TYPE_NAME_STRING (basetype));
	    }
#endif
	}
      BINFO_OFFSET (base_binfo) = offset;
      if (CLASSTYPE_VSIZE (basetype))
	{
	  BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
	  BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
	}

      /* Thread this binfo onto REC's binfo list.  */
      TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
      TYPE_BINFO (rec) = base_binfo;

      /* Add only the amount of storage not present in
	 the virtual baseclasses.  */

      int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
      if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
	{
	  inc = MAX (record_align,
		     (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
		      - int_vbase_size));

	  /* Record must have at least as much alignment as any field.  */
	  desired_align = TYPE_ALIGN (basetype);
	  record_align = MAX (record_align, desired_align);

	  const_size += inc;
	}
    }

  if (const_size)
    CLASSTYPE_SIZE (rec) = size_int (const_size);
  else
    CLASSTYPE_SIZE (rec) = integer_zero_node;
  CLASSTYPE_ALIGN (rec) = record_align;

  return vbase_decls;
}
876 \f
/* Hashing of lists so that we don't make duplicates.
   The entry point is `list_hash_canon'.  */

/* Each hash table slot is a bucket containing a chain
   of these structures.  */

struct list_hash
{
  struct list_hash *next;	/* Next structure in the bucket.  */
  int hashcode;			/* Hash code of this list.  */
  tree list;			/* The list recorded here.  */
};

/* Now here is the hash table.  When recording a list, it is added
   to the slot whose index is the hash code mod the table size.
   Note that the hash table is used for several kinds of lists.
   While all these live in the same table, they are completely independent,
   and the hash code is computed differently for each of these.  */

#define TYPE_HASH_SIZE 59
struct list_hash *list_hash_table[TYPE_HASH_SIZE];
898
899 /* Compute a hash code for a list (chain of TREE_LIST nodes
900 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
901 TREE_COMMON slots), by adding the hash codes of the individual entries. */
902
903 int
904 list_hash (list)
905 tree list;
906 {
907 register int hashcode = 0;
908
909 if (TREE_CHAIN (list))
910 hashcode += TYPE_HASH (TREE_CHAIN (list));
911
912 if (TREE_VALUE (list))
913 hashcode += TYPE_HASH (TREE_VALUE (list));
914 else
915 hashcode += 1007;
916 if (TREE_PURPOSE (list))
917 hashcode += TYPE_HASH (TREE_PURPOSE (list));
918 else
919 hashcode += 1009;
920 return hashcode;
921 }
922
923 /* Look in the type hash table for a type isomorphic to TYPE.
924 If one is found, return it. Otherwise return 0. */
925
926 tree
927 list_hash_lookup (hashcode, list)
928 int hashcode;
929 tree list;
930 {
931 register struct list_hash *h;
932 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
933 if (h->hashcode == hashcode
934 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
935 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
936 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
937 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
938 && TREE_VALUE (h->list) == TREE_VALUE (list)
939 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
940 {
941 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
942 return h->list;
943 }
944 return 0;
945 }
946
947 /* Add an entry to the list-hash-table
948 for a list TYPE whose hash code is HASHCODE. */
949
950 void
951 list_hash_add (hashcode, list)
952 int hashcode;
953 tree list;
954 {
955 register struct list_hash *h;
956
957 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
958 h->hashcode = hashcode;
959 h->list = list;
960 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
961 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
962 }
963
964 /* Given TYPE, and HASHCODE its hash code, return the canonical
965 object for an identical list if one already exists.
966 Otherwise, return TYPE, and record it as the canonical object
967 if it is a permanent object.
968
969 To use this function, first create a list of the sort you want.
970 Then compute its hash code from the fields of the list that
971 make it different from other similar lists.
972 Then call this function and use the value.
973 This function frees the list you pass in if it is a duplicate. */
974
975 /* Set to 1 to debug without canonicalization. Never set by program. */
976 static int debug_no_list_hash = 0;
977
978 tree
979 list_hash_canon (hashcode, list)
980 int hashcode;
981 tree list;
982 {
983 tree t1;
984
985 if (debug_no_list_hash)
986 return list;
987
988 t1 = list_hash_lookup (hashcode, list);
989 if (t1 != 0)
990 {
991 obstack_free (&class_obstack, list);
992 return t1;
993 }
994
995 /* If this is a new list, record it for later reuse. */
996 list_hash_add (hashcode, list);
997
998 return list;
999 }
1000
1001 tree
1002 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1003 int via_public, via_virtual, via_protected;
1004 tree purpose, value, chain;
1005 {
1006 struct obstack *ambient_obstack = current_obstack;
1007 tree t;
1008 int hashcode;
1009
1010 current_obstack = &class_obstack;
1011 t = tree_cons (purpose, value, chain);
1012 TREE_VIA_PUBLIC (t) = via_public;
1013 TREE_VIA_PROTECTED (t) = via_protected;
1014 TREE_VIA_VIRTUAL (t) = via_virtual;
1015 hashcode = list_hash (t);
1016 t = list_hash_canon (hashcode, t);
1017 current_obstack = ambient_obstack;
1018 return t;
1019 }
1020
1021 /* Constructor for hashed lists. */
1022 tree
1023 hash_tree_chain (value, chain)
1024 tree value, chain;
1025 {
1026 struct obstack *ambient_obstack = current_obstack;
1027 tree t;
1028 int hashcode;
1029
1030 current_obstack = &class_obstack;
1031 t = tree_cons (NULL_TREE, value, chain);
1032 hashcode = list_hash (t);
1033 t = list_hash_canon (hashcode, t);
1034 current_obstack = ambient_obstack;
1035 return t;
1036 }
1037
1038 /* Similar, but used for concatenating two lists. */
1039 tree
1040 hash_chainon (list1, list2)
1041 tree list1, list2;
1042 {
1043 if (list2 == 0)
1044 return list1;
1045 if (list1 == 0)
1046 return list2;
1047 if (TREE_CHAIN (list1) == NULL_TREE)
1048 return hash_tree_chain (TREE_VALUE (list1), list2);
1049 return hash_tree_chain (TREE_VALUE (list1),
1050 hash_chainon (TREE_CHAIN (list1), list2));
1051 }
1052
/* Return a TREE_LIST that names VALUE (an IDENTIFIER_NODE), or
   NULL_TREE if no suitable cached list exists.  Used to give a
   single-element list form to identifiers naming types.  */
static tree
get_identifier_list (value)
     tree value;
{
  tree list = IDENTIFIER_AS_LIST (value);
  /* Reject a cached list that does not actually name VALUE.  */
  if (list != NULL_TREE
      && (TREE_CODE (list) != TREE_LIST
	  || TREE_VALUE (list) != value))
    list = NULL_TREE;
  else if (IDENTIFIER_HAS_TYPE_VALUE (value)
	   && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
	   && IDENTIFIER_TYPE_VALUE (value)
	      == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
    {
      tree type = IDENTIFIER_TYPE_VALUE (value);

      if (TYPE_PTRMEMFUNC_P (type))
	/* Pointer-to-member-function records are an implementation
	   detail; never expose them through an identifier list.  */
	list = NULL_TREE;
      else if (type == current_class_type)
	/* Don't mess up the constructor name.  */
	list = tree_cons (NULL_TREE, value, NULL_TREE);
      else
	{
	  register tree id;
	  /* This will return the correct thing for regular types,
	     nested types, and templates.  Yay! */
	  if (TYPE_NESTED_NAME (type))
	    id = TYPE_NESTED_NAME (type);
	  else
	    id = TYPE_IDENTIFIER (type);

	  /* Cache the list on the type so later lookups share it.  */
	  if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
	    CLASSTYPE_ID_AS_LIST (type)
	      = perm_tree_cons (NULL_TREE, id, NULL_TREE);
	  list = CLASSTYPE_ID_AS_LIST (type);
	}
    }
  return list;
}
1092
/* Return a single-element TREE_LIST holding VALUE, reusing a cached
   list when VALUE is an identifier or a main-variant RECORD_TYPE.
   The returned list always has exactly one element.  */
tree
get_decl_list (value)
     tree value;
{
  tree list = NULL_TREE;

  if (TREE_CODE (value) == IDENTIFIER_NODE)
    list = get_identifier_list (value);
  else if (TREE_CODE (value) == RECORD_TYPE
	   && TYPE_LANG_SPECIFIC (value)
	   && value == TYPE_MAIN_VARIANT (value))
    list = CLASSTYPE_AS_LIST (value);

  if (list != NULL_TREE)
    {
      /* Cached lists must be singletons; anything longer indicates a
	 corrupted cache.  */
      my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
      return list;
    }

  /* No cached form: build a fresh one-element list.  */
  return build_decl_list (NULL_TREE, value);
}
1114 \f
1115 /* Build an association between TYPE and some parameters:
1116
1117 OFFSET is the offset added to `this' to convert it to a pointer
1118 of type `TYPE *'
1119
1120 BINFO is the base binfo to use, if we are deriving from one. This
1121 is necessary, as we want specialized parent binfos from base
1122 classes, so that the VTABLE_NAMEs of bases are for the most derived
   type, instead of the simple type.
1124
1125 VTABLE is the virtual function table with which to initialize
1126 sub-objects of type TYPE.
1127
1128 VIRTUALS are the virtual functions sitting in VTABLE.
1129
1130 CHAIN are more associations we must retain. */
1131
tree
make_binfo (offset, binfo, vtable, virtuals, chain)
     tree offset, binfo;
     tree vtable, virtuals;
     tree chain;
{
  /* A binfo is a 6-element TREE_VEC; see the BINFO_* accessors.  */
  tree new_binfo = make_tree_vec (6);
  tree type;

  /* BINFO may be either a binfo (TREE_VEC) to derive from, or a bare
     type whose own binfo we should copy base information from.  */
  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else
    {
      type = binfo;
      binfo = TYPE_BINFO (binfo);
    }

  TREE_CHAIN (new_binfo) = chain;
  /* Propagate TREE_USED from the rest of the chain.  */
  if (chain)
    TREE_USED (new_binfo) = TREE_USED (chain);

  TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
  BINFO_OFFSET (new_binfo) = offset;
  BINFO_VTABLE (new_binfo) = vtable;
  BINFO_VIRTUALS (new_binfo) = virtuals;
  BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;

  /* Copy (shallowly) the base-class vector so the new binfo can be
     specialized without disturbing the original.  */
  if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
    BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
  return new_binfo;
}
1163
/* Return the binfo value for ELEM in TYPE.  Reports an error if ELEM
   is an ambiguous base of TYPE.  */

tree
binfo_value (elem, type)
     tree elem;
     tree type;
{
  /* get_base_distance returns -2 when ELEM appears along more than
     one inheritance path.  */
  if (get_base_distance (elem, type, 0, (tree *)0) == -2)
    compiler_error ("base class `%s' ambiguous in binfo_value",
		    TYPE_NAME_STRING (elem));
  if (elem == type)
    return TYPE_BINFO (type);
  /* TYPE may itself already be the binfo for ELEM.  */
  if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
    return type;
  return get_binfo (elem, type, 0);
}
1180
1181 tree
1182 reverse_path (path)
1183 tree path;
1184 {
1185 register tree prev = 0, tmp, next;
1186 for (tmp = path; tmp; tmp = next)
1187 {
1188 next = BINFO_INHERITANCE_CHAIN (tmp);
1189 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1190 prev = tmp;
1191 }
1192 return prev;
1193 }
1194
1195 void
1196 debug_binfo (elem)
1197 tree elem;
1198 {
1199 unsigned HOST_WIDE_INT n;
1200 tree virtuals;
1201
1202 fprintf (stderr, "type \"%s\"; offset = %d\n",
1203 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1204 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1205 fprintf (stderr, "vtable type:\n");
1206 debug_tree (BINFO_TYPE (elem));
1207 if (BINFO_VTABLE (elem))
1208 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1209 else
1210 fprintf (stderr, "no vtable decl yet\n");
1211 fprintf (stderr, "virtuals:\n");
1212 virtuals = BINFO_VIRTUALS (elem);
1213
1214 n = skip_rtti_stuff (&virtuals);
1215
1216 while (virtuals)
1217 {
1218 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1219 fprintf (stderr, "%s [%d =? %d]\n",
1220 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1221 n, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1222 ++n;
1223 virtuals = TREE_CHAIN (virtuals);
1224 }
1225 }
1226
1227 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1228 We expect a null pointer to mark the end of the chain.
1229 This is the Lisp primitive `length'. */
1230
1231 int
1232 decl_list_length (t)
1233 tree t;
1234 {
1235 register tree tail;
1236 register int len = 0;
1237
1238 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1239 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1240 for (tail = t; tail; tail = DECL_CHAIN (tail))
1241 len++;
1242
1243 return len;
1244 }
1245
1246 int
1247 count_functions (t)
1248 tree t;
1249 {
1250 if (TREE_CODE (t) == FUNCTION_DECL)
1251 return 1;
1252 else if (TREE_CODE (t) == TREE_LIST)
1253 return decl_list_length (TREE_VALUE (t));
1254
1255 my_friendly_abort (359);
1256 return 0;
1257 }
1258
1259 int
1260 is_overloaded_fn (x)
1261 tree x;
1262 {
1263 if (TREE_CODE (x) == FUNCTION_DECL)
1264 return 1;
1265
1266 if (TREE_CODE (x) == TREE_LIST
1267 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1268 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1269 return 1;
1270
1271 return 0;
1272 }
1273
1274 int
1275 really_overloaded_fn (x)
1276 tree x;
1277 {
1278 if (TREE_CODE (x) == TREE_LIST
1279 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1280 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1281 return 1;
1282
1283 return 0;
1284 }
1285
1286 tree
1287 get_first_fn (from)
1288 tree from;
1289 {
1290 if (TREE_CODE (from) == FUNCTION_DECL)
1291 return from;
1292
1293 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1294
1295 return TREE_VALUE (from);
1296 }
1297
/* Given a vtable entry ENTRY, return the address of the function it
   dispatches to.  The representation of ENTRY differs between the
   thunk and non-thunk vtable layouts.  */
tree
fnaddr_from_vtable_entry (entry)
     tree entry;
{
  if (flag_vtable_thunks)
    {
      /* With thunks, the entry is (the address of) either the real
	 function or a THUNK_DECL whose DECL_INITIAL is the real
	 function's address.  */
      tree func = entry;
      if (TREE_CODE (func) == ADDR_EXPR)
	func = TREE_OPERAND (func, 0);
      if (TREE_CODE (func) == THUNK_DECL)
	return DECL_INITIAL (func);
      else
	return entry;
    }
  else
    /* Without thunks, the entry is a CONSTRUCTOR whose third element
       is the function address (after delta and index).  */
    return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
}
1315
1316 tree
1317 function_arg_chain (t)
1318 tree t;
1319 {
1320 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1321 }
1322
1323 int
1324 promotes_to_aggr_type (t, code)
1325 tree t;
1326 enum tree_code code;
1327 {
1328 if (TREE_CODE (t) == code)
1329 t = TREE_TYPE (t);
1330 return IS_AGGR_TYPE (t);
1331 }
1332
1333 int
1334 is_aggr_type_2 (t1, t2)
1335 tree t1, t2;
1336 {
1337 if (TREE_CODE (t1) != TREE_CODE (t2))
1338 return 0;
1339 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1340 }
1341
/* Give message using types TYPE1 and TYPE2 as arguments.
   PFN is the function which will print the message;
   S is the format string for PFN to use.  The two type names are
   passed to PFN as plain C strings.  */
void
message_2_types (pfn, s, type1, type2)
     void (*pfn) ();
     char *s;
     tree type1, type2;
{
  tree name1 = TYPE_NAME (type1);
  tree name2 = TYPE_NAME (type2);
  /* TYPE_NAME may be a TYPE_DECL rather than an identifier; unwrap
     it so IDENTIFIER_POINTER below is valid.  */
  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
}
1359 \f
1360 #define PRINT_RING_SIZE 4
1361
1362 char *
1363 lang_printable_name (decl)
1364 tree decl;
1365 {
1366 static tree decl_ring[PRINT_RING_SIZE];
1367 static char *print_ring[PRINT_RING_SIZE];
1368 static int ring_counter;
1369 int i;
1370
1371 /* Only cache functions. */
1372 if (TREE_CODE (decl) != FUNCTION_DECL
1373 || DECL_LANG_SPECIFIC (decl) == 0)
1374 return decl_as_string (decl, 1);
1375
1376 /* See if this print name is lying around. */
1377 for (i = 0; i < PRINT_RING_SIZE; i++)
1378 if (decl_ring[i] == decl)
1379 /* yes, so return it. */
1380 return print_ring[i];
1381
1382 if (++ring_counter == PRINT_RING_SIZE)
1383 ring_counter = 0;
1384
1385 if (current_function_decl != NULL_TREE)
1386 {
1387 if (decl_ring[ring_counter] == current_function_decl)
1388 ring_counter += 1;
1389 if (ring_counter == PRINT_RING_SIZE)
1390 ring_counter = 0;
1391 if (decl_ring[ring_counter] == current_function_decl)
1392 my_friendly_abort (106);
1393 }
1394
1395 if (print_ring[ring_counter])
1396 free (print_ring[ring_counter]);
1397
1398 {
1399 int print_ret_type_p
1400 = (!DECL_CONSTRUCTOR_P (decl)
1401 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1402
1403 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1404 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1405 strcpy (print_ring[ring_counter], name);
1406 decl_ring[ring_counter] = decl;
1407 }
1408 return print_ring[ring_counter];
1409 }
1410 \f
/* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
   listed in RAISES.  Reuses an existing variant of TYPE when one with
   the same qualifiers and exception list already exists.  */
tree
build_exception_variant (type, raises)
     tree type;
     tree raises;
{
  tree v = TYPE_MAIN_VARIANT (type);
  int constp = TYPE_READONLY (type);
  int volatilep = TYPE_VOLATILE (type);

  /* Walk the variant chain looking for an exact match.  */
  for (; v; v = TYPE_NEXT_VARIANT (v))
    {
      if (TYPE_READONLY (v) != constp
	  || TYPE_VOLATILE (v) != volatilep)
	continue;

      /* @@ This should do set equality, not exact match. */
      if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
	/* List of exceptions raised matches previously found list.

	   @@ Nice to free up storage used in consing up the
	   @@ list of exceptions raised.  */
	return v;
    }

  /* Need to build a new variant.  */
  v = build_type_copy (type);

  /* The exception list must live as long as the type itself; copy it
     to permanent storage if it was built on a temporary obstack.  */
  if (raises && ! TREE_PERMANENT (raises))
    {
      push_obstacks_nochange ();
      end_temporary_allocation ();
      raises = copy_list (raises);
      pop_obstacks ();
    }

  TYPE_RAISES_EXCEPTIONS (v) = raises;
  return v;
}
1451
/* Subroutine of copy_to_permanent

   Assuming T is a node built bottom-up, make it all exist on
   permanent obstack, if it is not permanent already.

   FUNC is applied to each node first; if it returns non-null, that
   result is used in place of T and the children are not visited.
   Otherwise T is copied and its children are rebuilt recursively.
   Decls (other than PARM_DECL) are shared, not copied.  */

tree
mapcar (t, func)
     tree t;
     tree (*func)();
{
  tree tmp;

  if (t == NULL_TREE)
    return t;

  /* Let FUNC replace the node outright (e.g. perm_manip returns a
     node that is already permanent).  */
  if (tmp = func (t), tmp != NULL_TREE)
    return tmp;

  switch (TREE_CODE (t))
    {
    case ERROR_MARK:
      return error_mark_node;

    case VAR_DECL:
    case FUNCTION_DECL:
    case CONST_DECL:
      /* Shared by reference; fall through to the abort below only if
	 FUNC declined to handle them -- see my_friendly_abort (107).  */
      break;

    case PARM_DECL:
      {
	/* Copy the decl and rebuild its chain, type, initializer and
	   size.  Save the chain first: copy_node preserves it, but we
	   want the mapped version.  */
	tree chain = TREE_CHAIN (t);
	t = copy_node (t);
	TREE_CHAIN (t) = mapcar (chain, func);
	TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
	DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
	DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
	return t;
      }

    case TREE_LIST:
      {
	tree chain = TREE_CHAIN (t);
	t = copy_node (t);
	TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
	TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
	TREE_CHAIN (t) = mapcar (chain, func);
	return t;
      }

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (t);

	t = copy_node (t);
	while (len--)
	  TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
	return t;
      }

    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
      /* Constants have no tree children; a shallow copy suffices.  */
      return copy_node (t);

    /* Ternary expressions: copy and map all three operands.  */
    case COND_EXPR:
    case TARGET_EXPR:
    case NEW_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
      TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
      return t;

    case SAVE_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      return t;

    /* Binary expressions: copy and map both operands.  */
    case MODIFY_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_ANDTC_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case COMPOUND_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case CALL_EXPR:
    case ARRAY_REF:
    case SCOPE_REF:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
      return t;

    /* Unary expressions: copy and map the single operand.  */
    case CONVERT_EXPR:
    case ADDR_EXPR:
    case INDIRECT_REF:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case NOP_EXPR:
    case COMPONENT_REF:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
      return t;

    /* Types: rebuild the type from mapped components, then reapply
       the const/volatile qualifiers of the original.  */
    case POINTER_TYPE:
      tmp = build_pointer_type (mapcar (TREE_TYPE (t), func));
      return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
    case REFERENCE_TYPE:
      tmp = build_reference_type (mapcar (TREE_TYPE (t), func));
      return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
    case FUNCTION_TYPE:
      tmp = build_function_type (mapcar (TREE_TYPE (t), func),
				 mapcar (TYPE_ARG_TYPES (t), func));
      return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
    case ARRAY_TYPE:
      tmp = build_array_type (mapcar (TREE_TYPE (t), func),
			      mapcar (TYPE_DOMAIN (t), func));
      return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
    case INTEGER_TYPE:
      tmp = build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
      return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
    case OFFSET_TYPE:
      tmp = build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
			       mapcar (TREE_TYPE (t), func));
      return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
    case METHOD_TYPE:
      tmp = build_cplus_method_type
	(mapcar (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (t))), func),
	 mapcar (TREE_TYPE (t), func),
	 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func));
      return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));

    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (t))
	return build_ptrmemfunc_type
	  (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
      /* else fall through */

      /* This list is incomplete, but should suffice for now.
	 It is very important that `sorry' not call
	 `report_error_function'.  That could cause an infinite loop.  */
    default:
      sorry ("initializer contains unrecognized tree code");
      return error_mark_node;

    }
  my_friendly_abort (107);
  /* NOTREACHED */
  return NULL_TREE;
}
1629
/* FUNC argument for mapcar when copying to the permanent obstack:
   return T itself if it is already permanent, a shallow copy for
   public var/function decls, and NULL_TREE (meaning "recurse and
   copy") otherwise.  */
static tree
perm_manip (t)
     tree t;
{
  if (TREE_PERMANENT (t))
    return t;
  /* Support `void f () { extern int i; A<&i> a; }' */
  if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
      && TREE_PUBLIC (t))
    return copy_node (t);
  return NULL_TREE;
}
1642
/* Assuming T is a node built bottom-up, make it all exist on
   permanent obstack, if it is not permanent already.  Returns T
   unchanged when it is null or already permanent.  */
tree
copy_to_permanent (t)
     tree t;
{
  register struct obstack *ambient_obstack = current_obstack;
  register struct obstack *ambient_saveable_obstack = saveable_obstack;
  int resume;

  if (t == NULL_TREE || TREE_PERMANENT (t))
    return t;

  /* Redirect all allocation to the permanent obstack and suspend
     momentary allocation for the duration of the copy.  */
  saveable_obstack = &permanent_obstack;
  current_obstack = saveable_obstack;
  resume = suspend_momentary ();

  t = mapcar (t, perm_manip);

  /* Restore the previous allocation state.  */
  resume_momentary (resume);
  current_obstack = ambient_obstack;
  saveable_obstack = ambient_saveable_obstack;

  return t;
}
1668
#ifdef GATHER_STATISTICS
extern int depth_reached;
#endif

/* Dump C++ front-end memory and search statistics to stderr.
   Called from the language-independent statistics machinery.  */
void
print_lang_statistics ()
{
  extern struct obstack maybepermanent_obstack, decl_obstack;
  print_obstack_statistics ("class_obstack", &class_obstack);
  print_obstack_statistics ("decl_obstack", &decl_obstack);
  print_obstack_statistics ("permanent_obstack", &permanent_obstack);
  print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
  print_search_statistics ();
  print_class_statistics ();
#ifdef GATHER_STATISTICS
  fprintf (stderr, "maximum template instantiation depth reached: %d\n",
	   depth_reached);
#endif
}
1688
/* This is used by the `assert' macro.  It is provided in libgcc.a,
   which `cc' doesn't know how to link.  Note that the C++ front-end
   no longer actually uses the `assert' macro (instead, it calls
   my_friendly_assert).  But all of the back-end files still need this.
   STRING is the assert format string; it consumes the remaining three
   arguments.  Does not return.  */
void
__eprintf (string, expression, line, filename)
#ifdef __STDC__
     const char *string;
     const char *expression;
     unsigned line;
     const char *filename;
#else
     char *string;
     char *expression;
     unsigned line;
     char *filename;
#endif
{
  fprintf (stderr, string, expression, line, filename);
  fflush (stderr);
  abort ();
}
1711
1712 /* Return, as an INTEGER_CST node, the number of elements for
1713 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1714
1715 tree
1716 array_type_nelts_top (type)
1717 tree type;
1718 {
1719 return fold (build (PLUS_EXPR, sizetype,
1720 array_type_nelts (type),
1721 integer_one_node));
1722 }
1723
1724 /* Return, as an INTEGER_CST node, the number of elements for
1725 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1726 ARRAY_TYPEs that are clumped together. */
1727
1728 tree
1729 array_type_nelts_total (type)
1730 tree type;
1731 {
1732 tree sz = array_type_nelts_top (type);
1733 type = TREE_TYPE (type);
1734 while (TREE_CODE (type) == ARRAY_TYPE)
1735 {
1736 tree n = array_type_nelts_top (type);
1737 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1738 type = TREE_TYPE (type);
1739 }
1740 return sz;
1741 }
1742
/* FUNC argument for mapcar in break_out_target_exprs: share nodes
   without side effects, rebuild TARGET_EXPRs as fresh cplus_new
   expressions, and return NULL_TREE (meaning "copy and recurse")
   for everything else.  */
static
tree
bot_manip (t)
     tree t;
{
  if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
    return t;
  else if (TREE_CODE (t) == TARGET_EXPR)
    /* Give the copy its own target slot by rebuilding the
       initializer with the TARGET_EXPRs broken out.  */
    return build_cplus_new (TREE_TYPE (t),
			    break_out_target_exprs (TREE_OPERAND (t, 1)));
  return NULL_TREE;
}
1755
/* Actually, we'll just clean out the target exprs for the moment.
   Returns a copy of T in which each TARGET_EXPR has been given a
   fresh target slot (see bot_manip).  */
tree
break_out_target_exprs (t)
     tree t;
{
  return mapcar (t, bot_manip);
}
1763
/* Arrange for an expression to be expanded multiple independent
   times.  This is useful for cleanup actions, as the backend can
   expand them multiple times in different places.  Wraps EXPR in an
   UNSAVE_EXPR; see unsave_expr_now for the unwrapping side.  */
tree
unsave_expr (expr)
     tree expr;
{
  tree t;

  /* If this is already protected, no sense in protecting it again.  */
  if (TREE_CODE (expr) == UNSAVE_EXPR)
    return expr;

  t = build1 (UNSAVE_EXPR, TREE_TYPE (expr), expr);
  /* The wrapper has side effects exactly when EXPR does.  */
  TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (expr);
  return t;
}
1781
1782 /* Modify a tree in place so that all the evaluate only once things
1783 are cleared out. Return the EXPR given. */
1784 tree
1785 unsave_expr_now (expr)
1786 tree expr;
1787 {
1788 enum tree_code code;
1789 register int i;
1790
1791 if (expr == NULL_TREE)
1792 return expr;
1793
1794 code = TREE_CODE (expr);
1795 switch (code)
1796 {
1797 case SAVE_EXPR:
1798 SAVE_EXPR_RTL (expr) = NULL_RTX;
1799 break;
1800
1801 case TARGET_EXPR:
1802 sorry ("TARGET_EXPR reused inside UNSAVE_EXPR");
1803 break;
1804
1805 case RTL_EXPR:
1806 warning ("RTL_EXPR reused inside UNSAVE_EXPR");
1807 RTL_EXPR_SEQUENCE (expr) = NULL_RTX;
1808 break;
1809
1810 case CALL_EXPR:
1811 CALL_EXPR_RTL (expr) = NULL_RTX;
1812 if (TREE_OPERAND (expr, 1)
1813 && TREE_CODE (TREE_OPERAND (expr, 1)) == TREE_LIST)
1814 {
1815 tree exp = TREE_OPERAND (expr, 1);
1816 while (exp)
1817 {
1818 unsave_expr_now (TREE_VALUE (exp));
1819 exp = TREE_CHAIN (exp);
1820 }
1821 }
1822 break;
1823 }
1824
1825 switch (TREE_CODE_CLASS (code))
1826 {
1827 case 'c': /* a constant */
1828 case 't': /* a type node */
1829 case 'x': /* something random, like an identifier or an ERROR_MARK. */
1830 case 'd': /* A decl node */
1831 case 'b': /* A block node */
1832 return expr;
1833
1834 case 'e': /* an expression */
1835 case 'r': /* a reference */
1836 case 's': /* an expression with side effects */
1837 case '<': /* a comparison expression */
1838 case '2': /* a binary arithmetic expression */
1839 case '1': /* a unary arithmetic expression */
1840 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
1841 unsave_expr_now (TREE_OPERAND (expr, i));
1842 return expr;
1843
1844 default:
1845 my_friendly_abort (999);
1846 }
1847 }
1848
/* Since cleanup may have SAVE_EXPRs in it, we protect it with an
   UNSAVE_EXPR as the backend cannot yet handle SAVE_EXPRs in cleanups
   by itself.  Returns the result of expand_decl_cleanup.  */
int
cp_expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  return expand_decl_cleanup (decl, unsave_expr (cleanup));
}
1858
1859 /* Obstack used for allocating nodes in template function and variable
1860 definitions. */
1861
1862 extern struct obstack *expression_obstack;
1863
/* Similar to `build_nt', except we build
   on the permanent_obstack, regardless.  Operands are copied to
   permanent storage; no TREE_TYPE is set (hence "nt", no type).
   TREE_COMPLEXITY records the source line for diagnostics.  */

tree
build_min_nt VPROTO((enum tree_code code, ...))
{
#ifndef __STDC__
  enum tree_code code;
#endif
  register struct obstack *ambient_obstack = expression_obstack;
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, code);

#ifndef __STDC__
  /* Under traditional C, VA_START does not consume the named
     argument; fetch it from the va_list explicitly.  */
  code = va_arg (p, enum tree_code);
#endif

  expression_obstack = &permanent_obstack;

  t = make_node (code);
  length = tree_code_length[(int) code];
  TREE_COMPLEXITY (t) = lineno;

  /* Each operand must itself be permanent for T to be reusable.  */
  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = copy_to_permanent (x);
    }

  va_end (p);
  expression_obstack = ambient_obstack;
  return t;
}
1901
/* Similar to `build', except we build
   on the permanent_obstack, regardless.  TT becomes the TREE_TYPE of
   the new node; remaining operands are copied to permanent storage.
   TREE_COMPLEXITY records the source line for diagnostics.  */

tree
build_min VPROTO((enum tree_code code, tree tt, ...))
{
#ifndef __STDC__
  enum tree_code code;
  tree tt;
#endif
  register struct obstack *ambient_obstack = expression_obstack;
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, tt);

#ifndef __STDC__
  /* Under traditional C, VA_START does not consume the named
     arguments; fetch them from the va_list explicitly.  */
  code = va_arg (p, enum tree_code);
  tt = va_arg (p, tree);
#endif

  expression_obstack = &permanent_obstack;

  t = make_node (code);
  length = tree_code_length[(int) code];
  TREE_TYPE (t) = tt;
  TREE_COMPLEXITY (t) = lineno;

  /* Each operand must itself be permanent for T to be reusable.  */
  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = copy_to_permanent (x);
    }

  va_end (p);
  expression_obstack = ambient_obstack;
  return t;
}
1942
/* Same as `tree_cons' but make a permanent object.  PURPOSE and VALUE
   are copied to the permanent obstack; CHAIN is used as-is.
   NOTE(review): CHAIN is not run through copy_to_permanent -- it
   looks like callers are expected to pass an already-permanent
   chain; confirm before relying on this.  */

tree
min_tree_cons (purpose, value, chain)
     tree purpose, value, chain;
{
  register tree node;
  register struct obstack *ambient_obstack = current_obstack;
  current_obstack = &permanent_obstack;

  node = tree_cons (copy_to_permanent (purpose),
		    copy_to_permanent (value), chain);
  current_obstack = ambient_obstack;
  return node;
}
1958
1959 tree
1960 get_type_decl (t)
1961 tree t;
1962 {
1963 if (TREE_CODE (t) == IDENTIFIER_NODE)
1964 return identifier_typedecl_value (t);
1965 if (TREE_CODE (t) == TYPE_DECL)
1966 return t;
1967 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
1968 return TYPE_STUB_DECL (t);
1969
1970 my_friendly_abort (42);
1971 }
1972
1973 int
1974 can_free (obstack, t)
1975 struct obstack *obstack;
1976 tree t;
1977 {
1978 int size;
1979
1980 if (TREE_CODE (t) == TREE_VEC)
1981 size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
1982 else
1983 my_friendly_abort (42);
1984
1985 #define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
1986 & ~ obstack_alignment_mask (obstack))
1987 if ((char *)t + ROUND (size) == obstack_next_free (obstack))
1988 return 1;
1989 #undef ROUND
1990
1991 return 0;
1992 }
1993
1994 /* Return first vector element whose BINFO_TYPE is ELEM.
1995 Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
1996
1997 tree
1998 vec_binfo_member (elem, vec)
1999 tree elem, vec;
2000 {
2001 int i;
2002
2003 if (vec)
2004 for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
2005 if (elem == BINFO_TYPE (TREE_VEC_ELT (vec, i)))
2006 return TREE_VEC_ELT (vec, i);
2007
2008 return NULL_TREE;
2009 }
2010
/* Kludge around the fact that DECL_CONTEXT for virtual functions returns
   the wrong thing for decl_function_context.  Hopefully the uses in the
   backend won't matter, since we don't need a static chain for local class
   methods.  FIXME!

   For a member function, compute the function context via the class's
   main TYPE_DECL instead of the decl itself.  */

tree
hack_decl_function_context (decl)
     tree decl;
{
  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
    return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
  return decl_function_context (decl);
}
This page took 0.132928 seconds and 6 git commands to generate.