/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Set to one when set_sizetype has been called.  */
static int sizetype_set;

/* List of types created before set_sizetype has been called.  We do not
   make this a GGC root since we want these nodes to be reclaimed.  */
static tree early_type_list;

/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  */
tree sizetype_tab[(int) TYPE_KIND_LAST];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment;

/* If nonzero, the alignment of a bitstring or (power-)set value, in bits.
   May be overridden by front-ends.  */
unsigned int set_alignment = 0;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
   allocated in Pmode, not ptr_mode.  Set only by internal_reference_types
   called only by a front end.  */
static int reference_types_internal = 0;

static void finalize_record_size (record_layout_info);
static void finalize_type_size (tree);
static void place_union_field (record_layout_info, tree);
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
                             HOST_WIDE_INT, tree);
#endif
static unsigned int update_alignment_for_field (record_layout_info, tree,
                                                unsigned int);
extern void debug_rli (record_layout_info);
\f
/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */

static GTY(()) tree pending_sizes;

/* Nonzero means cannot safely call expand_expr now,
   so put variable sizes onto `pending_sizes' instead.  */

int immediate_size_expand;

/* Show that REFERENCE_TYPES are internal and should be Pmode.  Called only
   by front end.  */

void
internal_reference_types (void)
{
  reference_types_internal = 1;
}

/* Get a list of all the objects put on the pending sizes list.  */

tree
get_pending_sizes (void)
{
  tree chain = pending_sizes;
  tree t;

  /* Put each SAVE_EXPR into the current function.  */
  for (t = chain; t; t = TREE_CHAIN (t))
    SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;

  pending_sizes = 0;
  return chain;
}

/* Return nonzero if EXPR is present on the pending sizes list.  */

int
is_pending_size (tree expr)
{
  tree t;

  for (t = pending_sizes; t; t = TREE_CHAIN (t))
    if (TREE_VALUE (t) == expr)
      return 1;
  return 0;
}

/* Add EXPR to the pending sizes list.  */

void
put_pending_size (tree expr)
{
  /* Strip any simple arithmetic from EXPR to see if it has an underlying
     SAVE_EXPR.  */
  expr = skip_simple_arithmetic (expr);

  if (TREE_CODE (expr) == SAVE_EXPR)
    pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
}

/* Put a chain of objects into the pending sizes list, which must be
   empty.  */

void
put_pending_sizes (tree chain)
{
  if (pending_sizes)
    abort ();

  pending_sizes = chain;
}

/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
   to serve as the actual size-expression for a type or decl.  */

tree
variable_size (tree size)
{
  tree save;

  /* If the language-processor is to take responsibility for variable-sized
     items (e.g., languages which have elaboration procedures like Ada),
     just return SIZE unchanged.  Likewise for self-referential sizes and
     constant sizes.  */
  if (TREE_CONSTANT (size)
      || (*lang_hooks.decls.global_bindings_p) () < 0
      || CONTAINS_PLACEHOLDER_P (size))
    return size;

  if (TREE_CODE (size) == MINUS_EXPR && integer_onep (TREE_OPERAND (size, 1)))
    /* If this is the upper bound of a C array, leave the minus 1 outside
       the SAVE_EXPR so it can be folded away.  */
    TREE_OPERAND (size, 0) = save = save_expr (TREE_OPERAND (size, 0));
  else
    size = save = save_expr (size);

  /* If an array with a variable number of elements is declared, and
     the elements require destruction, we will emit a cleanup for the
     array.  That cleanup is run both on normal exit from the block
     and in the exception-handler for the block.  Normally, when code
     is used in both ordinary code and in an exception handler it is
     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
     not wish to do that here; the array-size is the same in both
     places.  */
  if (TREE_CODE (save) == SAVE_EXPR)
    SAVE_EXPR_PERSISTENT_P (save) = 1;

  if ((*lang_hooks.decls.global_bindings_p) ())
    {
      if (TREE_CONSTANT (size))
        error ("type size can't be explicitly evaluated");
      else
        error ("variable-size type declared outside of any function");

      return size_one_node;
    }

  if (immediate_size_expand)
    expand_expr (save, const0_rtx, VOIDmode, 0);
  else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
    /* The front-end doesn't want us to keep a list of the expressions
       that determine sizes for variable size objects.  */
    ;
  else
    put_pending_size (save);

  return size;
}
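
/* Illustrative sketch of a hypothetical caller (not taken from any
   particular front end): a front end building a variable-length array
   type would normally pass its element-count expression through
   variable_size so that the computation is wrapped in a SAVE_EXPR and
   evaluated only once, roughly

       tree nelts = variable_size (size_binop (PLUS_EXPR, n, size_one_node));

   where `n' stands for some front-end expression already converted to
   sizetype; layout_type then uses such sizes when laying out the array.  */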
\f
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

/* Return the machine mode to use for a nonscalar of SIZE bits.
   The mode must be in class CLASS, and have exactly that many bits.
   If LIMIT is nonzero, modes of wider than MAX_FIXED_MODE_SIZE will not
   be used.  */

enum machine_mode
mode_for_size (unsigned int size, enum mode_class class, int limit)
{
  enum machine_mode mode;

  if (limit && size > MAX_FIXED_MODE_SIZE)
    return BLKmode;

  /* Get the first mode which has this size, in the specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_BITSIZE (mode) == size)
      return mode;

  return BLKmode;
}
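
/* Illustrative sketch (hedged; exact results depend on the target): on a
   typical 32-bit target where SImode is 32 bits wide and
   MAX_FIXED_MODE_SIZE is at least 32, one would expect

       mode_for_size (32, MODE_INT, 1)   to return SImode, and
       mode_for_size (24, MODE_INT, 1)   to return BLKmode,

   since no integer mode there has exactly 24 bits.  */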

/* Similar, except passed a tree node.  */

enum machine_mode
mode_for_size_tree (tree size, enum mode_class class, int limit)
{
  if (TREE_CODE (size) != INTEGER_CST
      || TREE_OVERFLOW (size)
      /* What we really want to say here is that the size can fit in a
         host integer, but we know there's no way we'd find a mode for
         this many bits, so there's no point in doing the precise test.  */
      || compare_tree_int (size, 1000) > 0)
    return BLKmode;
  else
    return mode_for_size (tree_low_cst (size, 1), class, limit);
}

/* Similar, but never return BLKmode; return the narrowest mode that
   contains at least the requested number of bits.  */

enum machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class class)
{
  enum machine_mode mode;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_BITSIZE (mode) >= size)
      return mode;

  abort ();
}

/* Find an integer mode of the exact same size, or BLKmode on failure.  */

enum machine_mode
int_mode_for_mode (enum machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      if (mode == BLKmode)
        break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      abort ();
    }

  return mode;
}

/* Return the alignment of MODE.  This will be bounded by 1 and
   BIGGEST_ALIGNMENT.  */

unsigned int
get_mode_alignment (enum machine_mode mode)
{
  unsigned int alignment;

  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
    alignment = GET_MODE_UNIT_SIZE (mode);
  else
    alignment = GET_MODE_SIZE (mode);

  /* Extract the LSB of the size.  */
  alignment = alignment & -alignment;
  alignment *= BITS_PER_UNIT;

  alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment));
  return alignment;
}
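
/* Illustrative sketch (hedged): the value returned is the largest power
   of two that divides the mode's size in bytes, clamped to the range
   [1, BIGGEST_ALIGNMENT].  For instance, a 12-byte mode (if the target
   has one) has 4 as the low set bit of its size, so one would expect
   get_mode_alignment to yield 4 * BITS_PER_UNIT = 32 bits, assuming
   BIGGEST_ALIGNMENT permits it.  */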

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree arg = size_int_type (divisor, TREE_TYPE (value));

  return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
}

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree arg = size_int_type (divisor, TREE_TYPE (value));

  return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
}
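
/* Illustrative sketch (hedged): both helpers work on sizetype-family
   trees, so constant inputs fold immediately.  For example, with bit
   positions expressed in bitsizetype,

       round_up (bitsize_int (37), 32)    should fold to 64, and
       round_down (bitsize_int (37), 32)  should fold to 32,

   which is how place_field below advances rli->bitpos to an alignment
   boundary.  */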
\f
/* Subroutine of layout_decl: Force alignment required for the data type.
   But if the decl itself wants greater alignment, don't override that.  */

static inline void
do_type_align (tree type, tree decl)
{
  if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      if (TREE_CODE (decl) == FIELD_DECL)
        DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
    }
}

/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;

  if (code == CONST_DECL)
    return;
  else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
           && code != TYPE_DECL && code != FIELD_DECL)
    abort ();

  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    DECL_SIZE_UNIT (decl)
      = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
                                       bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      if (DECL_BIT_FIELD (decl))
        {
          DECL_BIT_FIELD_TYPE (decl) = type;

          /* A zero-length bit-field affects the alignment of the next
             field.  */
          if (integer_zerop (DECL_SIZE (decl))
              && ! DECL_PACKED (decl)
              && ! (*targetm.ms_bitfield_layout_p) (DECL_FIELD_CONTEXT (decl)))
            {
#ifdef PCC_BITFIELD_TYPE_MATTERS
              if (PCC_BITFIELD_TYPE_MATTERS)
                do_type_align (type, decl);
              else
#endif
                {
#ifdef EMPTY_FIELD_BOUNDARY
                  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
                    {
                      DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
                      DECL_USER_ALIGN (decl) = 0;
                    }
#endif
                }
            }

          /* See if we can use an ordinary integer mode for a bit-field.
             Conditions are: a fixed size that is correct for another mode
             and occupying a complete byte or bytes on proper boundary.  */
          if (TYPE_SIZE (type) != 0
              && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
              && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
            {
              enum machine_mode xmode
                = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);

              if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
                {
                  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
                                           DECL_ALIGN (decl));
                  DECL_MODE (decl) = xmode;
                  DECL_BIT_FIELD (decl) = 0;
                }
            }

          /* Turn off DECL_BIT_FIELD if we won't need it set.  */
          if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
              && known_align >= TYPE_ALIGN (type)
              && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
            DECL_BIT_FIELD (decl) = 0;
        }
      else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
        /* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
           round up; we'll reduce it again below.  */;
      else
        do_type_align (type, decl);

      /* If the field is of variable size, we can't misalign it since we
         have no way to make a temporary to align the result.  But this
         isn't an issue if the decl is not addressable.  Likewise if it
         is of unknown size.  */
      if (DECL_PACKED (decl)
          && !DECL_USER_ALIGN (decl)
          && (DECL_NONADDRESSABLE_P (decl)
              || DECL_SIZE_UNIT (decl) == 0
              || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);

      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (maximum_field_alignment != 0)
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
      if (! DECL_USER_ALIGN (decl))
        {
          /* Some targets (i.e. i386, VMS) limit struct field alignment
             to a lower boundary than alignment of variables unless
             it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
          DECL_ALIGN (decl)
            = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif
#ifdef ADJUST_FIELD_ALIGN
          DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
#endif
        }
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, larger_than_size) > 0)
        {
          int size_as_int = TREE_INT_CST_LOW (size);

          if (compare_tree_int (size, size_as_int) == 0)
            warning ("%Jsize of '%D' is %d bytes", decl, decl, size_as_int);
          else
            warning ("%Jsize of '%D' is larger than %d bytes",
                     decl, decl, larger_than_size);
        }
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
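
/* Illustrative sketch of a hypothetical caller: a front end that builds
   its own FIELD_DECLs normally only fills in the type, DECL_SIZE for
   explicit bit-fields, and any packing/alignment attributes, then lets
   layout_decl (directly or via place_field) derive the rest, roughly

       tree field = build_decl (FIELD_DECL, get_identifier ("f"),
                                integer_type_node);
       layout_decl (field, 0);   -- size, unit size, mode, alignment now set

   The identifier "f" is made up for the example; a KNOWN_ALIGN of 0 means
   "assume as much alignment as you like", as described above.  */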
\f
/* Hook for a front-end function that can modify the record layout as needed
   immediately before it is finalized.  */

void (*lang_adjust_rli) (record_layout_info) = 0;

void
set_lang_adjust_rli (void (*f) (record_layout_info))
{
  lang_adjust_rli = f;
}

/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (tree t)
{
  record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    rli->record_align = MAX (rli->record_align, (unsigned) STRUCTURE_SIZE_BOUNDARY);
#endif

  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;

  return rli;
}
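
/* Illustrative sketch (hedged): the expected calling sequence for laying
   out an aggregate type is roughly

       record_layout_info rli = start_record_layout (t);
       for (field = TYPE_FIELDS (t); field; field = TREE_CHAIN (field))
         place_field (rli, field);
       finish_record_layout (rli, 1);

   which is essentially what layout_type does for RECORD_TYPEs; the
   variables `t' and `field' here stand for the caller's own trees.  */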

/* These four routines perform computations that convert between
   the offset/bitpos forms and byte and bit offsets.  */

tree
bit_from_pos (tree offset, tree bitpos)
{
  return size_binop (PLUS_EXPR, bitpos,
                     size_binop (MULT_EXPR, convert (bitsizetype, offset),
                                 bitsize_unit_node));
}

tree
byte_from_pos (tree offset, tree bitpos)
{
  return size_binop (PLUS_EXPR, offset,
                     convert (sizetype,
                              size_binop (TRUNC_DIV_EXPR, bitpos,
                                          bitsize_unit_node)));
}

void
pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
              tree pos)
{
  *poffset = size_binop (MULT_EXPR,
                         convert (sizetype,
                                  size_binop (FLOOR_DIV_EXPR, pos,
                                              bitsize_int (off_align))),
                         size_int (off_align / BITS_PER_UNIT));
  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
}

/* Given a pointer to bit and byte offsets and an offset alignment,
   normalize the offsets so they are within the alignment.  */

void
normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
                                      bitsize_int (off_align));

      *poffset
        = size_binop (PLUS_EXPR, *poffset,
                      size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
                                  size_int (off_align / BITS_PER_UNIT)));

      *pbitpos
        = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
    }
}
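
/* Illustrative sketch (hedged): with OFF_ALIGN == 32, a raw bit position
   of 70 splits as

       pos_from_bit (&off, &bit, 32, bitsize_int (70));
         -- off becomes 8 (bytes: two whole 32-bit chunks below bit 70)
         -- bit becomes 6 (the remainder within the current chunk)

   and bit_from_pos (off, bit) folds back to 70.  Here `off' and `bit'
   stand for caller-provided sizetype/bitsizetype trees.  */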

/* Print debugging information about the information in RLI.  */

void
debug_rli (record_layout_info rli)
{
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
           rli->record_align, rli->unpacked_align,
           rli->offset_align);
  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  if (rli->pending_statics)
    {
      fprintf (stderr, "pending statics:\n");
      debug_tree (rli->pending_statics);
    }
}

/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */

void
normalize_rli (record_layout_info rli)
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}

/* Returns the size in bytes allocated so far.  */

tree
rli_size_unit_so_far (record_layout_info rli)
{
  return byte_from_pos (rli->offset, rli->bitpos);
}

/* Returns the size in bits allocated so far.  */

tree
rli_size_so_far (record_layout_info rli)
{
  return bit_from_pos (rli->offset, rli->bitpos);
}
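
/* Illustrative sketch (hedged): during layout the running position is
   kept as a byte OFFSET plus a bit BITPOS, so after placing, say, a
   4-byte int followed by a 3-bit bit-field one would expect

       rli_size_so_far (rli)        to fold to 35 bits, and
       rli_size_unit_so_far (rli)   to fold to 4 bytes (truncating),

   assuming no padding was inserted between the two fields.  */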

/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location is given by KNOWN_ALIGN.  Update the
   variable alignment fields in RLI, and return the alignment to give
   the FIELD.  */

static unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
                            unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  is_bitfield = (type != error_mark_node
                 && DECL_BIT_FIELD_TYPE (field)
                 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (is_bitfield && (* targetm.ms_bitfield_layout_p) (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
         affect the alignment of a record; even a zero-sized field
         can do this.  The alignment should be to the alignment of
         the type, except that for zero-size bitfields this only
         applies if there was an immediately prior, nonzero-size
         bitfield.  (That's the way it is, experimentally.)  */
      if (! integer_zerop (DECL_SIZE (field))
          ? ! DECL_PACKED (field)
          : (rli->prev_field
             && DECL_BIT_FIELD_TYPE (rli->prev_field)
             && ! integer_zerop (DECL_SIZE (rli->prev_field))))
        {
          unsigned int type_align = TYPE_ALIGN (type);
          type_align = MAX (type_align, desired_align);
          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          rli->record_align = MAX (rli->record_align, type_align);
          rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
        }
    }
#ifdef PCC_BITFIELD_TYPE_MATTERS
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
         alignment implied by their type.  */
      if (DECL_NAME (field) != 0)
        {
          unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
          if (! TYPE_USER_ALIGN (type))
            type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          else if (DECL_PACKED (field))
            type_align = MIN (type_align, BITS_PER_UNIT);

          /* The alignment of the record is increased to the maximum
             of the current alignment, the alignment indicated on the
             field (i.e., the alignment specified by an __aligned__
             attribute), and the alignment indicated by the type of
             the field.  */
          rli->record_align = MAX (rli->record_align, desired_align);
          rli->record_align = MAX (rli->record_align, type_align);

          if (warn_packed)
            rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
          user_align |= TYPE_USER_ALIGN (type);
        }
    }
#endif
  else
    {
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}

/* Called from place_field to handle unions.  */

static void
place_union_field (record_layout_info rli, tree field)
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);

  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold (build (COND_EXPR, sizetype,
                               DECL_QUALIFIER (field),
                               DECL_SIZE_UNIT (field), rli->offset));
}

#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
/* A bitfield of SIZE with a required access alignment of ALIGN is allocated
   at BYTE_OFFSET / BIT_OFFSET.  Return nonzero if the field would span more
   units of alignment than the underlying TYPE.  */
static int
excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
                  HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
{
  /* Note that the calculation of OFFSET might overflow; we calculate it so
     that we still get the right result as long as ALIGN is a power of two.  */
  unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;

  offset = offset % align;
  return ((offset + size + align - 1) / align
          > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
             / align));
}
#endif
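
/* Illustrative sketch (hedged): for a 6-bit field of a 32-bit type
   (TYPE_SIZE == 32, ALIGN == 32) starting at byte 3, bit 4, the field
   would occupy bits 28..33 of its 32-bit unit, i.e. it would straddle
   two units, so excess_unit_span (3, 4, 6, 32, type) is expected to
   return nonzero and place_field then bumps rli->bitpos up to the next
   32-bit boundary.  */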

/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
   is a FIELD_DECL to be added after those fields already present in
   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
   callers that desire that behavior must manually perform that step.)  */

void
place_field (record_layout_info rli, tree field)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The alignment FIELD would have if we just dropped it into the
     record as it presently stands.  */
  unsigned int known_align;
  unsigned int actual_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);

  if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    return;

  /* If FIELD is static, then treat it like a separate variable, not
     really like a structure field.  If it is a FUNCTION_DECL, it's a
     method.  In both cases, all we do is lay out the decl, and we do
     it *after* the record is laid out.  */
  if (TREE_CODE (field) == VAR_DECL)
    {
      rli->pending_statics = tree_cons (NULL_TREE, field,
                                        rli->pending_statics);
      return;
    }

  /* Enumerators and enum types which are local to this class need not
     be laid out.  Likewise for initialized constant fields.  */
  else if (TREE_CODE (field) != FIELD_DECL)
    return;

  /* Unions are laid out very differently than records, so split
     that code off to another function.  */
  else if (TREE_CODE (rli->t) != RECORD_TYPE)
    {
      place_union_field (rli, field);
      return;
    }

  /* Work out the known alignment so far.  Note that A & (-A) is the
     value of the least-significant bit in A that is one.  */
  if (! integer_zerop (rli->bitpos))
    known_align = (tree_low_cst (rli->bitpos, 1)
                   & - tree_low_cst (rli->bitpos, 1));
  else if (integer_zerop (rli->offset))
    known_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (rli->offset, 1))
    known_align = (BITS_PER_UNIT
                   * (tree_low_cst (rli->offset, 1)
                      & - tree_low_cst (rli->offset, 1)));
  else
    known_align = rli->offset_align;

  desired_align = update_alignment_for_field (rli, field, known_align);

  if (warn_packed && DECL_PACKED (field))
    {
      if (known_align >= TYPE_ALIGN (type))
        {
          if (TYPE_ALIGN (type) > desired_align)
            {
              if (STRICT_ALIGNMENT)
                warning ("%Jpacked attribute causes inefficient alignment "
                         "for '%D'", field, field);
              else
                warning ("%Jpacked attribute is unnecessary for '%D'",
                         field, field);
            }
        }
      else
        rli->packed_maybe_necessary = 1;
    }

  /* Does this field automatically have alignment it needs by virtue
     of the fields that precede it and the record's own alignment?  */
  if (known_align < desired_align)
    {
      /* No, we need to skip space before this field.
         Bump the cumulative size to multiple of field alignment.  */

      if (warn_padded)
        warning ("%Jpadding struct to align '%D'", field, field);

      /* If the alignment is still within offset_align, just align
         the bit position.  */
      if (desired_align < rli->offset_align)
        rli->bitpos = round_up (rli->bitpos, desired_align);
      else
        {
          /* First adjust OFFSET by the partial bits, then align.  */
          rli->offset
            = size_binop (PLUS_EXPR, rli->offset,
                          convert (sizetype,
                                   size_binop (CEIL_DIV_EXPR, rli->bitpos,
                                               bitsize_unit_node)));
          rli->bitpos = bitsize_zero_node;

          rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
        }

      if (! TREE_CONSTANT (rli->offset))
        rli->offset_align = desired_align;

    }

  /* Handle compatibility with PCC.  Note that if the record has any
     variable-sized fields, we need not worry about compatibility.  */
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD (field)
      && ! DECL_PACKED (field)
      && maximum_field_alignment == 0
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
        type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      /* A bit field may not span more units of alignment of its type
         than its type itself.  Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
        rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

#ifdef BITFIELD_NBYTES_LIMITED
  if (BITFIELD_NBYTES_LIMITED
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! DECL_PACKED (field)
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
        type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      if (maximum_field_alignment != 0)
        type_align = MIN (type_align, maximum_field_alignment);
      /* ??? This test is opposite the test in the containing if
         statement, so this code is unreachable currently.  */
      else if (DECL_PACKED (field))
        type_align = MIN (type_align, BITS_PER_UNIT);

      /* A bit field may not span the unit of alignment of its type.
         Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
        rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
     A subtlety:
        When a bit field is inserted into a packed record, the whole
        size of the underlying type is used by one or more same-size
        adjacent bitfields.  (That is, if it's long:3, 32 bits is
        used in the record, and any additional adjacent long bitfields are
        packed into the same chunk of 32 bits.  However, if the size
        changes, a new field of that size is allocated.)  In an unpacked
        record, this is the same as using alignment, but not equivalent
        when packing.

     Note: for compatibility, we use the type size, not the type alignment
     to determine alignment, since that matches the documentation.  */

  if ((* targetm.ms_bitfield_layout_p) (rli->t)
      && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
          || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
    {
      /* At this point, either the prior or current are bitfields,
         (possibly both), and we're dealing with MS packing.  */
      tree prev_saved = rli->prev_field;

      /* Is the prior field a bitfield?  If so, handle "runs" of same
         type size fields.  */
      if (rli->prev_field /* necessarily a bitfield if it exists.  */)
        {
          /* If both are bitfields, nonzero, and the same size, this is
             the middle of a run.  Zero declared size fields are special
             and handled as "end of run".  (Note: it's nonzero declared
             size, but equal type sizes!)  (Since we know that both
             the current and previous fields are bitfields by the
             time we check it, DECL_SIZE must be present for both.)  */
          if (DECL_BIT_FIELD_TYPE (field)
              && !integer_zerop (DECL_SIZE (field))
              && !integer_zerop (DECL_SIZE (rli->prev_field))
              && host_integerp (DECL_SIZE (rli->prev_field), 0)
              && host_integerp (TYPE_SIZE (type), 0)
              && simple_cst_equal (TYPE_SIZE (type),
                                   TYPE_SIZE (TREE_TYPE (rli->prev_field))))
            {
              /* We're in the middle of a run of equal type size fields; make
                 sure we realign if we run out of bits.  (Not decl size,
                 type size!)  */
              HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);

              if (rli->remaining_in_alignment < bitsize)
                {
                  /* out of bits; bump up to next 'word'.  */
                  rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
                  rli->bitpos
                    = size_binop (PLUS_EXPR, TYPE_SIZE (type),
                                  DECL_FIELD_BIT_OFFSET (rli->prev_field));
                  rli->prev_field = field;
                  rli->remaining_in_alignment
                    = tree_low_cst (TYPE_SIZE (type), 0);
                }

              rli->remaining_in_alignment -= bitsize;
            }
          else
            {
              /* End of a run: if leaving a run of bitfields of the same type
                 size, we have to "use up" the rest of the bits of the type
                 size.

                 Compute the new position as the sum of the size for the prior
                 type and where we first started working on that type.
                 Note: since the beginning of the field was aligned then
                 of course the end will be too.  No round needed.  */

              if (!integer_zerop (DECL_SIZE (rli->prev_field)))
                {
                  tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));

                  rli->bitpos
                    = size_binop (PLUS_EXPR, type_size,
                                  DECL_FIELD_BIT_OFFSET (rli->prev_field));
                }
              else
                /* We "use up" size zero fields; the code below should behave
                   as if the prior field was not a bitfield.  */
                prev_saved = NULL;

              /* Cause a new bitfield to be captured, either this time (if
                 currently a bitfield) or next time we see one.  */
              if (!DECL_BIT_FIELD_TYPE (field)
                  || integer_zerop (DECL_SIZE (field)))
                rli->prev_field = NULL;
            }

          rli->offset_align = tree_low_cst (TYPE_SIZE (type), 0);
          normalize_rli (rli);
        }

      /* If we're starting a new run of same size type bitfields
         (or a run of non-bitfields), set up the "first of the run"
         fields.

         That is, if the current field is not a bitfield, or if there
         was a prior bitfield the type sizes differ, or if there wasn't
         a prior bitfield the size of the current field is nonzero.

         Note: we must be sure to test ONLY the type size if there was
         a prior bitfield and ONLY for the current field being zero if
         there wasn't.  */

      if (!DECL_BIT_FIELD_TYPE (field)
          || ( prev_saved != NULL
               ? !simple_cst_equal (TYPE_SIZE (type),
                                    TYPE_SIZE (TREE_TYPE (prev_saved)))
               : !integer_zerop (DECL_SIZE (field)) ))
        {
          /* Never smaller than a byte for compatibility.  */
          unsigned int type_align = BITS_PER_UNIT;

          /* (When not a bitfield), we could be seeing a flex array (with
             no DECL_SIZE).  Since we won't be using remaining_in_alignment
             until we see a bitfield (and come by here again) we just skip
             calculating it.  */
          if (DECL_SIZE (field) != NULL
              && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
              && host_integerp (DECL_SIZE (field), 0))
            rli->remaining_in_alignment
              = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 0)
                - tree_low_cst (DECL_SIZE (field), 0);

          /* Now align (conventionally) for the new type.  */
          if (!DECL_PACKED (field))
            type_align = MAX (TYPE_ALIGN (type), type_align);

          if (prev_saved
              && DECL_BIT_FIELD_TYPE (prev_saved)
              /* If the previous bit-field is zero-sized, we've already
                 accounted for its alignment needs (or ignored it, if
                 appropriate) while placing it.  */
              && ! integer_zerop (DECL_SIZE (prev_saved)))
            type_align = MAX (type_align,
                              TYPE_ALIGN (TREE_TYPE (prev_saved)));

          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);

          rli->bitpos = round_up (rli->bitpos, type_align);

          /* If we really aligned, don't allow subsequent bitfields
             to undo that.  */
          rli->prev_field = NULL;
        }
    }

  /* Offset so far becomes the position of this field after normalizing.  */
  normalize_rli (rli);
  DECL_FIELD_OFFSET (field) = rli->offset;
  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);

  /* If this field ended up more aligned than we thought it would be (we
     approximate this by seeing if its position changed), lay out the field
     again; perhaps we can use an integral mode for it now.  */
  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
    actual_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
    actual_align = (BITS_PER_UNIT
                    * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
                       & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
  else
    actual_align = DECL_OFFSET_ALIGN (field);

  if (known_align != actual_align)
    layout_decl (field, actual_align);

  /* Only the MS bitfields use this.  */
  if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
    rli->prev_field = field;

  /* Now add size of this field to the size of the record.  If the size is
     not constant, treat the field as being a multiple of bytes and just
     adjust the offset, resetting the bit position.  Otherwise, apportion the
     size amongst the bit position and offset.  First handle the case of an
     unspecified size, which can happen when we have an invalid nested struct
     definition, such as struct j { struct j { int i; } }.  The error message
     is printed in finish_struct.  */
  if (DECL_SIZE (field) == 0)
    /* Do nothing.  */;
  else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
           || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
    {
      rli->offset
        = size_binop (PLUS_EXPR, rli->offset,
                      convert (sizetype,
                               size_binop (CEIL_DIV_EXPR, rli->bitpos,
                                           bitsize_unit_node)));
      rli->offset
        = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
      rli->bitpos = bitsize_zero_node;
      rli->offset_align = MIN (rli->offset_align, desired_align);
    }
  else
    {
      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
      normalize_rli (rli);
    }
}

/* Assuming that all the fields have been laid out, this function uses
   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  */

static void
finalize_record_size (record_layout_info rli)
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
                                          rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Round the size up to be a multiple of the required alignment.  */
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
                                      TYPE_ALIGN (rli->t) / BITS_PER_UNIT);

  if (warn_padded && TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
    warning ("padding struct size to alignment boundary");

  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
        = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
        {
          TYPE_PACKED (rli->t) = 0;

          if (TYPE_NAME (rli->t))
            {
              const char *name;

              if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
                name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
              else
                name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));

              if (STRICT_ALIGNMENT)
                warning ("packed attribute causes inefficient alignment for `%s'", name);
              else
                warning ("packed attribute is unnecessary for `%s'", name);
            }
          else
            {
              if (STRICT_ALIGNMENT)
                warning ("packed attribute causes inefficient alignment");
              else
                warning ("packed attribute is unnecessary");
            }
        }
    }
}

/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).  */

void
compute_record_mode (tree type)
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  TYPE_MODE (type) = BLKmode;

  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
        continue;

      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
          || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
              && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
              && !(TYPE_SIZE (TREE_TYPE (field)) != 0
                   && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
          || ! host_integerp (bit_position (field), 1)
          || DECL_SIZE (field) == 0
          || ! host_integerp (DECL_SIZE (field), 1))
        return;

      /* If this field is the whole struct, remember its mode so
         that, say, we can put a double in a class into a DF
         register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
        mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, eg. c4x, it is sub-optimal
         to access an aligned BLKmode structure as a scalar.  */

      if (MEMBER_TYPE_FORCES_BLK (field, mode))
        return;
#endif /* MEMBER_TYPE_FORCES_BLK */
    }

  /* If we only have one real field; use its mode.  This only applies to
     RECORD_TYPE.  This does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
    TYPE_MODE (type) = mode;
  else
    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
            || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
         don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      TYPE_MODE (type) = BLKmode;
    }
}
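
/* Illustrative sketch (hedged; results vary by target): for a struct
   such as

       struct wrapper { double d; };

   the single field spans the whole record, so on most targets the loop
   above records DFmode and the struct itself ends up with TYPE_MODE ==
   DFmode, letting it live in a floating-point register.  A struct of
   two doubles would instead fall back to mode_for_size_tree, typically
   yielding BLKmode.  The type name `wrapper' is made up for the
   example.  */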
9328904c
MM
1364
1365/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1366 out. */
1367
1368static void
46c5ad27 1369finalize_type_size (tree type)
9328904c
MM
1370{
1371 /* Normally, use the alignment corresponding to the mode chosen.
1372 However, where strict alignment is not required, avoid
1373 over-aligning structures, since most compilers do not do this
1374 alignment. */
1375
1376 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1377 && (STRICT_ALIGNMENT
1378 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1379 && TREE_CODE (type) != QUAL_UNION_TYPE
1380 && TREE_CODE (type) != ARRAY_TYPE)))
11cf4d18
JJ
1381 {
1382 TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1383 TYPE_USER_ALIGN (type) = 0;
1384 }
9328904c
MM
1385
1386 /* Do machine-dependent extra alignment. */
1387#ifdef ROUND_TYPE_ALIGN
1388 TYPE_ALIGN (type)
1389 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1390#endif
1391
9328904c 1392 /* If we failed to find a simple way to calculate the unit size
770ae6cc 1393 of the type, find it by division. */
9328904c
MM
1394 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1395 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1396 result will fit in sizetype. We will get more efficient code using
1397 sizetype, so we force a conversion. */
1398 TYPE_SIZE_UNIT (type)
1399 = convert (sizetype,
1400 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
770ae6cc 1401 bitsize_unit_node));
9328904c 1402
770ae6cc
RK
1403 if (TYPE_SIZE (type) != 0)
1404 {
770ae6cc
RK
1405 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1406 TYPE_SIZE_UNIT (type)
1407 = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
770ae6cc
RK
1408 }
1409
1410 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1411 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1412 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
9328904c
MM
1413 if (TYPE_SIZE_UNIT (type) != 0
1414 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1415 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1416
1417 /* Also layout any other variants of the type. */
1418 if (TYPE_NEXT_VARIANT (type)
1419 || type != TYPE_MAIN_VARIANT (type))
1420 {
1421 tree variant;
1422 /* Record layout info of this variant. */
1423 tree size = TYPE_SIZE (type);
1424 tree size_unit = TYPE_SIZE_UNIT (type);
1425 unsigned int align = TYPE_ALIGN (type);
11cf4d18 1426 unsigned int user_align = TYPE_USER_ALIGN (type);
9328904c
MM
1427 enum machine_mode mode = TYPE_MODE (type);
1428
1429 /* Copy it into all variants. */
1430 for (variant = TYPE_MAIN_VARIANT (type);
1431 variant != 0;
1432 variant = TYPE_NEXT_VARIANT (variant))
1433 {
1434 TYPE_SIZE (variant) = size;
1435 TYPE_SIZE_UNIT (variant) = size_unit;
1436 TYPE_ALIGN (variant) = align;
11cf4d18 1437 TYPE_USER_ALIGN (variant) = user_align;
9328904c
MM
1438 TYPE_MODE (variant) = mode;
1439 }
1440 }
1441}
1442
1443/* Do all of the work required to layout the type indicated by RLI,
1444 once the fields have been laid out. This function will call `free'
17bbb839
MM
1445 for RLI, unless FREE_P is false. Passing a value other than false
1446 for FREE_P is bad practice; this option only exists to support the
1447 G++ 3.2 ABI. */
9328904c
MM
1448
1449void
46c5ad27 1450finish_record_layout (record_layout_info rli, int free_p)
9328904c 1451{
770ae6cc
RK
1452 /* Compute the final size. */
1453 finalize_record_size (rli);
1454
1455 /* Compute the TYPE_MODE for the record. */
1456 compute_record_mode (rli->t);
cc9d4a85 1457
8d8238b6
JM
1458 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1459 finalize_type_size (rli->t);
1460
9328904c
MM
1461 /* Lay out any static members. This is done now because their type
1462 may use the record's type. */
1463 while (rli->pending_statics)
1464 {
1465 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1466 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1467 }
cc9d4a85 1468
9328904c 1469 /* Clean up. */
17bbb839
MM
1470 if (free_p)
1471 free (rli);
9328904c 1472}
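/* Editor's sketch (not part of the original source): the typical sequence
   a caller follows with these routines, as the RECORD_TYPE case of
   layout_type below does.  `type' is assumed to be a record whose
   TYPE_FIELDS chain has already been built.  */
#if 0
  {
    record_layout_info rli = start_record_layout (type);
    tree field;

    /* Place each field in turn; place_field updates the running offset
       and alignment recorded in RLI.  */
    for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
      place_field (rli, field);

    /* Compute the final size and mode and release RLI.  */
    finish_record_layout (rli, /*free_p=*/true);
  }
#endif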
7306ed3f 1473\f
4977bab6
ZW
1474
 1475/* Finish processing a builtin RECORD_TYPE type TYPE.  Its name is
1476 NAME, its fields are chained in reverse on FIELDS.
1477
1478 If ALIGN_TYPE is non-null, it is given the same alignment as
1479 ALIGN_TYPE. */
1480
1481void
46c5ad27
AJ
1482finish_builtin_struct (tree type, const char *name, tree fields,
1483 tree align_type)
4977bab6
ZW
1484{
1485 tree tail, next;
1486
1487 for (tail = NULL_TREE; fields; tail = fields, fields = next)
1488 {
1489 DECL_FIELD_CONTEXT (fields) = type;
1490 next = TREE_CHAIN (fields);
1491 TREE_CHAIN (fields) = tail;
1492 }
1493 TYPE_FIELDS (type) = tail;
1494
1495 if (align_type)
1496 {
1497 TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
1498 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
1499 }
1500
1501 layout_type (type);
1502#if 0 /* not yet, should get fixed properly later */
1503 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
1504#else
1505 TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
1506#endif
1507 TYPE_STUB_DECL (type) = TYPE_NAME (type);
1508 layout_decl (TYPE_NAME (type), 0);
1509}
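/* Editor's sketch (not part of the original source): building a two-field
   builtin record with finish_builtin_struct.  The identifiers `t', `lo'
   and `hi' and the name "__example_pair" are hypothetical.  The field
   chain is passed in reverse declaration order, as the comment above
   requires; the routine reverses it again.  */
#if 0
  {
    tree t = make_node (RECORD_TYPE);
    tree lo = build_decl (FIELD_DECL, get_identifier ("lo"), integer_type_node);
    tree hi = build_decl (FIELD_DECL, get_identifier ("hi"), integer_type_node);

    TREE_CHAIN (hi) = lo;	/* Chain in reverse: hi -> lo.  */
    finish_builtin_struct (t, "__example_pair", hi, NULL_TREE);
    /* TYPE_FIELDS (t) is now lo -> hi, and t has been laid out.  */
  }
#endif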
1510
7306ed3f
JW
1511/* Calculate the mode, size, and alignment for TYPE.
1512 For an array type, calculate the element separation as well.
1513 Record TYPE on the chain of permanent or temporary types
1514 so that dbxout will find out about it.
1515
1516 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1517 layout_type does nothing on such a type.
1518
1519 If the type is incomplete, its TYPE_SIZE remains zero. */
1520
1521void
46c5ad27 1522layout_type (tree type)
7306ed3f 1523{
7306ed3f
JW
1524 if (type == 0)
1525 abort ();
1526
1527 /* Do nothing if type has been laid out before. */
1528 if (TYPE_SIZE (type))
1529 return;
1530
7306ed3f
JW
1531 switch (TREE_CODE (type))
1532 {
1533 case LANG_TYPE:
1534 /* This kind of type is the responsibility
9faa82d8 1535 of the language-specific code. */
7306ed3f
JW
1536 abort ();
1537
2d76cb1a 1538 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
e9a25f70 1539 if (TYPE_PRECISION (type) == 0)
2d76cb1a 1540 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
d4b60170 1541
2d76cb1a 1542 /* ... fall through ... */
e9a25f70 1543
7306ed3f
JW
1544 case INTEGER_TYPE:
1545 case ENUMERAL_TYPE:
fc69eca0 1546 case CHAR_TYPE:
e2a77f99
RK
1547 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1548 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
7306ed3f
JW
1549 TREE_UNSIGNED (type) = 1;
1550
5e9bec99
RK
1551 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1552 MODE_INT);
06ceef4e 1553 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
ead17059 1554 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
7306ed3f
JW
1555 break;
1556
1557 case REAL_TYPE:
1558 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
06ceef4e 1559 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
ead17059 1560 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
7306ed3f
JW
1561 break;
1562
1563 case COMPLEX_TYPE:
1564 TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
1565 TYPE_MODE (type)
1566 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1567 (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1568 ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
1569 0);
06ceef4e 1570 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
ead17059 1571 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
7306ed3f
JW
1572 break;
1573
0b4565c9
BS
1574 case VECTOR_TYPE:
1575 {
1576 tree subtype;
1577
1578 subtype = TREE_TYPE (type);
1579 TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
1580 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1581 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1582 }
1583 break;
1584
7306ed3f 1585 case VOID_TYPE:
770ae6cc 1586 /* This is an incomplete type and so doesn't have a size. */
7306ed3f 1587 TYPE_ALIGN (type) = 1;
11cf4d18 1588 TYPE_USER_ALIGN (type) = 0;
7306ed3f
JW
1589 TYPE_MODE (type) = VOIDmode;
1590 break;
1591
321cb743 1592 case OFFSET_TYPE:
06ceef4e 1593 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
ead17059 1594 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
25caaba8
R
1595 /* A pointer might be MODE_PARTIAL_INT,
1596 but ptrdiff_t must be integral. */
1597 TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
321cb743
MT
1598 break;
1599
7306ed3f
JW
1600 case FUNCTION_TYPE:
1601 case METHOD_TYPE:
13275056 1602 TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
06ceef4e 1603 TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
ead17059 1604 TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
7306ed3f
JW
1605 break;
1606
1607 case POINTER_TYPE:
1608 case REFERENCE_TYPE:
b5d6a2ff 1609 {
b5d6a2ff 1610
4977bab6
ZW
1611 enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
1612 && reference_types_internal)
1613 ? Pmode : TYPE_MODE (type));
1614
1615 int nbits = GET_MODE_BITSIZE (mode);
1616
b5d6a2ff 1617 TYPE_SIZE (type) = bitsize_int (nbits);
4977bab6 1618 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
b5d6a2ff
RK
1619 TREE_UNSIGNED (type) = 1;
1620 TYPE_PRECISION (type) = nbits;
1621 }
7306ed3f
JW
1622 break;
1623
1624 case ARRAY_TYPE:
1625 {
b3694847
SS
1626 tree index = TYPE_DOMAIN (type);
1627 tree element = TREE_TYPE (type);
7306ed3f
JW
1628
1629 build_pointer_type (element);
1630
1631 /* We need to know both bounds in order to compute the size. */
1632 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1633 && TYPE_SIZE (element))
1634 {
e24ff973
RK
1635 tree ub = TYPE_MAX_VALUE (index);
1636 tree lb = TYPE_MIN_VALUE (index);
1637 tree length;
74a4fbfc 1638 tree element_size;
e24ff973 1639
a2d53b28
RH
1640 /* The initial subtraction should happen in the original type so
 1641		 that (possibly) negative values are handled appropriately.  */
e24ff973 1642 length = size_binop (PLUS_EXPR, size_one_node,
fed3cef0
RK
1643 convert (sizetype,
1644 fold (build (MINUS_EXPR,
1645 TREE_TYPE (lb),
1646 ub, lb))));
7306ed3f 1647
74a4fbfc
DB
1648 /* Special handling for arrays of bits (for Chill). */
1649 element_size = TYPE_SIZE (element);
382110c0
RK
1650 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1651 && (integer_zerop (TYPE_MAX_VALUE (element))
1652 || integer_onep (TYPE_MAX_VALUE (element)))
1653 && host_integerp (TYPE_MIN_VALUE (element), 1))
74a4fbfc 1654 {
d4b60170 1655 HOST_WIDE_INT maxvalue
382110c0 1656 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
d4b60170 1657 HOST_WIDE_INT minvalue
382110c0 1658 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
d4b60170 1659
74a4fbfc
DB
1660 if (maxvalue - minvalue == 1
1661 && (maxvalue == 1 || maxvalue == 0))
1662 element_size = integer_one_node;
1663 }
1664
0d3c8800
RK
1665 /* If neither bound is a constant and sizetype is signed, make
1666 sure the size is never negative. We should really do this
1667 if *either* bound is non-constant, but this is the best
1668 compromise between C and Ada. */
1669 if (! TREE_UNSIGNED (sizetype)
1670 && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
1671 && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
1672 length = size_binop (MAX_EXPR, length, size_zero_node);
1673
fed3cef0
RK
1674 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1675 convert (bitsizetype, length));
ead17059
RH
1676
1677 /* If we know the size of the element, calculate the total
1678 size directly, rather than do some division thing below.
1679 This optimization helps Fortran assumed-size arrays
1680 (where the size of the array is determined at runtime)
7771032e
DB
1681 substantially.
1682 Note that we can't do this in the case where the size of
1683 the elements is one bit since TYPE_SIZE_UNIT cannot be
1684 set correctly in that case. */
fed3cef0 1685 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
d4b60170
RK
1686 TYPE_SIZE_UNIT (type)
1687 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
7306ed3f
JW
1688 }
1689
1690 /* Now round the alignment and size,
1691 using machine-dependent criteria if any. */
1692
1693#ifdef ROUND_TYPE_ALIGN
1694 TYPE_ALIGN (type)
1695 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1696#else
1697 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1698#endif
c163d21d 1699 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
7306ed3f
JW
1700 TYPE_MODE (type) = BLKmode;
1701 if (TYPE_SIZE (type) != 0
31a02448 1702#ifdef MEMBER_TYPE_FORCES_BLK
182e515e 1703 && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
31a02448 1704#endif
7306ed3f
JW
1705 /* BLKmode elements force BLKmode aggregate;
1706 else extract/store fields may lose. */
1707 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1708 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1709 {
a1471322
RK
1710 /* One-element arrays get the component type's mode. */
1711 if (simple_cst_equal (TYPE_SIZE (type),
1712 TYPE_SIZE (TREE_TYPE (type))))
1713 TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1714 else
1715 TYPE_MODE (type)
1716 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
7306ed3f 1717
72c602fc
RK
1718 if (TYPE_MODE (type) != BLKmode
1719 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1720 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
7306ed3f
JW
1721 && TYPE_MODE (type) != BLKmode)
1722 {
1723 TYPE_NO_FORCE_BLK (type) = 1;
1724 TYPE_MODE (type) = BLKmode;
1725 }
7306ed3f
JW
1726 }
1727 break;
1728 }
1729
1730 case RECORD_TYPE:
cc9d4a85
MM
1731 case UNION_TYPE:
1732 case QUAL_UNION_TYPE:
9328904c
MM
1733 {
1734 tree field;
1735 record_layout_info rli;
1736
1737 /* Initialize the layout information. */
770ae6cc
RK
1738 rli = start_record_layout (type);
1739
cc9d4a85
MM
1740 /* If this is a QUAL_UNION_TYPE, we want to process the fields
1741 in the reverse order in building the COND_EXPR that denotes
1742 its size. We reverse them again later. */
1743 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1744 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
770ae6cc
RK
1745
1746 /* Place all the fields. */
9328904c 1747 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
770ae6cc
RK
1748 place_field (rli, field);
1749
cc9d4a85
MM
1750 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1751 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
770ae6cc 1752
e0cea8d9
RK
1753 if (lang_adjust_rli)
1754 (*lang_adjust_rli) (rli);
1755
9328904c 1756 /* Finish laying out the record. */
17bbb839 1757 finish_record_layout (rli, /*free_p=*/true);
9328904c 1758 }
7306ed3f
JW
1759 break;
1760
2d76cb1a 1761 case SET_TYPE: /* Used by Chill and Pascal. */
b5d11e41
PB
1762 if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
1763 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
cf403648 1764 abort ();
b5d11e41
PB
1765 else
1766 {
1767#ifndef SET_WORD_SIZE
1768#define SET_WORD_SIZE BITS_PER_WORD
1769#endif
729a2125
RK
1770 unsigned int alignment
1771 = set_alignment ? set_alignment : SET_WORD_SIZE;
0384674e
RK
1772 HOST_WIDE_INT size_in_bits
1773 = (tree_low_cst (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
1774 - tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0) + 1);
1775 HOST_WIDE_INT rounded_size
b5d11e41 1776 = ((size_in_bits + alignment - 1) / alignment) * alignment;
729a2125
RK
1777
1778 if (rounded_size > (int) alignment)
b5d11e41
PB
1779 TYPE_MODE (type) = BLKmode;
1780 else
1781 TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
729a2125 1782
06ceef4e 1783 TYPE_SIZE (type) = bitsize_int (rounded_size);
ead17059 1784 TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
b5d11e41 1785 TYPE_ALIGN (type) = alignment;
11cf4d18 1786 TYPE_USER_ALIGN (type) = 0;
b5d11e41
PB
1787 TYPE_PRECISION (type) = size_in_bits;
1788 }
1789 break;
1790
4cc89e53
RS
1791 case FILE_TYPE:
1792 /* The size may vary in different languages, so the language front end
1793 should fill in the size. */
1794 TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
11cf4d18 1795 TYPE_USER_ALIGN (type) = 0;
4cc89e53
RS
1796 TYPE_MODE (type) = BLKmode;
1797 break;
1798
7306ed3f
JW
1799 default:
1800 abort ();
729a2125 1801 }
7306ed3f 1802
9328904c 1803 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
cc9d4a85
MM
1804 records and unions, finish_record_layout already called this
1805 function. */
786de7eb 1806 if (TREE_CODE (type) != RECORD_TYPE
cc9d4a85
MM
1807 && TREE_CODE (type) != UNION_TYPE
1808 && TREE_CODE (type) != QUAL_UNION_TYPE)
9328904c 1809 finalize_type_size (type);
7306ed3f 1810
fed3cef0
RK
1811 /* If this type is created before sizetype has been permanently set,
1812 record it so set_sizetype can fix it up. */
1813 if (! sizetype_set)
ad41cc2a 1814 early_type_list = tree_cons (NULL_TREE, type, early_type_list);
dc5041ab
JJ
1815
1816 /* If an alias set has been set for this aggregate when it was incomplete,
1817 force it into alias set 0.
1818 This is too conservative, but we cannot call record_component_aliases
1819 here because some frontends still change the aggregates after
1820 layout_type. */
1821 if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1822 TYPE_ALIAS_SET (type) = 0;
7306ed3f
JW
1823}
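/* Editor's sketch (not part of the original source): exercising the
   ARRAY_TYPE case of layout_type by hand for something like `int[10]'.
   The local names are hypothetical; build_index_type is assumed to
   produce the 0..9 domain.  */
#if 0
  {
    tree t = make_node (ARRAY_TYPE);

    TREE_TYPE (t) = integer_type_node;
    TYPE_DOMAIN (t) = build_index_type (build_int_2 (9, 0));
    layout_type (t);
    /* TYPE_SIZE (t) is now 10 * TYPE_SIZE (integer_type_node), expressed
       in bitsizetype, and TYPE_SIZE_UNIT (t) is the byte count in
       sizetype.  */
  }
#endif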
1824\f
1825/* Create and return a type for signed integers of PRECISION bits. */
1826
1827tree
46c5ad27 1828make_signed_type (int precision)
7306ed3f 1829{
b3694847 1830 tree type = make_node (INTEGER_TYPE);
7306ed3f
JW
1831
1832 TYPE_PRECISION (type) = precision;
1833
fed3cef0 1834 fixup_signed_type (type);
7306ed3f
JW
1835 return type;
1836}
1837
1838/* Create and return a type for unsigned integers of PRECISION bits. */
1839
1840tree
46c5ad27 1841make_unsigned_type (int precision)
7306ed3f 1842{
b3694847 1843 tree type = make_node (INTEGER_TYPE);
7306ed3f
JW
1844
1845 TYPE_PRECISION (type) = precision;
1846
7306ed3f
JW
1847 fixup_unsigned_type (type);
1848 return type;
1849}
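/* Editor's illustration (not part of the original source): a 24-bit
   unsigned type created here gets TYPE_MIN_VALUE 0 and TYPE_MAX_VALUE
   16777215 from fixup_unsigned_type, and layout_type then picks the
   smallest integer mode of at least 24 bits (SImode on common targets).  */
#if 0
  tree u24 = make_unsigned_type (24);
#endif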
fed3cef0
RK
1850\f
1851/* Initialize sizetype and bitsizetype to a reasonable and temporary
1852 value to enable integer types to be created. */
1853
1854void
46c5ad27 1855initialize_sizetypes (void)
fed3cef0
RK
1856{
1857 tree t = make_node (INTEGER_TYPE);
1858
1859 /* Set this so we do something reasonable for the build_int_2 calls
1860 below. */
1861 integer_type_node = t;
1862
1863 TYPE_MODE (t) = SImode;
1864 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
11cf4d18 1865 TYPE_USER_ALIGN (t) = 0;
fed3cef0
RK
1866 TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
1867 TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
1868 TREE_UNSIGNED (t) = 1;
1869 TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
1870 TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
770ae6cc 1871 TYPE_IS_SIZETYPE (t) = 1;
fed3cef0
RK
1872
1873 /* 1000 avoids problems with possible overflow and is certainly
1874 larger than any size value we'd want to be storing. */
1875 TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);
1876
1877 /* These two must be different nodes because of the caching done in
1878 size_int_wide. */
1879 sizetype = t;
1880 bitsizetype = copy_node (t);
1881 integer_type_node = 0;
1882}
7306ed3f 1883
896cced4 1884/* Set sizetype to TYPE, and initialize *sizetype accordingly.
f8dac6eb
R
1885 Also update the type of any standard type's sizes made so far. */
1886
1887void
46c5ad27 1888set_sizetype (tree type)
f8dac6eb 1889{
d4b60170 1890 int oprecision = TYPE_PRECISION (type);
f8dac6eb 1891 /* The *bitsizetype types use a precision that avoids overflows when
d4b60170
RK
1892 calculating signed sizes / offsets in bits. However, when
1893 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
1894 precision. */
11a6092b 1895 int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
d4b60170 1896 2 * HOST_BITS_PER_WIDE_INT);
fed3cef0 1897 unsigned int i;
ad41cc2a 1898 tree t;
fed3cef0
RK
1899
1900 if (sizetype_set)
1901 abort ();
81b3411c 1902
fed3cef0
RK
1903 /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE. */
1904 sizetype = copy_node (type);
21318741 1905 TYPE_DOMAIN (sizetype) = type;
770ae6cc 1906 TYPE_IS_SIZETYPE (sizetype) = 1;
81b3411c
BS
1907 bitsizetype = make_node (INTEGER_TYPE);
1908 TYPE_NAME (bitsizetype) = TYPE_NAME (type);
f8dac6eb 1909 TYPE_PRECISION (bitsizetype) = precision;
770ae6cc 1910 TYPE_IS_SIZETYPE (bitsizetype) = 1;
d4b60170 1911
896cced4
RH
1912 if (TREE_UNSIGNED (type))
1913 fixup_unsigned_type (bitsizetype);
1914 else
1915 fixup_signed_type (bitsizetype);
d4b60170 1916
f8dac6eb
R
1917 layout_type (bitsizetype);
1918
896cced4
RH
1919 if (TREE_UNSIGNED (type))
1920 {
1921 usizetype = sizetype;
1922 ubitsizetype = bitsizetype;
fed3cef0
RK
1923 ssizetype = copy_node (make_signed_type (oprecision));
1924 sbitsizetype = copy_node (make_signed_type (precision));
896cced4
RH
1925 }
1926 else
1927 {
1928 ssizetype = sizetype;
1929 sbitsizetype = bitsizetype;
fed3cef0
RK
1930 usizetype = copy_node (make_unsigned_type (oprecision));
1931 ubitsizetype = copy_node (make_unsigned_type (precision));
896cced4 1932 }
fed3cef0
RK
1933
1934 TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");
1935
21318741 1936  /* Show each is a sizetype, is a main variant, and has no pointers to it.  */
b6a1cbae 1937 for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
21318741
RK
1938 {
1939 TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
1940 TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
1941 TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
1942 TYPE_POINTER_TO (sizetype_tab[i]) = 0;
1943 TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
1944 }
d7db6646 1945
fed3cef0
RK
1946 /* Go down each of the types we already made and set the proper type
1947 for the sizes in them. */
ad41cc2a 1948 for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
fed3cef0 1949 {
de7df9eb
JM
1950 if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE
1951 && TREE_CODE (TREE_VALUE (t)) != BOOLEAN_TYPE)
fed3cef0
RK
1952 abort ();
1953
ad41cc2a
RK
1954 TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
1955 TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
fed3cef0
RK
1956 }
1957
1958 early_type_list = 0;
1959 sizetype_set = 1;
1960}
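/* Editor's note (not part of the original source): a hedged sketch of a
   typical call.  `frontend_size_type' is a hypothetical stand-in for the
   node a front end uses for `size_t'.  With a 32-bit unsigned sizetype,
   BITS_PER_UNIT_LOG == 3 and a 64-bit HOST_WIDE_INT, bitsizetype gets
   MIN (32 + 3 + 1, 128) == 36 bits of precision.  */
#if 0
  set_sizetype (frontend_size_type);
#endif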
1961\f
7b6d72fc
MM
 1962/* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE,
1963 BOOLEAN_TYPE, or CHAR_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
1964 for TYPE, based on the PRECISION and whether or not the TYPE
1965 IS_UNSIGNED. PRECISION need not correspond to a width supported
1966 natively by the hardware; for example, on a machine with 8-bit,
1967 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
1968 61. */
1969
1970void
1971set_min_and_max_values_for_integral_type (tree type,
1972 int precision,
1973 bool is_unsigned)
1974{
1975 tree min_value;
1976 tree max_value;
1977
1978 if (is_unsigned)
1979 {
1980 min_value = build_int_2 (0, 0);
1981 max_value
1982 = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
1983 ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
1984 precision - HOST_BITS_PER_WIDE_INT > 0
1985 ? ((unsigned HOST_WIDE_INT) ~0
1986 >> (HOST_BITS_PER_WIDE_INT
1987 - (precision - HOST_BITS_PER_WIDE_INT)))
1988 : 0);
1989 }
1990 else
1991 {
1992 min_value
1993 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
1994 ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
1995 (((HOST_WIDE_INT) (-1)
1996 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1997 ? precision - HOST_BITS_PER_WIDE_INT - 1
1998 : 0))));
1999 max_value
2000 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
2001 ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
2002 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2003 ? (((HOST_WIDE_INT) 1
2004 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
2005 : 0));
2006 }
2007
2008 TREE_TYPE (min_value) = type;
2009 TREE_TYPE (max_value) = type;
2010 TYPE_MIN_VALUE (type) = min_value;
2011 TYPE_MAX_VALUE (type) = max_value;
2012}
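/* Editor's illustration (not part of the original source): for a
   hypothetical 7-bit type the routine above yields the range [0, 127]
   when IS_UNSIGNED and [-64, 63] otherwise.  */
#if 0
  {
    tree t = make_node (INTEGER_TYPE);

    TYPE_PRECISION (t) = 7;
    set_min_and_max_values_for_integral_type (t, 7, /*is_unsigned=*/true);
    /* TYPE_MIN_VALUE (t) is 0, TYPE_MAX_VALUE (t) is 127.  */
  }
#endif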
2013
4cc89e53 2014/* Set the extreme values of TYPE based on its precision in bits,
13756074 2015 then lay it out. Used when make_signed_type won't do
4cc89e53
RS
2016 because the tree code is not INTEGER_TYPE.
2017 E.g. for Pascal, when the -fsigned-char option is given. */
2018
2019void
46c5ad27 2020fixup_signed_type (tree type)
4cc89e53 2021{
b3694847 2022 int precision = TYPE_PRECISION (type);
4cc89e53 2023
9cd56be1
JH
 2024  /* We cannot properly represent constants wider than
 2025     2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types,
 2026     as they are used by the i386 vector extensions and friends.  */
2027 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2028 precision = HOST_BITS_PER_WIDE_INT * 2;
2029
7b6d72fc
MM
2030 set_min_and_max_values_for_integral_type (type, precision,
2031 /*is_unsigned=*/false);
4cc89e53
RS
2032
2033 /* Lay out the type: set its alignment, size, etc. */
4cc89e53
RS
2034 layout_type (type);
2035}
2036
7306ed3f 2037/* Set the extreme values of TYPE based on its precision in bits,
13756074 2038 then lay it out. This is used both in `make_unsigned_type'
7306ed3f
JW
2039 and for enumeral types. */
2040
2041void
46c5ad27 2042fixup_unsigned_type (tree type)
7306ed3f 2043{
b3694847 2044 int precision = TYPE_PRECISION (type);
7306ed3f 2045
9cd56be1
JH
 2046  /* We cannot properly represent constants wider than
 2047     2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types,
 2048     as they are used by the i386 vector extensions and friends.  */
2049 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2050 precision = HOST_BITS_PER_WIDE_INT * 2;
2051
7b6d72fc
MM
2052 set_min_and_max_values_for_integral_type (type, precision,
2053 /*is_unsigned=*/true);
7306ed3f
JW
2054
2055 /* Lay out the type: set its alignment, size, etc. */
7306ed3f
JW
2056 layout_type (type);
2057}
2058\f
2059/* Find the best machine mode to use when referencing a bit field of length
2060 BITSIZE bits starting at BITPOS.
2061
2062 The underlying object is known to be aligned to a boundary of ALIGN bits.
2063 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2064 larger than LARGEST_MODE (usually SImode).
2065
2066 If no mode meets all these conditions, we return VOIDmode. Otherwise, if
2067 VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
2068 mode meeting these conditions.
2069
77fa0940
RK
2070 Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
2071 the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
2072 all the conditions. */
7306ed3f
JW
2073
2074enum machine_mode
46c5ad27
AJ
2075get_best_mode (int bitsize, int bitpos, unsigned int align,
2076 enum machine_mode largest_mode, int volatilep)
7306ed3f
JW
2077{
2078 enum machine_mode mode;
770ae6cc 2079 unsigned int unit = 0;
7306ed3f
JW
2080
2081 /* Find the narrowest integer mode that contains the bit field. */
2082 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2083 mode = GET_MODE_WIDER_MODE (mode))
2084 {
2085 unit = GET_MODE_BITSIZE (mode);
956d6950 2086 if ((bitpos % unit) + bitsize <= unit)
7306ed3f
JW
2087 break;
2088 }
2089
0c61f541 2090 if (mode == VOIDmode
7306ed3f 2091 /* It is tempting to omit the following line
4e4b555d 2092 if STRICT_ALIGNMENT is true.
7306ed3f
JW
2093 But that is incorrect, since if the bitfield uses part of 3 bytes
2094 and we use a 4-byte mode, we could get a spurious segv
2095 if the extra 4th byte is past the end of memory.
 2096	 (Though at least one Unix compiler ignores this problem:
 2097	 the one on the Sequent 386 machine.)  */
770ae6cc 2098 || MIN (unit, BIGGEST_ALIGNMENT) > align
7306ed3f
JW
2099 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
2100 return VOIDmode;
2101
77fa0940
RK
2102 if (SLOW_BYTE_ACCESS && ! volatilep)
2103 {
2104 enum machine_mode wide_mode = VOIDmode, tmode;
2105
2106 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
2107 tmode = GET_MODE_WIDER_MODE (tmode))
2108 {
2109 unit = GET_MODE_BITSIZE (tmode);
2110 if (bitpos / unit == (bitpos + bitsize - 1) / unit
2111 && unit <= BITS_PER_WORD
770ae6cc 2112 && unit <= MIN (align, BIGGEST_ALIGNMENT)
77fa0940
RK
2113 && (largest_mode == VOIDmode
2114 || unit <= GET_MODE_BITSIZE (largest_mode)))
2115 wide_mode = tmode;
2116 }
2117
2118 if (wide_mode != VOIDmode)
2119 return wide_mode;
2120 }
7306ed3f
JW
2121
2122 return mode;
2123}
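/* Editor's illustration (not part of the original source): asking for the
   best mode for a 3-bit field at bit offset 9 in an object aligned to 32
   bits, with no mode limit and VOLATILEP false.  The narrowest candidate
   is QImode (bits 8-15 cover bits 9-11); on a SLOW_BYTE_ACCESS target the
   second loop above may widen that up to a word-sized mode that still
   satisfies the alignment and containment checks.  */
#if 0
  enum machine_mode m = get_best_mode (3, 9, 32, VOIDmode, 0);
#endif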
d7db6646 2124
e2500fed 2125#include "gt-stor-layout.h"