Line data Source code
1 : /* C-compiler utilities for types and variables storage layout
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "target.h"
25 : #include "function.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "memmodel.h"
29 : #include "tm_p.h"
30 : #include "stringpool.h"
31 : #include "regs.h"
32 : #include "emit-rtl.h"
33 : #include "cgraph.h"
34 : #include "diagnostic-core.h"
35 : #include "fold-const.h"
36 : #include "stor-layout.h"
37 : #include "varasm.h"
38 : #include "print-tree.h"
39 : #include "langhooks.h"
40 : #include "tree-inline.h"
41 : #include "dumpfile.h"
42 : #include "gimplify.h"
43 : #include "attribs.h"
44 : #include "debug.h"
45 : #include "calls.h"
46 :
47 : /* Data type for the expressions representing sizes of data types.
48 : It is the first integer type laid out. */
49 : tree sizetype_tab[(int) stk_type_kind_last];
50 :
51 : /* If nonzero, this is an upper limit on alignment of structure fields.
52 : The value is measured in bits. */
53 : unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
54 :
55 : static tree self_referential_size (tree);
56 : static void finalize_record_size (record_layout_info);
57 : static void finalize_type_size (tree);
58 : static void place_union_field (record_layout_info, tree);
59 : static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
60 : HOST_WIDE_INT, tree);
61 : extern void debug_rli (record_layout_info);
62 :
63 : /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
64 : to serve as the actual size-expression for a type or decl. */
65 :
66 : tree
67 655981 : variable_size (tree size)
68 : {
69 : /* Obviously. */
70 655981 : if (TREE_CONSTANT (size))
71 : return size;
72 :
73 : /* If the size is self-referential, we can't make a SAVE_EXPR (see
74 : save_expr for the rationale). But we can do something else. */
75 655919 : if (CONTAINS_PLACEHOLDER_P (size))
76 0 : return self_referential_size (size);
77 :
78 : /* If we are in the global binding level, we can't make a SAVE_EXPR
79 : since it may end up being shared across functions, so it is up
80 : to the front-end to deal with this case. */
81 655919 : if (lang_hooks.decls.global_bindings_p ())
82 : return size;
83 :
84 320834 : return save_expr (size);
85 : }
86 :
87 : /* An array of functions used for self-referential size computation. */
88 : static GTY(()) vec<tree, va_gc> *size_functions;
89 :
90 : /* Return true if T is a self-referential component reference. */
91 :
92 : static bool
93 0 : self_referential_component_ref_p (tree t)
94 : {
95 0 : if (TREE_CODE (t) != COMPONENT_REF)
96 : return false;
97 :
98 0 : while (REFERENCE_CLASS_P (t))
99 0 : t = TREE_OPERAND (t, 0);
100 :
101 0 : return (TREE_CODE (t) == PLACEHOLDER_EXPR);
102 : }
103 :
104 : /* Similar to copy_tree_r but do not copy component references involving
105 : PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr
106 : and substituted in substitute_in_expr. */
107 :
static tree
copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  enum tree_code code = TREE_CODE (*tp);

  /* Stop at types, decls, constants like copy_tree_r.  These are shared
     nodes and must not be duplicated.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* This is the pattern built in ada/make_aligning_type.  Keep the
     ADDR_EXPR of a PLACEHOLDER_EXPR unshared too.  */
  else if (code == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Default case: the component reference.  Left intact so that
     find_placeholder_in_expr/substitute_in_expr can still spot it.  */
  else if (self_referential_component_ref_p (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* We're not supposed to have them in self-referential size trees
     because we wouldn't properly control when they are evaluated.
     However, not creating superfluous SAVE_EXPRs requires accurate
     tracking of readonly-ness all the way down to here, which we
     cannot always guarantee in practice.  So punt in this case.
     Returning error_mark_node aborts the enclosing walk_tree.  */
  else if (code == SAVE_EXPR)
    return error_mark_node;

  else if (code == STATEMENT_LIST)
    gcc_unreachable ();

  /* Everything else is copied node by node.  */
  return copy_tree_r (tp, walk_subtrees, data);
}
150 :
151 : /* Given a SIZE expression that is self-referential, return an equivalent
152 : expression to serve as the actual size expression for a type. */
153 :
static tree
self_referential_size (tree size)
{
  /* Counter used to generate a unique name for each size function.  */
  static unsigned HOST_WIDE_INT fnno = 0;
  vec<tree> self_refs = vNULL;
  tree param_type_list = NULL, param_decl_list = NULL;
  tree t, ref, return_type, fntype, fnname, fndecl;
  unsigned int i;
  char buf[128];
  vec<tree, va_gc> *args = NULL;

  /* Do not factor out simple operations.  */
  t = skip_simple_constant_arithmetic (size);
  if (TREE_CODE (t) == CALL_EXPR || self_referential_component_ref_p (t))
    return size;

  /* Collect the list of self-references in the expression.  */
  find_placeholder_in_expr (size, &self_refs);
  gcc_assert (self_refs.length () > 0);

  /* Obtain a private copy of the expression.  A non-null result means a
     SAVE_EXPR was found, in which case we punt (see the walk callback).  */
  t = size;
  if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
    return size;
  size = t;

  /* Build the parameter and argument lists in parallel; also
     substitute the former for the latter in the expression.  */
  vec_alloc (args, self_refs.length ());
  FOR_EACH_VEC_ELT (self_refs, i, ref)
    {
      tree subst, param_name, param_type, param_decl;

      if (DECL_P (ref))
	{
	  /* We shouldn't have true variables here.  */
	  gcc_assert (TREE_READONLY (ref));
	  subst = ref;
	}
      /* This is the pattern built in ada/make_aligning_type.  */
      else if (TREE_CODE (ref) == ADDR_EXPR)
	subst = ref;
      /* Default case: the component reference.  Substitute on the
	 FIELD_DECL operand so every matching reference is replaced.  */
      else
	subst = TREE_OPERAND (ref, 1);

      /* NOTE(review): I is unsigned but printed with %d; harmless for
	 the small counts involved, but %u would be exact.  */
      sprintf (buf, "p%d", i);
      param_name = get_identifier (buf);
      param_type = TREE_TYPE (ref);
      param_decl
	= build_decl (input_location, PARM_DECL, param_name, param_type);
      DECL_ARG_TYPE (param_decl) = param_type;
      DECL_ARTIFICIAL (param_decl) = 1;
      TREE_READONLY (param_decl) = 1;

      size = substitute_in_expr (size, subst, param_decl);

      param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
      param_decl_list = chainon (param_decl, param_decl_list);
      args->quick_push (ref);
    }

  self_refs.release ();

  /* Append 'void' to indicate that the number of parameters is fixed.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The 3 lists have been created in reverse order.  */
  param_type_list = nreverse (param_type_list);
  param_decl_list = nreverse (param_decl_list);

  /* Build the function type.  */
  return_type = TREE_TYPE (size);
  fntype = build_function_type (return_type, param_type_list);

  /* Build the function declaration.  */
  sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
  fnname = get_file_function_name (buf);
  fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = fndecl;
  DECL_ARGUMENTS (fndecl) = param_decl_list;
  DECL_RESULT (fndecl)
    = build_decl (input_location, RESULT_DECL, 0, return_type);
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The function has been created by the compiler and we don't
     want to emit debug info for it.  */
  DECL_ARTIFICIAL (fndecl) = 1;
  DECL_IGNORED_P (fndecl) = 1;

  /* It is supposed to be "const" and never throw.  */
  TREE_READONLY (fndecl) = 1;
  TREE_NOTHROW (fndecl) = 1;

  /* We want it to be inlined when this is deemed profitable, as
     well as discarded if every call has been integrated.  */
  DECL_DECLARED_INLINE_P (fndecl) = 1;

  /* It is made up of a unique return statement.  */
  DECL_INITIAL (fndecl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
  t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
  DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
  TREE_STATIC (fndecl) = 1;

  /* Put it onto the list of size functions.  */
  vec_safe_push (size_functions, fndecl);

  /* Replace the original expression with a call to the size function.  */
  return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
}
266 :
267 : /* Take, queue and compile all the size functions. It is essential that
268 : the size functions be gimplified at the very end of the compilation
269 : in order to guarantee transparent handling of self-referential sizes.
270 : Otherwise the GENERIC inliner would not be able to inline them back
271 : at each of their call sites, thus creating artificial non-constant
272 : size expressions which would trigger nasty problems later on. */
273 :
274 : void
275 257857 : finalize_size_functions (void)
276 : {
277 257857 : unsigned int i;
278 257857 : tree fndecl;
279 :
280 257857 : for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
281 : {
282 0 : allocate_struct_function (fndecl, false);
283 0 : set_cfun (NULL);
284 0 : dump_function (TDI_original, fndecl);
285 :
286 : /* As these functions are used to describe the layout of variable-length
287 : structures, debug info generation needs their implementation. */
288 0 : debug_hooks->size_function (fndecl);
289 0 : gimplify_function_tree (fndecl);
290 0 : cgraph_node::finalize_function (fndecl, false);
291 : }
292 :
293 257857 : vec_free (size_functions);
294 257857 : }
295 :
/* Return a machine mode of class MCLASS with SIZE bits of precision,
   if one exists.  The mode may have padding bits as well as the SIZE
   value bits.  If LIMIT is nonzero, disregard modes wider than
   MAX_FIXED_MODE_SIZE.  */
300 :
301 : opt_machine_mode
302 1212901909 : mode_for_size (poly_uint64 size, enum mode_class mclass, int limit)
303 : {
304 1212901909 : machine_mode mode;
305 1212901909 : int i;
306 :
307 1383690160 : if (limit && maybe_gt (size, (unsigned int) MAX_FIXED_MODE_SIZE))
308 38071845 : return opt_machine_mode ();
309 :
310 : /* Get the first mode which has this size, in the specified class. */
311 1739956688 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
312 1695125154 : if (known_eq (GET_MODE_PRECISION (mode), size))
313 1129998530 : return mode;
314 :
315 44831534 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
316 89628796 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
317 44814398 : if (known_eq (int_n_data[i].bitsize, size)
318 44814398 : && int_n_enabled_p[i])
319 0 : return int_n_data[i].m;
320 :
321 44831534 : return opt_machine_mode ();
322 : }
323 :
324 : /* Similar, except passed a tree node. */
325 :
326 : opt_machine_mode
327 164015288 : mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
328 : {
329 164015288 : unsigned HOST_WIDE_INT uhwi;
330 164015288 : unsigned int ui;
331 :
332 164015288 : if (!tree_fits_uhwi_p (size))
333 282471 : return opt_machine_mode ();
334 163732817 : uhwi = tree_to_uhwi (size);
335 163732817 : ui = uhwi;
336 163732817 : if (uhwi != ui)
337 565 : return opt_machine_mode ();
338 163732252 : return mode_for_size (ui, mclass, limit);
339 : }
340 :
341 : /* Return the narrowest mode of class MCLASS that contains at least
342 : SIZE bits, if such a mode exists. */
343 :
opt_machine_mode
smallest_mode_for_size (poly_uint64 size, enum mode_class mclass)
{
  machine_mode mode = VOIDmode;
  int i;

  /* Get the first mode which has at least this size, in the
     specified class.  Modes in a class are ordered by size, so the
     first hit is the narrowest.  */
  FOR_EACH_MODE_IN_CLASS (mode, mclass)
    if (known_ge (GET_MODE_PRECISION (mode), size))
      break;

  if (mode == VOIDmode)
    return opt_machine_mode ();

  /* For integer classes, an enabled __intN mode may be narrower than
     the class mode found above while still covering SIZE; prefer it.  */
  if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
    for (i = 0; i < NUM_INT_N_ENTS; i ++)
      if (known_ge (int_n_data[i].bitsize, size)
	  && known_lt (int_n_data[i].bitsize, GET_MODE_PRECISION (mode))
	  && int_n_enabled_p[i])
	mode = int_n_data[i].m;

  return mode;
}
368 :
369 : /* Return an integer mode of exactly the same size as MODE, if one exists. */
370 :
opt_scalar_int_mode
int_mode_for_mode (machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    /* Already an integer mode: just reinterpret it.  */
    case MODE_INT:
    case MODE_PARTIAL_INT:
      return as_a <scalar_int_mode> (mode);

    /* For every other value-carrying class, look for an integer mode
       with exactly the same total size (no width limit).  */
    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_ACCUM:
    case MODE_UFRACT:
    case MODE_UACCUM:
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_UACCUM:
      return int_mode_for_size (GET_MODE_BITSIZE (mode), 0);

    /* Opaque modes have no integer equivalent by definition.  */
    case MODE_OPAQUE:
      return opt_scalar_int_mode ();

    case MODE_RANDOM:
      if (mode == BLKmode)
	return opt_scalar_int_mode ();

      /* fall through */

    /* CC modes and anything unexpected should never reach here.  */
    case MODE_CC:
    default:
      gcc_unreachable ();
    }
}
411 :
412 : /* Find a mode that can be used for efficient bitwise operations on MODE,
413 : if one exists. */
414 :
opt_machine_mode
bitwise_mode_for_mode (machine_mode mode)
{
  /* Quick exit if we already have a suitable mode.  */
  scalar_int_mode int_mode;
  if (is_a <scalar_int_mode> (mode, &int_mode)
      && GET_MODE_BITSIZE (int_mode) <= MAX_FIXED_MODE_SIZE)
    return int_mode;

  /* Reuse the sanity checks from int_mode_for_mode.  */
  gcc_checking_assert ((int_mode_for_mode (mode), true));

  poly_int64 bitsize = GET_MODE_BITSIZE (mode);

  /* Try to replace complex modes with complex modes.  In general we
     expect both components to be processed independently, so we only
     care whether there is a register for the inner mode.  */
  if (COMPLEX_MODE_P (mode))
    {
      machine_mode trial = mode;
      if ((GET_MODE_CLASS (trial) == MODE_COMPLEX_INT
	   || mode_for_size (bitsize, MODE_COMPLEX_INT, false).exists (&trial))
	  && have_regs_of_mode[GET_MODE_INNER (trial)])
	return trial;
    }

  /* Try to replace vector modes with vector modes.  Also try using vector
     modes if an integer mode would be too big.  */
  if (VECTOR_MODE_P (mode)
      || maybe_gt (bitsize, MAX_FIXED_MODE_SIZE))
    {
      machine_mode trial = mode;
      if ((GET_MODE_CLASS (trial) == MODE_VECTOR_INT
	   || mode_for_size (bitsize, MODE_VECTOR_INT, 0).exists (&trial))
	  && have_regs_of_mode[trial]
	  && targetm.vector_mode_supported_p (trial))
	return trial;
    }

  /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE.  */
  return mode_for_size (bitsize, MODE_INT, true);
}
457 :
458 : /* Find a type that can be used for efficient bitwise operations on MODE.
459 : Return null if no such mode exists. */
460 :
461 : tree
462 24373 : bitwise_type_for_mode (machine_mode mode)
463 : {
464 24373 : if (!bitwise_mode_for_mode (mode).exists (&mode))
465 120 : return NULL_TREE;
466 :
467 24253 : unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
468 24253 : tree inner_type = build_nonstandard_integer_type (inner_size, true);
469 :
470 24253 : if (VECTOR_MODE_P (mode))
471 244 : return build_vector_type_for_mode (inner_type, mode);
472 :
473 24009 : if (COMPLEX_MODE_P (mode))
474 247 : return build_complex_type (inner_type);
475 :
476 47524 : gcc_checking_assert (GET_MODE_INNER (mode) == mode);
477 : return inner_type;
478 : }
479 :
480 : /* Find a mode that can be used for efficient bitwise operations on SIZE
481 : bits, if one exists. */
482 :
opt_machine_mode
bitwise_mode_for_size (poly_uint64 size)
{
  /* Small enough for a plain integer mode.  */
  if (known_le (size, (unsigned int) MAX_FIXED_MODE_SIZE))
    return mode_for_size (size, MODE_INT, true);

  /* Otherwise scan the integer vector modes of exactly SIZE bits,
     preferring a QImode-element vector; remember the first other
     supported candidate as a fallback.  */
  machine_mode mode, ret = VOIDmode;
  FOR_EACH_MODE_FROM (mode, MIN_MODE_VECTOR_INT)
    if (known_eq (GET_MODE_BITSIZE (mode), size)
	&& (ret == VOIDmode || GET_MODE_INNER (mode) == QImode)
	&& have_regs_of_mode[mode]
	&& targetm.vector_mode_supported_p (mode))
      {
	if (GET_MODE_INNER (mode) == QImode)
	  return mode;
	else if (ret == VOIDmode)
	  ret = mode;
      }
  if (ret != VOIDmode)
    return ret;
  return opt_machine_mode ();
}
505 :
506 : /* Find a mode that is suitable for representing a vector with NUNITS
507 : elements of mode INNERMODE, if one exists. The returned mode can be
508 : either an integer mode or a vector mode. */
509 :
opt_machine_mode
mode_for_vector (scalar_mode innermode, poly_uint64 nunits)
{
  machine_mode mode;

  /* First, look for a supported vector type.  Start the search at the
     first vector class matching the element's scalar class.  */
  if (SCALAR_FLOAT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FLOAT;
  else if (SCALAR_FRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FRACT;
  else if (SCALAR_UFRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UFRACT;
  else if (SCALAR_ACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_ACCUM;
  else if (SCALAR_UACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UACCUM;
  else
    mode = MIN_MODE_VECTOR_INT;

  /* Only check the broader vector_mode_supported_any_target_p here.
     We'll filter through target-specific availability and
     vector_mode_supported_p later in vector_type_mode.  */
  FOR_EACH_MODE_FROM (mode, mode)
    if (known_eq (GET_MODE_NUNITS (mode), nunits)
	&& GET_MODE_INNER (mode) == innermode
	&& targetm.vector_mode_supported_any_target_p (mode))
      return mode;

  /* For integers, try mapping it to a same-sized scalar mode.  */
  if (GET_MODE_CLASS (innermode) == MODE_INT)
    {
      poly_uint64 nbits = nunits * GET_MODE_BITSIZE (innermode);
      if (int_mode_for_size (nbits, 0).exists (&mode)
	  && have_regs_of_mode[mode])
	return mode;
    }

  return opt_machine_mode ();
}
549 :
550 : /* If a piece of code is using vector mode VECTOR_MODE and also wants
551 : to operate on elements of mode ELEMENT_MODE, return the vector mode
552 : it should use for those elements. If NUNITS is nonzero, ensure that
553 : the mode has exactly NUNITS elements, otherwise pick whichever vector
554 : size pairs the most naturally with VECTOR_MODE; this may mean choosing
555 : a mode with a different size and/or number of elements, depending on
556 : what the target prefers. Return an empty opt_machine_mode if there
557 : is no supported vector mode with the required properties.
558 :
   Unlike mode_for_vector, any returned mode is guaranteed to satisfy
560 : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
561 :
opt_machine_mode
related_vector_mode (machine_mode vector_mode, scalar_mode element_mode,
		     poly_uint64 nunits)
{
  gcc_assert (VECTOR_MODE_P (vector_mode));
  /* The choice is entirely delegated to the target hook.  */
  return targetm.vectorize.related_mode (vector_mode, element_mode, nunits);
}
569 :
570 : /* If a piece of code is using vector mode VECTOR_MODE and also wants
571 : to operate on integer vectors with the same element size and number
572 : of elements, return the vector mode it should use. Return an empty
573 : opt_machine_mode if there is no supported vector mode with the
574 : required properties.
575 :
   Unlike mode_for_vector, any returned mode is guaranteed to satisfy
577 : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
578 :
579 : opt_machine_mode
580 27817 : related_int_vector_mode (machine_mode vector_mode)
581 : {
582 27817 : gcc_assert (VECTOR_MODE_P (vector_mode));
583 27817 : scalar_int_mode int_mode;
584 55634 : if (int_mode_for_mode (GET_MODE_INNER (vector_mode)).exists (&int_mode))
585 55634 : return related_vector_mode (vector_mode, int_mode,
586 27817 : GET_MODE_NUNITS (vector_mode));
587 0 : return opt_machine_mode ();
588 : }
589 :
590 : /* Return the alignment of MODE. This will be bounded by 1 and
591 : BIGGEST_ALIGNMENT. */
592 :
593 : unsigned int
594 1666028652 : get_mode_alignment (machine_mode mode)
595 : {
596 3235635457 : return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
597 : }
598 :
599 : /* Return the natural mode of an array, given that it is SIZE bytes in
600 : total and has elements of type ELEM_TYPE. */
601 :
static machine_mode
mode_for_array (tree elem_type, tree size)
{
  tree elem_size;
  poly_uint64 int_size, int_elem_size;
  unsigned HOST_WIDE_INT num_elems;
  bool limit_p;

  /* One-element arrays get the component type's mode.  */
  elem_size = TYPE_SIZE (elem_type);
  if (simple_cst_equal (size, elem_size))
    return TYPE_MODE (elem_type);

  /* When the element count is a known constant, the target may either
     supply an array mode directly or allow an unlimited-width integer
     mode for the whole array.  */
  limit_p = true;
  if (poly_int_tree_p (size, &int_size)
      && poly_int_tree_p (elem_size, &int_elem_size)
      && maybe_ne (int_elem_size, 0U)
      && constant_multiple_p (int_size, int_elem_size, &num_elems))
    {
      machine_mode elem_mode = TYPE_MODE (elem_type);
      machine_mode mode;
      if (targetm.array_mode (elem_mode, num_elems).exists (&mode))
	return mode;
      if (targetm.array_mode_supported_p (elem_mode, num_elems))
	limit_p = false;
    }
  /* Fall back to an integer mode of the right size, or BLKmode.  */
  return mode_for_size_tree (size, MODE_INT, limit_p).else_blk ();
}
630 :
631 : /* Subroutine of layout_decl: Force alignment required for the data type.
632 : But if the decl itself wants greater alignment, don't override that. */
633 :
634 : static inline void
635 2011039363 : do_type_align (tree type, tree decl)
636 : {
637 2011039363 : if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
638 : {
639 1892820039 : SET_DECL_ALIGN (decl, TYPE_ALIGN (type));
640 1892820039 : if (TREE_CODE (decl) == FIELD_DECL)
641 63162185 : DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
642 : }
643 2011039363 : if (TYPE_WARN_IF_NOT_ALIGN (type) > DECL_WARN_IF_NOT_ALIGN (decl))
644 51 : SET_DECL_WARN_IF_NOT_ALIGN (decl, TYPE_WARN_IF_NOT_ALIGN (type));
645 2011039363 : }
646 :
647 : /* Set the size, mode and alignment of a ..._DECL node.
648 : TYPE_DECL does need this for C++.
649 : Note that LABEL_DECL and CONST_DECL nodes do not need this,
650 : and FUNCTION_DECL nodes have them set up in a special (and simple) way.
651 : Don't call layout_decl for them.
652 :
653 : KNOWN_ALIGN is the amount of alignment we can assume this
654 : decl has with no special effort. It is relevant only for FIELD_DECLs
655 : and depends on the previous fields.
656 : All that matters about KNOWN_ALIGN is which powers of 2 divide it.
657 : If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
658 : the record will be aligned to suit. */
659 :
void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;
  location_t loc = DECL_SOURCE_LOCATION (decl);

  /* CONST_DECLs never need layout.  */
  if (code == CONST_DECL)
    return;

  gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
	      || code == TYPE_DECL || code == FIELD_DECL);

  /* Remember any RTL already assigned so it can be refreshed at the end.  */
  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    SET_DECL_MODE (decl, TYPE_MODE (type));

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    /* Derive the byte size from an already-set bit size.  */
    DECL_SIZE_UNIT (decl)
      = fold_convert_loc (loc, sizetype,
			  size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
					  bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      bool old_user_align = DECL_USER_ALIGN (decl);
      bool zero_bitfield = false;
      bool packed_p = DECL_PACKED (decl);
      unsigned int mfa;

      if (DECL_BIT_FIELD (decl))
	{
	  DECL_BIT_FIELD_TYPE (decl) = type;

	  /* A zero-length bit-field affects the alignment of the next
	     field.  In essence such bit-fields are not influenced by
	     any packing due to #pragma pack or attribute packed.  */
	  if (integer_zerop (DECL_SIZE (decl))
	      && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
	    {
	      zero_bitfield = true;
	      packed_p = false;
	      if (PCC_BITFIELD_TYPE_MATTERS)
		do_type_align (type, decl);
	      else
		{
#ifdef EMPTY_FIELD_BOUNDARY
		  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
		    {
		      SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY);
		      DECL_USER_ALIGN (decl) = 0;
		    }
#endif
		}
	    }

	  /* See if we can use an ordinary integer mode for a bit-field.
	     Conditions are: a fixed size that is correct for another mode,
	     occupying a complete byte or bytes on proper boundary.  */
	  if (TYPE_SIZE (type) != 0
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
	    {
	      machine_mode xmode;
	      if (mode_for_size_tree (DECL_SIZE (decl),
				      MODE_INT, 1).exists (&xmode))
		{
		  unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
		  if (!(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
		      && (known_align == 0 || known_align >= xalign))
		    {
		      /* The field can be treated as an ordinary integer
			 of mode XMODE; it is no longer a bit-field.  */
		      SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)));
		      SET_DECL_MODE (decl, xmode);
		      DECL_BIT_FIELD (decl) = 0;
		    }
		}
	    }

	  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
	  if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
	      && known_align >= TYPE_ALIGN (type)
	      && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
	    DECL_BIT_FIELD (decl) = 0;
	}
      else if (packed_p && DECL_USER_ALIGN (decl))
	/* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
	   round up; we'll reduce it again below.  We want packing to
	   supersede USER_ALIGN inherited from the type, but defer to
	   alignment explicitly specified on the field decl.  */;
      else
	do_type_align (type, decl);

      /* If the field is packed and not explicitly aligned, give it the
	 minimum alignment.  Note that do_type_align may set
	 DECL_USER_ALIGN, so we need to check old_user_align instead.  */
      if (packed_p
	  && !old_user_align)
	SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT));

      if (! packed_p && ! DECL_USER_ALIGN (decl))
	{
	  /* Some targets (i.e. i386, VMS) limit struct field alignment
	     to a lower boundary than alignment of variables unless
	     it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
	  SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),
				     (unsigned) BIGGEST_FIELD_ALIGNMENT));
#endif
#ifdef ADJUST_FIELD_ALIGN
	  SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, TREE_TYPE (decl),
						    DECL_ALIGN (decl)));
#endif
	}

      /* A zero-width bit-field uses the initial cap rather than any
	 #pragma pack value in force.  */
      if (zero_bitfield)
	mfa = initial_max_fld_align * BITS_PER_UNIT;
      else
	mfa = maximum_field_alignment;
      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (mfa != 0)
	SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa));
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if ((code == PARM_DECL || (code == VAR_DECL && !DECL_NONLOCAL_FRAME (decl)))
      && !DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST)
	{
	  /* -Wlarger-than= argument of HOST_WIDE_INT_MAX is treated
	     as if PTRDIFF_MAX had been specified, with the value
	     being that on the target rather than the host.  */
	  unsigned HOST_WIDE_INT max_size = warn_larger_than_size;
	  if (max_size == HOST_WIDE_INT_MAX)
	    max_size = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

	  if (compare_tree_int (size, max_size) > 0)
	    warning (OPT_Wlarger_than_, "size of %q+D %E bytes exceeds "
		     "maximum object size %wu",
		     decl, size, max_size);
	}
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      if (MEM_P (rtl))
	set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
847 :
848 : /* Given a VAR_DECL, PARM_DECL, RESULT_DECL, or FIELD_DECL, clears the
849 : results of a previous call to layout_decl and calls it again. */
850 :
851 : void
852 485818455 : relayout_decl (tree decl)
853 : {
854 485818455 : DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
855 485818455 : SET_DECL_MODE (decl, VOIDmode);
856 485818455 : if (!DECL_USER_ALIGN (decl))
857 485817273 : SET_DECL_ALIGN (decl, 0);
858 485818455 : if (DECL_RTL_SET_P (decl))
859 0 : SET_DECL_RTL (decl, 0);
860 :
861 485818455 : layout_decl (decl, 0);
862 485818455 : }
863 :
864 : /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
865 : QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
866 : is to be passed to all other layout functions for this record. It is the
867 : responsibility of the caller to call `free' for the storage returned.
868 : Note that garbage collection is not permitted until we finish laying
869 : out the record. */
870 :
871 : record_layout_info
872 60683216 : start_record_layout (tree t)
873 : {
874 60683216 : record_layout_info rli = XNEW (struct record_layout_info_s);
875 :
876 60683216 : rli->t = t;
877 :
878 : /* If the type has a minimum specified alignment (via an attribute
879 : declaration, for example) use it -- otherwise, start with a
880 : one-byte alignment. */
881 60683216 : rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
882 60683216 : rli->unpacked_align = rli->record_align;
883 121055138 : rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
884 :
885 : #ifdef STRUCTURE_SIZE_BOUNDARY
886 : /* Packed structures don't need to have minimum size. */
887 : if (! TYPE_PACKED (t))
888 : {
889 : unsigned tmp;
890 :
891 : /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */
892 : tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
893 : if (maximum_field_alignment != 0)
894 : tmp = MIN (tmp, maximum_field_alignment);
895 : rli->record_align = MAX (rli->record_align, tmp);
896 : }
897 : #endif
898 :
899 60683216 : rli->offset = size_zero_node;
900 60683216 : rli->bitpos = bitsize_zero_node;
901 60683216 : rli->prev_field = 0;
902 60683216 : rli->pending_statics = 0;
903 60683216 : rli->packed_maybe_necessary = 0;
904 60683216 : rli->remaining_in_alignment = 0;
905 :
906 60683216 : return rli;
907 : }
908 :
909 : /* Fold sizetype value X to bitsizetype, given that X represents a type
910 : size or offset. */
911 :
912 : static tree
913 373891955 : bits_from_bytes (tree x)
914 : {
915 373891955 : if (POLY_INT_CST_P (x))
916 : /* The runtime calculation isn't allowed to overflow sizetype;
917 : increasing the runtime values must always increase the size
918 : or offset of the object. This means that the object imposes
919 : a maximum value on the runtime parameters, but we don't record
920 : what that is. */
921 : return build_poly_int_cst
922 : (bitsizetype,
923 : poly_wide_int::from (poly_int_cst_value (x),
924 : TYPE_PRECISION (bitsizetype),
925 : TYPE_SIGN (TREE_TYPE (x))));
926 373891955 : x = fold_convert (bitsizetype, x);
927 373891955 : gcc_checking_assert (x);
928 373891955 : return x;
929 : }
930 :
931 : /* Return the combined bit position for the byte offset OFFSET and the
932 : bit position BITPOS.
933 :
934 : These functions operate on byte and bit positions present in FIELD_DECLs
935 : and assume that these expressions result in no (intermediate) overflow.
936 : This assumption is necessary to fold the expressions as much as possible,
937 : so as to avoid creating artificially variable-sized types in languages
938 : supporting variable-sized types like Ada. */
939 :
940 : tree
941 305072961 : bit_from_pos (tree offset, tree bitpos)
942 : {
943 305072961 : return size_binop (PLUS_EXPR, bitpos,
944 : size_binop (MULT_EXPR, bits_from_bytes (offset),
945 : bitsize_unit_node));
946 : }
947 :
948 : /* Return the combined truncated byte position for the byte offset OFFSET and
949 : the bit position BITPOS. */
950 :
951 : tree
952 325552135 : byte_from_pos (tree offset, tree bitpos)
953 : {
954 325552135 : tree bytepos;
955 325552135 : if (TREE_CODE (bitpos) == MULT_EXPR
956 325552135 : && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
957 0 : bytepos = TREE_OPERAND (bitpos, 0);
958 : else
959 325552135 : bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
960 325552135 : return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
961 : }
962 :
963 : /* Split the bit position POS into a byte offset *POFFSET and a bit
964 : position *PBITPOS with the byte offset aligned to OFF_ALIGN bits. */
965 :
966 : void
967 71126144 : pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
968 : tree pos)
969 : {
970 71126144 : tree toff_align = bitsize_int (off_align);
971 71126144 : if (TREE_CODE (pos) == MULT_EXPR
972 71126144 : && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
973 : {
974 0 : *poffset = size_binop (MULT_EXPR,
975 : fold_convert (sizetype, TREE_OPERAND (pos, 0)),
976 : size_int (off_align / BITS_PER_UNIT));
977 0 : *pbitpos = bitsize_zero_node;
978 : }
979 : else
980 : {
981 71126144 : *poffset = size_binop (MULT_EXPR,
982 : fold_convert (sizetype,
983 : size_binop (FLOOR_DIV_EXPR, pos,
984 : toff_align)),
985 : size_int (off_align / BITS_PER_UNIT));
986 71126144 : *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
987 : }
988 71126144 : }
989 :
990 : /* Given a pointer to bit and byte offsets and an offset alignment,
991 : normalize the offsets so they are within the alignment. */
992 :
993 : void
994 218348367 : normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
995 : {
996 : /* If the bit position is now larger than it should be, adjust it
997 : downwards. */
998 218348367 : if (compare_tree_int (*pbitpos, off_align) >= 0)
999 : {
1000 71126144 : tree offset, bitpos;
1001 71126144 : pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
1002 71126144 : *poffset = size_binop (PLUS_EXPR, *poffset, offset);
1003 71126144 : *pbitpos = bitpos;
1004 : }
1005 218348367 : }
1006 :
1007 : /* Print debugging information about the information in RLI. */
1008 :
1009 : DEBUG_FUNCTION void
1010 0 : debug_rli (record_layout_info rli)
1011 : {
1012 0 : print_node_brief (stderr, "type", rli->t, 0);
1013 0 : print_node_brief (stderr, "\noffset", rli->offset, 0);
1014 0 : print_node_brief (stderr, " bitpos", rli->bitpos, 0);
1015 :
1016 0 : fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
1017 : rli->record_align, rli->unpacked_align,
1018 : rli->offset_align);
1019 :
1020 : /* The ms_struct code is the only that uses this. */
1021 0 : if (targetm.ms_bitfield_layout_p (rli->t))
1022 0 : fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
1023 :
1024 0 : if (rli->packed_maybe_necessary)
1025 0 : fprintf (stderr, "packed may be necessary\n");
1026 :
1027 0 : if (!vec_safe_is_empty (rli->pending_statics))
1028 : {
1029 0 : fprintf (stderr, "pending statics:\n");
1030 0 : debug (rli->pending_statics);
1031 : }
1032 0 : }
1033 :
/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  Thin wrapper
   around normalize_offset applied to RLI's running position.  */

void
normalize_rli (record_layout_info rli)
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}
1042 :
/* Returns the size in bytes allocated so far, i.e. RLI's running
   offset/bitpos pair folded to a (truncated) byte count.  */

tree
rli_size_unit_so_far (record_layout_info rli)
{
  return byte_from_pos (rli->offset, rli->bitpos);
}
1050 :
/* Returns the size in bits allocated so far, i.e. RLI's running
   offset/bitpos pair folded to a single bit position.  */

tree
rli_size_so_far (record_layout_info rli)
{
  return bit_from_pos (rli->offset, rli->bitpos);
}
1058 :
/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location within the record is given by KNOWN_ALIGN.
   Update the variable alignment fields in RLI, and return the alignment
   to give the FIELD.  Returns 0 for an ERROR_MARK type.  */

unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
			    unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Do not attempt to align an ERROR_MARK node */
  if (TREE_CODE (type) == ERROR_MARK)
    return 0;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  /* Zero-size bit-fields are excluded: they don't count as bit-fields
     for the alignment rules applied below.  */
  is_bitfield = (type != error_mark_node
		 && DECL_BIT_FIELD_TYPE (field)
		 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
	 affect the alignment of a record; even a zero-sized field
	 can do this.  The alignment should be to the alignment of
	 the type, except that for zero-size bitfields this only
	 applies if there was an immediately prior, nonzero-size
	 bitfield.  (That's the way it is, experimentally.) */
      if (!is_bitfield
	  || ((DECL_SIZE (field) == NULL_TREE
	       || !integer_zerop (DECL_SIZE (field)))
	      ? !DECL_PACKED (field)
	      : (rli->prev_field
		 && DECL_BIT_FIELD_TYPE (rli->prev_field)
		 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
	{
	  unsigned int type_align = TYPE_ALIGN (type);
	  /* A packed non-bitfield contributes only its own (reduced)
	     alignment, not that of its type.  */
	  if (!is_bitfield && DECL_PACKED (field))
	    type_align = desired_align;
	  else
	    type_align = MAX (type_align, desired_align);
	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  rli->record_align = MAX (rli->record_align, type_align);
	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	}
    }
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
	 alignment implied by their type.  Some targets also apply the same
	 rules to unnamed bitfields.  */
      if (DECL_NAME (field) != 0
	  || targetm.align_anon_bitfield ())
	{
	  unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
	  if (! TYPE_USER_ALIGN (type))
	    type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
#endif

	  /* Targets might chose to handle unnamed and hence possibly
	     zero-width bitfield.  Those are not influenced by #pragmas
	     or packed attributes.  */
	  if (integer_zerop (DECL_SIZE (field)))
	    {
	      if (initial_max_fld_align)
		type_align = MIN (type_align,
				  initial_max_fld_align * BITS_PER_UNIT);
	    }
	  else if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  else if (DECL_PACKED (field))
	    type_align = MIN (type_align, BITS_PER_UNIT);

	  /* The alignment of the record is increased to the maximum
	     of the current alignment, the alignment indicated on the
	     field (i.e., the alignment specified by an __aligned__
	     attribute), and the alignment indicated by the type of
	     the field.  */
	  rli->record_align = MAX (rli->record_align, desired_align);
	  rli->record_align = MAX (rli->record_align, type_align);

	  if (warn_packed)
	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	  user_align |= TYPE_USER_ALIGN (type);
	}
    }
  else
    {
      /* Ordinary (non-ms_struct, non-PCC-bitfield) case: the record needs
	 at least the field's alignment; track the unpacked alignment for
	 -Wpacked style diagnostics.  */
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  /* Any user-specified alignment on the field or its type propagates
     to the containing record.  */
  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}
1171 :
/* Issue a warning if the record alignment, RECORD_ALIGN, is less than
   the field alignment of FIELD or FIELD isn't aligned.  The warning is
   emitted under -Wif-not-aligned (warn_if_not_aligned attributes) or
   -Wpacked-not-aligned (aligned types inside packed records).  */

static void
handle_warn_if_not_align (tree field, unsigned int record_align)
{
  tree type = TREE_TYPE (field);

  if (type == error_mark_node)
    return;

  /* Required alignment, in bits, that FIELD must satisfy; zero means
     no warning applies.  */
  unsigned int warn_if_not_align = 0;

  /* Which warning option to attribute the diagnostic to.  */
  int opt_w = 0;

  if (warn_if_not_aligned)
    {
      /* A warn_if_not_aligned attribute on the decl takes precedence
	 over one on its type.  */
      warn_if_not_align = DECL_WARN_IF_NOT_ALIGN (field);
      if (!warn_if_not_align)
	warn_if_not_align = TYPE_WARN_IF_NOT_ALIGN (type);
      if (warn_if_not_align)
	opt_w = OPT_Wif_not_aligned;
    }

  /* Otherwise, -Wpacked-not-aligned checks fields whose type carries an
     explicit "aligned" attribute.  */
  if (!warn_if_not_align
      && warn_packed_not_aligned
      && lookup_attribute ("aligned", TYPE_ATTRIBUTES (type)))
    {
      warn_if_not_align = TYPE_ALIGN (type);
      opt_w = OPT_Wpacked_not_aligned;
    }

  if (!warn_if_not_align)
    return;

  tree context = DECL_CONTEXT (field);

  /* Diagnostics are reported in bytes.  */
  warn_if_not_align /= BITS_PER_UNIT;
  record_align /= BITS_PER_UNIT;
  if ((record_align % warn_if_not_align) != 0)
    warning (opt_w, "alignment %u of %qT is less than %u",
	     record_align, context, warn_if_not_align);

  /* Also check the field's own byte offset; a variable offset gets the
     weaker "may not be aligned" wording.  */
  tree off = byte_position (field);
  if (!multiple_of_p (TREE_TYPE (off), off, size_int (warn_if_not_align)))
    {
      if (TREE_CODE (off) == INTEGER_CST)
	warning (opt_w, "%q+D offset %E in %qT isn%'t aligned to %u",
		 field, off, context, warn_if_not_align);
      else
	warning (opt_w, "%q+D offset %E in %qT may not be aligned to %u",
		 field, off, context, warn_if_not_align);
    }
}
1226 :
/* Called from place_field to handle unions.  Every union member is
   placed at offset zero; the union's running size (RLI->offset) is the
   maximum of its members' sizes (or, for a QUAL_UNION_TYPE, a COND_EXPR
   chain selecting the size of the active member).  */

static void
place_union_field (record_layout_info rli, tree field)
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  /* All union members start at the beginning of the union.  */
  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
  handle_warn_if_not_align (field, rli->record_align);

  /* If this is an ERROR_MARK return *after* having set the
     field at the start of the union.  This helps when parsing
     invalid fields.  */
  if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
    return;

  /* Typeless storage is contagious: an aggregate member with it makes
     the whole union typeless.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (field))
      && TYPE_TYPELESS_STORAGE (TREE_TYPE (field)))
    TYPE_TYPELESS_STORAGE (rli->t) = 1;

  /* We might see a flexible array member field (with no DECL_SIZE_UNIT), use
     zero size for such field.  */
  tree field_size_unit = DECL_SIZE_UNIT (field)
			 ? DECL_SIZE_UNIT (field)
			 : build_int_cst (sizetype, 0);
  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, field_size_unit);
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
			       field_size_unit, rli->offset);
}
1262 :
1263 : /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
1264 : at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
1265 : units of alignment than the underlying TYPE. */
1266 : static int
1267 531159 : excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
1268 : HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
1269 : {
1270 : /* Note that the calculation of OFFSET might overflow; we calculate it so
1271 : that we still get the right result as long as ALIGN is a power of two. */
1272 531159 : unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
1273 :
1274 531159 : offset = offset % align;
1275 531159 : return ((offset + size + align - 1) / align
1276 531159 : > tree_to_uhwi (TYPE_SIZE (type)) / align);
1277 : }
1278 :
1279 : /* RLI contains information about the layout of a RECORD_TYPE. FIELD
1280 : is a FIELD_DECL to be added after those fields already present in
1281 : T. (FIELD is not actually added to the TYPE_FIELDS list here;
1282 : callers that desire that behavior must manually perform that step.) */
1283 :
1284 : void
1285 403388630 : place_field (record_layout_info rli, tree field)
1286 : {
1287 : /* The alignment required for FIELD. */
1288 403388630 : unsigned int desired_align;
1289 : /* The alignment FIELD would have if we just dropped it into the
1290 : record as it presently stands. */
1291 403388630 : unsigned int known_align;
1292 403388630 : unsigned int actual_align;
1293 : /* The type of this field. */
1294 403388630 : tree type = TREE_TYPE (field);
1295 :
1296 403388630 : gcc_assert (TREE_CODE (field) != ERROR_MARK);
1297 :
1298 : /* If FIELD is static, then treat it like a separate variable, not
1299 : really like a structure field. If it is a FUNCTION_DECL, it's a
1300 : method. In both cases, all we do is lay out the decl, and we do
1301 : it *after* the record is laid out. */
1302 403388630 : if (VAR_P (field))
1303 : {
1304 13560433 : vec_safe_push (rli->pending_statics, field);
1305 13560433 : return;
1306 : }
1307 :
1308 : /* Enumerators and enum types which are local to this class need not
1309 : be laid out. Likewise for initialized constant fields. */
1310 389828197 : else if (TREE_CODE (field) != FIELD_DECL)
1311 : return;
1312 :
1313 : /* Unions are laid out very differently than records, so split
1314 : that code off to another function. */
1315 66970122 : else if (TREE_CODE (rli->t) != RECORD_TYPE)
1316 : {
1317 2254466 : place_union_field (rli, field);
1318 2254466 : return;
1319 : }
1320 :
1321 64715656 : else if (TREE_CODE (type) == ERROR_MARK)
1322 : {
1323 : /* Place this field at the current allocation position, so we
1324 : maintain monotonicity. */
1325 38 : DECL_FIELD_OFFSET (field) = rli->offset;
1326 38 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1327 38 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1328 38 : handle_warn_if_not_align (field, rli->record_align);
1329 38 : return;
1330 : }
1331 :
1332 64715618 : if (AGGREGATE_TYPE_P (type)
1333 64715618 : && TYPE_TYPELESS_STORAGE (type))
1334 1250074 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1335 :
1336 : /* Work out the known alignment so far. Note that A & (-A) is the
1337 : value of the least-significant bit in A that is one. */
1338 64715618 : if (! integer_zerop (rli->bitpos))
1339 18793759 : known_align = least_bit_hwi (tree_to_uhwi (rli->bitpos));
1340 45921859 : else if (integer_zerop (rli->offset))
1341 : known_align = 0;
1342 8608580 : else if (tree_fits_uhwi_p (rli->offset))
1343 8607813 : known_align = (BITS_PER_UNIT
1344 8607813 : * least_bit_hwi (tree_to_uhwi (rli->offset)));
1345 : else
1346 767 : known_align = rli->offset_align;
1347 :
1348 64715618 : desired_align = update_alignment_for_field (rli, field, known_align);
1349 64715618 : if (known_align == 0)
1350 74502674 : known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1351 :
1352 64715618 : if (warn_packed && DECL_PACKED (field))
1353 : {
1354 3 : if (known_align >= TYPE_ALIGN (type))
1355 : {
1356 3 : if (TYPE_ALIGN (type) > desired_align)
1357 : {
1358 3 : if (STRICT_ALIGNMENT)
1359 : warning (OPT_Wattributes, "packed attribute causes "
1360 : "inefficient alignment for %q+D", field);
1361 : /* Don't warn if DECL_PACKED was set by the type. */
1362 3 : else if (!TYPE_PACKED (rli->t))
1363 0 : warning (OPT_Wattributes, "packed attribute is "
1364 : "unnecessary for %q+D", field);
1365 : }
1366 : }
1367 : else
1368 0 : rli->packed_maybe_necessary = 1;
1369 : }
1370 :
1371 : /* Does this field automatically have alignment it needs by virtue
1372 : of the fields that precede it and the record's own alignment? */
1373 64715618 : if (known_align < desired_align
1374 64715618 : && (! targetm.ms_bitfield_layout_p (rli->t)
1375 17 : || rli->prev_field == NULL))
1376 : {
1377 : /* No, we need to skip space before this field.
1378 : Bump the cumulative size to multiple of field alignment. */
1379 :
1380 1441650 : if (!targetm.ms_bitfield_layout_p (rli->t)
1381 1441647 : && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION
1382 2868399 : && !TYPE_ARTIFICIAL (rli->t))
1383 1426718 : warning (OPT_Wpadded, "padding struct to align %q+D", field);
1384 :
1385 : /* If the alignment is still within offset_align, just align
1386 : the bit position. */
1387 1441650 : if (desired_align < rli->offset_align)
1388 1407267 : rli->bitpos = round_up (rli->bitpos, desired_align);
1389 : else
1390 : {
1391 : /* First adjust OFFSET by the partial bits, then align. */
1392 34383 : rli->offset
1393 34383 : = size_binop (PLUS_EXPR, rli->offset,
1394 : fold_convert (sizetype,
1395 : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1396 : bitsize_unit_node)));
1397 34383 : rli->bitpos = bitsize_zero_node;
1398 :
1399 34383 : rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
1400 : }
1401 :
1402 1441650 : if (! TREE_CONSTANT (rli->offset))
1403 416 : rli->offset_align = desired_align;
1404 : }
1405 :
1406 : /* Handle compatibility with PCC. Note that if the record has any
1407 : variable-sized fields, we need not worry about compatibility. */
1408 64715618 : if (PCC_BITFIELD_TYPE_MATTERS
1409 64715618 : && ! targetm.ms_bitfield_layout_p (rli->t)
1410 64715417 : && TREE_CODE (field) == FIELD_DECL
1411 64715417 : && type != error_mark_node
1412 64715417 : && DECL_BIT_FIELD (field)
1413 535278 : && (! DECL_PACKED (field)
1414 : /* Enter for these packed fields only to issue a warning. */
1415 2725 : || TYPE_ALIGN (type) <= BITS_PER_UNIT)
1416 532752 : && maximum_field_alignment == 0
1417 532518 : && ! integer_zerop (DECL_SIZE (field))
1418 531173 : && tree_fits_uhwi_p (DECL_SIZE (field))
1419 531173 : && tree_fits_uhwi_p (rli->offset)
1420 65246777 : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1421 : {
1422 531159 : unsigned int type_align = TYPE_ALIGN (type);
1423 531159 : tree dsize = DECL_SIZE (field);
1424 531159 : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1425 531159 : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1426 531159 : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1427 :
1428 : #ifdef ADJUST_FIELD_ALIGN
1429 531159 : if (! TYPE_USER_ALIGN (type))
1430 526170 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1431 : #endif
1432 :
1433 : /* A bit field may not span more units of alignment of its type
1434 : than its type itself. Advance to next boundary if necessary. */
1435 531159 : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1436 : {
1437 45585 : if (DECL_PACKED (field))
1438 : {
1439 22 : if (warn_packed_bitfield_compat == 1)
1440 14 : inform
1441 14 : (input_location,
1442 : "offset of packed bit-field %qD has changed in GCC 4.4",
1443 : field);
1444 : }
1445 : else
1446 45563 : rli->bitpos = round_up (rli->bitpos, type_align);
1447 : }
1448 :
1449 531159 : if (! DECL_PACKED (field))
1450 530966 : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1451 :
1452 531159 : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1453 : TYPE_WARN_IF_NOT_ALIGN (type));
1454 : }
1455 :
1456 : #ifdef BITFIELD_NBYTES_LIMITED
1457 : if (BITFIELD_NBYTES_LIMITED
1458 : && ! targetm.ms_bitfield_layout_p (rli->t)
1459 : && TREE_CODE (field) == FIELD_DECL
1460 : && type != error_mark_node
1461 : && DECL_BIT_FIELD_TYPE (field)
1462 : && ! DECL_PACKED (field)
1463 : && ! integer_zerop (DECL_SIZE (field))
1464 : && tree_fits_uhwi_p (DECL_SIZE (field))
1465 : && tree_fits_uhwi_p (rli->offset)
1466 : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1467 : {
1468 : unsigned int type_align = TYPE_ALIGN (type);
1469 : tree dsize = DECL_SIZE (field);
1470 : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1471 : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1472 : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1473 :
1474 : #ifdef ADJUST_FIELD_ALIGN
1475 : if (! TYPE_USER_ALIGN (type))
1476 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1477 : #endif
1478 :
1479 : if (maximum_field_alignment != 0)
1480 : type_align = MIN (type_align, maximum_field_alignment);
1481 : /* ??? This test is opposite the test in the containing if
1482 : statement, so this code is unreachable currently. */
1483 : else if (DECL_PACKED (field))
1484 : type_align = MIN (type_align, BITS_PER_UNIT);
1485 :
1486 : /* A bit field may not span the unit of alignment of its type.
1487 : Advance to next boundary if necessary. */
1488 : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1489 : rli->bitpos = round_up (rli->bitpos, type_align);
1490 :
1491 : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1492 : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1493 : TYPE_WARN_IF_NOT_ALIGN (type));
1494 : }
1495 : #endif
1496 :
1497 : /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1498 : A subtlety:
1499 : When a bit field is inserted into a packed record, the whole
1500 : size of the underlying type is used by one or more same-size
1501 : adjacent bitfields. (That is, if its long:3, 32 bits is
1502 : used in the record, and any additional adjacent long bitfields are
1503 : packed into the same chunk of 32 bits. However, if the size
1504 : changes, a new field of that size is allocated.) In an unpacked
1505 : record, this is the same as using alignment, but not equivalent
1506 : when packing.
1507 :
1508 : Note: for compatibility, we use the type size, not the type alignment
1509 : to determine alignment, since that matches the documentation */
1510 :
1511 64715618 : if (targetm.ms_bitfield_layout_p (rli->t))
1512 : {
1513 201 : tree prev_saved = rli->prev_field;
1514 283 : tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1515 :
1516 : /* This is a bitfield if it exists. */
1517 201 : if (rli->prev_field)
1518 : {
1519 82 : bool realign_p = known_align < desired_align;
1520 :
1521 : /* If both are bitfields, nonzero, and the same size, this is
1522 : the middle of a run. Zero declared size fields are special
1523 : and handled as "end of run". (Note: it's nonzero declared
1524 : size, but equal type sizes!) (Since we know that both
1525 : the current and previous fields are bitfields by the
1526 : time we check it, DECL_SIZE must be present for both.) */
1527 82 : if (DECL_BIT_FIELD_TYPE (field)
1528 64 : && !integer_zerop (DECL_SIZE (field))
1529 56 : && !integer_zerop (DECL_SIZE (rli->prev_field))
1530 54 : && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
1531 54 : && tree_fits_uhwi_p (TYPE_SIZE (type))
1532 136 : && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1533 : {
1534 : /* We're in the middle of a run of equal type size fields; make
1535 : sure we realign if we run out of bits. (Not decl size,
1536 : type size!) */
1537 52 : HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
1538 :
1539 52 : if (rli->remaining_in_alignment < bitsize)
1540 : {
1541 1 : HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
1542 :
1543 : /* out of bits; bump up to next 'word'. */
1544 1 : rli->bitpos
1545 1 : = size_binop (PLUS_EXPR, rli->bitpos,
1546 : bitsize_int (rli->remaining_in_alignment));
1547 1 : rli->prev_field = field;
1548 1 : if (typesize < bitsize)
1549 0 : rli->remaining_in_alignment = 0;
1550 : else
1551 1 : rli->remaining_in_alignment = typesize - bitsize;
1552 : }
1553 : else
1554 : {
1555 51 : rli->remaining_in_alignment -= bitsize;
1556 51 : realign_p = false;
1557 : }
1558 : }
1559 : else
1560 : {
1561 : /* End of a run: if leaving a run of bitfields of the same type
1562 : size, we have to "use up" the rest of the bits of the type
1563 : size.
1564 :
1565 : Compute the new position as the sum of the size for the prior
1566 : type and where we first started working on that type.
1567 : Note: since the beginning of the field was aligned then
1568 : of course the end will be too. No round needed. */
1569 :
1570 30 : if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1571 : {
1572 20 : rli->bitpos
1573 20 : = size_binop (PLUS_EXPR, rli->bitpos,
1574 : bitsize_int (rli->remaining_in_alignment));
1575 : }
1576 : else
1577 : /* We "use up" size zero fields; the code below should behave
1578 : as if the prior field was not a bitfield. */
1579 : prev_saved = NULL;
1580 :
1581 : /* Cause a new bitfield to be captured, either this time (if
1582 : currently a bitfield) or next time we see one. */
1583 30 : if (!DECL_BIT_FIELD_TYPE (field)
1584 30 : || integer_zerop (DECL_SIZE (field)))
1585 26 : rli->prev_field = NULL;
1586 : }
1587 :
1588 : /* Does this field automatically have alignment it needs by virtue
1589 : of the fields that precede it and the record's own alignment? */
1590 82 : if (realign_p)
1591 : {
1592 : /* If the alignment is still within offset_align, just align
1593 : the bit position. */
1594 13 : if (desired_align < rli->offset_align)
1595 11 : rli->bitpos = round_up (rli->bitpos, desired_align);
1596 : else
1597 : {
1598 : /* First adjust OFFSET by the partial bits, then align. */
1599 2 : tree d = size_binop (CEIL_DIV_EXPR, rli->bitpos,
1600 : bitsize_unit_node);
1601 2 : rli->offset = size_binop (PLUS_EXPR, rli->offset,
1602 : fold_convert (sizetype, d));
1603 2 : rli->bitpos = bitsize_zero_node;
1604 :
1605 2 : rli->offset = round_up (rli->offset,
1606 : desired_align / BITS_PER_UNIT);
1607 : }
1608 :
1609 13 : if (! TREE_CONSTANT (rli->offset))
1610 0 : rli->offset_align = desired_align;
1611 : }
1612 :
1613 82 : normalize_rli (rli);
1614 : }
1615 :
1616 : /* If we're starting a new run of same type size bitfields
1617 : (or a run of non-bitfields), set up the "first of the run"
1618 : fields.
1619 :
1620 : That is, if the current field is not a bitfield, or if there
1621 : was a prior bitfield the type sizes differ, or if there wasn't
1622 : a prior bitfield the size of the current field is nonzero.
1623 :
1624 : Note: we must be sure to test ONLY the type size if there was
1625 : a prior bitfield and ONLY for the current field being zero if
1626 : there wasn't. */
1627 :
1628 201 : if (!DECL_BIT_FIELD_TYPE (field)
1629 261 : || (prev_saved != NULL
1630 129 : ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1631 69 : : !integer_zerop (DECL_SIZE (field))))
1632 : {
1633 : /* Never smaller than a byte for compatibility. */
1634 143 : unsigned int type_align = BITS_PER_UNIT;
1635 :
1636 : /* (When not a bitfield), we could be seeing a flex array (with
1637 : no DECL_SIZE). Since we won't be using remaining_in_alignment
1638 : until we see a bitfield (and come by here again) we just skip
1639 : calculating it. */
1640 143 : if (DECL_SIZE (field) != NULL
1641 143 : && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field)))
1642 285 : && tree_fits_uhwi_p (DECL_SIZE (field)))
1643 : {
1644 142 : unsigned HOST_WIDE_INT bitsize
1645 142 : = tree_to_uhwi (DECL_SIZE (field));
1646 142 : unsigned HOST_WIDE_INT typesize
1647 142 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
1648 :
1649 142 : if (typesize < bitsize)
1650 0 : rli->remaining_in_alignment = 0;
1651 : else
1652 142 : rli->remaining_in_alignment = typesize - bitsize;
1653 : }
1654 :
1655 : /* Now align (conventionally) for the new type. */
1656 143 : if (! DECL_PACKED (field))
1657 137 : type_align = TYPE_ALIGN (TREE_TYPE (field));
1658 :
1659 143 : if (maximum_field_alignment != 0)
1660 56 : type_align = MIN (type_align, maximum_field_alignment);
1661 :
1662 143 : rli->bitpos = round_up (rli->bitpos, type_align);
1663 :
1664 : /* If we really aligned, don't allow subsequent bitfields
1665 : to undo that. */
1666 143 : rli->prev_field = NULL;
1667 : }
1668 : }
1669 :
1670 : /* Offset so far becomes the position of this field after normalizing. */
1671 64715618 : normalize_rli (rli);
1672 64715618 : DECL_FIELD_OFFSET (field) = rli->offset;
1673 64715618 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1674 64715618 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1675 64715618 : handle_warn_if_not_align (field, rli->record_align);
1676 :
1677 : /* Evaluate nonconstant offsets only once, either now or as soon as safe. */
1678 64715618 : if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST)
1679 1171 : DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field));
1680 :
1681 : /* If this field ended up more aligned than we thought it would be (we
1682 : approximate this by seeing if its position changed), lay out the field
1683 : again; perhaps we can use an integral mode for it now. */
1684 64715618 : if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1685 18227537 : actual_align = least_bit_hwi (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
1686 46488081 : else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1687 74502650 : actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1688 9174802 : else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1689 9173648 : actual_align = (BITS_PER_UNIT
1690 9173648 : * least_bit_hwi (tree_to_uhwi (DECL_FIELD_OFFSET (field))));
1691 : else
1692 1154 : actual_align = DECL_OFFSET_ALIGN (field);
1693 : /* ACTUAL_ALIGN is still the actual alignment *within the record* .
1694 : store / extract bit field operations will check the alignment of the
1695 : record against the mode of bit fields. */
1696 :
1697 64715618 : if (known_align != actual_align)
1698 1486519 : layout_decl (field, actual_align);
1699 :
1700 64715618 : if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1701 160017 : rli->prev_field = field;
1702 :
1703 : /* Now add size of this field to the size of the record. If the size is
1704 : not constant, treat the field as being a multiple of bytes and just
1705 : adjust the offset, resetting the bit position. Otherwise, apportion the
1706 : size amongst the bit position and offset. First handle the case of an
1707 : unspecified size, which can happen when we have an invalid nested struct
1708 : definition, such as struct j { struct j { int i; } }. The error message
1709 : is printed in finish_struct. */
1710 64715618 : if (DECL_SIZE (field) == 0)
1711 : /* Do nothing. */;
1712 64624970 : else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1713 64624970 : || TREE_OVERFLOW (DECL_SIZE (field)))
1714 : {
1715 1099 : rli->offset
1716 1099 : = size_binop (PLUS_EXPR, rli->offset,
1717 : fold_convert (sizetype,
1718 : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1719 : bitsize_unit_node)));
1720 1099 : rli->offset
1721 1099 : = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1722 1099 : rli->bitpos = bitsize_zero_node;
1723 1099 : rli->offset_align = MIN (rli->offset_align, desired_align);
1724 :
1725 1099 : if (!multiple_of_p (bitsizetype, DECL_SIZE (field),
1726 2198 : bitsize_int (rli->offset_align)))
1727 : {
1728 304 : tree type = strip_array_types (TREE_TYPE (field));
1729 : /* The above adjusts offset_align just based on the start of the
1730 : field. The field might not have a size that is a multiple of
1731 : that offset_align though. If the field is an array of fixed
1732 : sized elements, assume there can be any multiple of those
1733 : sizes. If it is a variable length aggregate or array of
1734 : variable length aggregates, assume worst that the end is
1735 : just BITS_PER_UNIT aligned. */
1736 304 : if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
1737 : {
1738 304 : if (TREE_INT_CST_LOW (TYPE_SIZE (type)))
1739 : {
1740 304 : unsigned HOST_WIDE_INT sz
1741 304 : = least_bit_hwi (TREE_INT_CST_LOW (TYPE_SIZE (type)));
1742 304 : rli->offset_align = MIN (rli->offset_align, sz);
1743 : }
1744 : }
1745 : else
1746 0 : rli->offset_align = MIN (rli->offset_align, BITS_PER_UNIT);
1747 : }
1748 : }
1749 64623871 : else if (targetm.ms_bitfield_layout_p (rli->t))
1750 : {
1751 201 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1752 :
1753 : /* If FIELD is the last field and doesn't end at the full length
1754 : of the type then pad the struct out to the full length of the
1755 : last type. */
1756 201 : if (DECL_BIT_FIELD_TYPE (field)
1757 201 : && !integer_zerop (DECL_SIZE (field)))
1758 : {
1759 : /* We have to scan, because non-field DECLS are also here. */
1760 : tree probe = field;
1761 182 : while ((probe = DECL_CHAIN (probe)))
1762 135 : if (TREE_CODE (probe) == FIELD_DECL)
1763 : break;
1764 119 : if (!probe)
1765 47 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1766 : bitsize_int (rli->remaining_in_alignment));
1767 : }
1768 :
1769 201 : normalize_rli (rli);
1770 : }
1771 : else
1772 : {
1773 64623670 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1774 64623670 : normalize_rli (rli);
1775 : }
1776 : }
1777 :
1778 : /* Assuming that all the fields have been laid out, this function uses
1779 : RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1780 : indicated by RLI. */
1781 :
static void
finalize_record_size (record_layout_info rli)
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  The record alignment accumulated
     while placing the fields takes precedence over any pre-set
     TYPE_ALIGN (targets may further round it via ROUND_TYPE_ALIGN).  */
#ifdef ROUND_TYPE_ALIGN
  SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
					    rli->record_align));
#else
  SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align));
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    /* A trailing partial byte occupies one more whole byte.  */
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Round the size up to be a multiple of the required alignment.  */
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));

  /* If rounding changed a known-constant size, tail padding was added;
     report it under -Wpadded (skipping compiler-generated types).  */
  if (TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
      && input_location != BUILTINS_LOCATION
      && !TYPE_ARTIFICIAL (rli->t))
    {
      tree pad_size
	= size_binop (MINUS_EXPR, TYPE_SIZE_UNIT (rli->t), unpadded_size_unit);
      warning (OPT_Wpadded,
	       "padding struct size to alignment boundary with %E bytes", pad_size);
    }

  /* Under -Wpacked, diagnose a packed attribute that made no difference:
     if rounding the packed size up to the alignment the record would have
     had without packing changes nothing, the attribute was unnecessary
     (or, on strict-alignment targets, actively harmful).  */
  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
	= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
	{
	  if (TYPE_NAME (rli->t))
	    {
	      tree name;

	      /* TYPE_NAME may be either the identifier itself or a
		 TYPE_DECL wrapping it.  */
	      if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
		name = TYPE_NAME (rli->t);
	      else
		name = DECL_NAME (TYPE_NAME (rli->t));

	      if (STRICT_ALIGNMENT)
		warning (OPT_Wpacked, "packed attribute causes inefficient "
			 "alignment for %qE", name);
	      else
		warning (OPT_Wpacked,
			 "packed attribute is unnecessary for %qE", name);
	    }
	  else
	    {
	      if (STRICT_ALIGNMENT)
		warning (OPT_Wpacked,
			 "packed attribute causes inefficient alignment");
	      else
		warning (OPT_Wpacked, "packed attribute is unnecessary");
	    }
	}
    }
}
1868 :
1869 : /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1870 :
void
compute_record_mode (tree type)
{
  tree field;
  machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  SET_TYPE_MODE (type, BLKmode);

  /* Records whose size is not a compile-time (poly-)constant must
     stay BLKmode.  */
  poly_uint64 type_size;
  if (!poly_int_tree_p (TYPE_SIZE (type), &type_size))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      /* Bail out (leaving BLKmode) on erroneous fields, BLKmode
	 non-empty fields, and fields whose position or size is not
	 a known constant.  */
      poly_uint64 field_size;
      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
	  || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
	      && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
	      && !(TYPE_SIZE (TREE_TYPE (field)) != 0
		   && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
	  || !tree_fits_poly_uint64_p (bit_position (field))
	  || DECL_SIZE (field) == 0
	  || !poly_int_tree_p (DECL_SIZE (field), &field_size))
	return;

      /* If this field is the whole struct, remember its mode so
	 that, say, we can put a double in a class into a DF
	 register instead of forcing it to live in the stack.  */
      if (known_eq (field_size, type_size)
	  /* Partial int types (e.g. __int20) may have TYPE_SIZE equal to
	     wider types (e.g. int32), despite precision being less.  Ensure
	     that the TYPE_MODE of the struct does not get set to the partial
	     int mode if there is a wider type also in the struct.  */
	  && known_gt (GET_MODE_PRECISION (DECL_MODE (field)),
		       GET_MODE_PRECISION (mode)))
	mode = DECL_MODE (field);

      /* With some targets, it is sub-optimal to access an aligned
	 BLKmode structure as a scalar.  */
      if (targetm.member_type_forces_blk (field, mode))
	return;
    }

  /* If we only have one real field; use its mode if that mode's size
     matches the type's size.  This generally only applies to RECORD_TYPE.
     For UNION_TYPE, if the widest field is MODE_INT then use that mode.
     If the widest field is MODE_PARTIAL_INT, and the union will be passed
     by reference, then use that mode.  */
  if ((TREE_CODE (type) == RECORD_TYPE
       || (TREE_CODE (type) == UNION_TYPE
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		   && (targetm.calls.pass_by_reference
		       (pack_cumulative_args (0),
			function_arg_info (type, mode, /*named=*/false)))))))
      && mode != VOIDmode
      && known_eq (GET_MODE_BITSIZE (mode), type_size))
    ;
  else
    /* Otherwise look for an integer mode matching the type's size.  */
    mode = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1).else_blk ();

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (mode != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
	    || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
    {
      /* If this is the only reason this type is BLKmode, then
	 don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      mode = BLKmode;
    }

  SET_TYPE_MODE (type, mode);
}
1957 :
1958 : /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1959 : out. */
1960 :
static void
finalize_type_size (tree type)
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */
  bool tua_cleared_p = false;
  if (TYPE_MODE (type) != BLKmode
      && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT || !AGGREGATE_TYPE_P (type)))
    {
      unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));

      /* Don't override a larger alignment requirement coming from a user
	 alignment of one of the fields.  */
      if (mode_align >= TYPE_ALIGN (type))
	{
	  SET_TYPE_ALIGN (type, mode_align);
	  /* Remember that we're about to reset this flag.  The variant
	     loop below uses this to clear the flag on variants too.  */
	  tua_cleared_p = TYPE_USER_ALIGN (type);
	  TYPE_USER_ALIGN (type) = false;
	}
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  SET_TYPE_ALIGN (type,
		  ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT));
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = fold_convert (sizetype,
		      size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
				  bitsize_unit_node));

  /* Round both the bit size and the byte size up to the alignment.  */
  if (TYPE_SIZE (type) != 0)
    {
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type));
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Handle empty records as per the x86-64 psABI.  */
  TYPE_EMPTY_P (type) = targetm.calls.empty_record_p (type);

  /* Also layout any other variants of the type.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int precision = TYPE_PRECISION (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      machine_mode mode = TYPE_MODE (type);
      bool empty_p = TYPE_EMPTY_P (type);
      bool typeless = AGGREGATE_TYPE_P (type) && TYPE_TYPELESS_STORAGE (type);

      /* Copy it into all variants.  */
      for (variant = TYPE_MAIN_VARIANT (type);
	   variant != NULL_TREE;
	   variant = TYPE_NEXT_VARIANT (variant))
	{
	  TYPE_SIZE (variant) = size;
	  TYPE_SIZE_UNIT (variant) = size_unit;
	  unsigned valign = align;
	  /* A variant may carry its own user alignment; never lower it.  */
	  if (TYPE_USER_ALIGN (variant))
	    {
	      valign = MAX (valign, TYPE_ALIGN (variant));
	      /* If we reset TYPE_USER_ALIGN on the main variant, we might
		 need to reset it on the variants too.  TYPE_MODE will be set
		 to MODE in this variant, so we can use that.  */
	      if (tua_cleared_p && GET_MODE_ALIGNMENT (mode) >= valign)
		TYPE_USER_ALIGN (variant) = false;
	    }
	  else
	    TYPE_USER_ALIGN (variant) = user_align;
	  SET_TYPE_ALIGN (variant, valign);
	  TYPE_PRECISION (variant) = precision;
	  SET_TYPE_MODE (variant, mode);
	  TYPE_EMPTY_P (variant) = empty_p;
	  if (AGGREGATE_TYPE_P (variant))
	    TYPE_TYPELESS_STORAGE (variant) = typeless;
	}
    }
}
2063 :
2064 : /* Return a new underlying object for a bitfield started with FIELD. */
2065 :
2066 : static tree
2067 165547 : start_bitfield_representative (tree field)
2068 : {
2069 165547 : tree repr = make_node (FIELD_DECL);
2070 165547 : DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
2071 : /* Force the representative to begin at a BITS_PER_UNIT aligned
2072 : boundary - C++ may use tail-padding of a base object to
2073 : continue packing bits so the bitfield region does not start
2074 : at bit zero (see g++.dg/abi/bitfield5.C for example).
2075 : Unallocated bits may happen for other reasons as well,
2076 : for example Ada which allows explicit bit-granular structure layout. */
2077 331094 : DECL_FIELD_BIT_OFFSET (repr)
2078 165547 : = size_binop (BIT_AND_EXPR,
2079 : DECL_FIELD_BIT_OFFSET (field),
2080 : bitsize_int (~(BITS_PER_UNIT - 1)));
2081 165547 : SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
2082 165547 : DECL_SIZE (repr) = DECL_SIZE (field);
2083 165547 : DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
2084 165547 : DECL_PACKED (repr) = DECL_PACKED (field);
2085 165547 : DECL_CONTEXT (repr) = DECL_CONTEXT (field);
2086 : /* There are no indirect accesses to this field. If we introduce
2087 : some then they have to use the record alias set. This makes
2088 : sure to properly conflict with [indirect] accesses to addressable
2089 : fields of the bitfield group. */
2090 165547 : DECL_NONADDRESSABLE_P (repr) = 1;
2091 165547 : return repr;
2092 : }
2093 :
2094 : /* Finish up a bitfield group that was started by creating the underlying
2095 : object REPR with the last field in the bitfield group FIELD. */
2096 :
static void
finish_bitfield_representative (tree repr, tree field)
{
  unsigned HOST_WIDE_INT bitsize, maxbitsize;
  tree nextf, size;

  /* BITSIZE is the extent of the group, in bits, from the start of
     REPR to the end of FIELD (the last member of the group).  */
  size = size_diffop (DECL_FIELD_OFFSET (field),
		      DECL_FIELD_OFFSET (repr));
  while (TREE_CODE (size) == COMPOUND_EXPR)
    size = TREE_OPERAND (size, 1);
  gcc_assert (tree_fits_uhwi_p (size));
  bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
	     + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
	     - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
	     + tree_to_uhwi (DECL_SIZE (field)));

  /* Round up bitsize to multiples of BITS_PER_UNIT.  */
  bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);

  /* Now nothing tells us how to pad out bitsize ...  Compute MAXBITSIZE,
     the largest extent the representative may cover: up to the next
     field for a RECORD_TYPE, otherwise up to the aggregate's size.  */
  if (TREE_CODE (DECL_CONTEXT (field)) == RECORD_TYPE)
    {
      /* Skip non-FIELD_DECL entries on the chain.  */
      nextf = DECL_CHAIN (field);
      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
	nextf = DECL_CHAIN (nextf);
    }
  else
    nextf = NULL_TREE;
  if (nextf)
    {
      tree maxsize;
      /* If there was an error, the field may be not laid out
	 correctly.  Don't bother to do anything.  */
      if (TREE_TYPE (nextf) == error_mark_node)
	{
	  TREE_TYPE (repr) = error_mark_node;
	  return;
	}
      maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
			     DECL_FIELD_OFFSET (repr));
      if (tree_fits_uhwi_p (maxsize))
	{
	  maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
			+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
			- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	  /* If the group ends within a bitfield nextf does not need to be
	     aligned to BITS_PER_UNIT.  Thus round up.  */
	  maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
	}
      else
	maxbitsize = bitsize;
    }
  else
    {
      /* Note that if the C++ FE sets up tail-padding to be re-used it
	 creates a as-base variant of the type with TYPE_SIZE adjusted
	 accordingly.  So it is safe to include tail-padding here.  */
      tree aggsize = lang_hooks.types.unit_size_without_reusable_padding
		       (DECL_CONTEXT (field));
      tree maxsize = size_diffop (aggsize, DECL_FIELD_OFFSET (repr));
      /* We cannot generally rely on maxsize to fold to an integer constant,
	 so use bitsize as fallback for this case.  */
      if (tree_fits_uhwi_p (maxsize))
	maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
		      - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      else
	maxbitsize = bitsize;
    }

  /* Only if we don't artificially break up the representative in
     the middle of a large bitfield with different possibly
     overlapping representatives.  And all representatives start
     at byte offset.  */
  gcc_assert (maxbitsize % BITS_PER_UNIT == 0);

  /* Find the smallest nice mode to use.  */
  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    if (GET_MODE_BITSIZE (mode_iter.require ()) >= bitsize)
      break;

  /* If no integer mode covers the group within MAXBITSIZE, fall back
     to a BLKmode byte-array representative.  */
  scalar_int_mode mode;
  if (!mode_iter.exists (&mode)
      || GET_MODE_BITSIZE (mode) > maxbitsize
      || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)
    {
      if (TREE_CODE (TREE_TYPE (field)) == BITINT_TYPE)
	{
	  struct bitint_info info;
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (field));
	  bool ok = targetm.c.bitint_type_info (prec, &info);
	  gcc_assert (ok);
	  scalar_int_mode limb_mode
	    = as_a <scalar_int_mode> (info.abi_limb_mode);
	  unsigned lprec = GET_MODE_PRECISION (limb_mode);
	  if (prec > lprec)
	    {
	      /* For middle/large/huge _BitInt prefer bitsize being a multiple
		 of limb precision.  */
	      unsigned HOST_WIDE_INT bsz = CEIL (bitsize, lprec) * lprec;
	      if (bsz <= maxbitsize)
		bitsize = bsz;
	    }
	}
      /* We really want a BLKmode representative only as a last resort,
	 considering the member b in
	   struct { int a : 7; int b : 17; int c; } __attribute__((packed));
	 Otherwise we simply want to split the representative up
	 allowing for overlaps within the bitfield region as required for
	   struct { int a : 7; int b : 7;
		    int c : 10; int d; } __attribute__((packed));
	 [0, 15] HImode for a and b, [8, 23] HImode for c.  */
      DECL_SIZE (repr) = bitsize_int (bitsize);
      DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
      SET_DECL_MODE (repr, BLKmode);
      TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
						 bitsize / BITS_PER_UNIT);
    }
  else
    {
      unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
      DECL_SIZE (repr) = bitsize_int (modesize);
      DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
      SET_DECL_MODE (repr, mode);
      TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
    }

  /* Remember whether the bitfield group is at the end of the
     structure or not.  */
  DECL_CHAIN (repr) = nextf;
}
2228 :
2229 : /* Compute and set FIELD_DECLs for the underlying objects we should
2230 : use for bitfield access for the structure T. */
2231 :
void
finish_bitfield_layout (tree t)
{
  tree field, prev;
  tree repr = NULL_TREE;

  /* Members of a QUAL_UNION_TYPE overlap arbitrarily; no grouping
     is done for it.  */
  if (TREE_CODE (t) == QUAL_UNION_TYPE)
    return;

  /* Walk the field chain, opening a representative at the first
     bitfield of a run and closing it at the first non-bitfield
     (or zero-size bitfield) that follows.  */
  for (prev = NULL_TREE, field = TYPE_FIELDS (t);
       field; field = DECL_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      /* In the C++ memory model, consecutive bit fields in a structure are
	 considered one memory location and updating a memory location
	 may not store into adjacent memory locations.  */
      if (!repr
	  && DECL_BIT_FIELD_TYPE (field))
	{
	  /* Start new representative.  */
	  repr = start_bitfield_representative (field);
	}
      else if (repr
	       && ! DECL_BIT_FIELD_TYPE (field))
	{
	  /* Finish off new representative.  */
	  finish_bitfield_representative (repr, prev);
	  repr = NULL_TREE;
	}
      else if (DECL_BIT_FIELD_TYPE (field))
	{
	  gcc_assert (repr != NULL_TREE);

	  /* Zero-size bitfields finish off a representative and
	     do not have a representative themselves.  This is
	     required by the C++ memory model.  */
	  if (integer_zerop (DECL_SIZE (field)))
	    {
	      finish_bitfield_representative (repr, prev);
	      repr = NULL_TREE;
	    }

	  /* We assume that either DECL_FIELD_OFFSET of the representative
	     and each bitfield member is a constant or they are equal.
	     This is because we need to be able to compute the bit-offset
	     of each field relative to the representative in get_bit_range
	     during RTL expansion.
	     If these constraints are not met, simply force a new
	     representative to be generated.  That will at most
	     generate worse code but still maintain correctness with
	     respect to the C++ memory model.  */
	  else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))
		      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
		     || operand_equal_p (DECL_FIELD_OFFSET (repr),
					 DECL_FIELD_OFFSET (field), 0)))
	    {
	      finish_bitfield_representative (repr, prev);
	      repr = start_bitfield_representative (field);
	    }
	}
      else
	continue;

      if (repr)
	DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;

      /* In a RECORD_TYPE the run may continue into the next field; in
	 a UNION_TYPE every field stands alone, so close the
	 representative immediately.  */
      if (TREE_CODE (t) == RECORD_TYPE)
	prev = field;
      else if (repr)
	{
	  finish_bitfield_representative (repr, field);
	  repr = NULL_TREE;
	}
    }

  /* Close a run that reached the end of the field chain.  */
  if (repr)
    finish_bitfield_representative (repr, prev);
}
2312 :
2313 : /* Do all of the work required to layout the type indicated by RLI,
2314 : once the fields have been laid out. This function will call `free'
2315 : for RLI, unless FREE_P is false. Passing a value other than false
2316 : for FREE_P is bad practice; this option only exists to support the
2317 : G++ 3.2 ABI. */
2318 :
2319 : void
2320 60683216 : finish_record_layout (record_layout_info rli, int free_p)
2321 : {
2322 60683216 : tree variant;
2323 :
2324 : /* Compute the final size. */
2325 60683216 : finalize_record_size (rli);
2326 :
2327 : /* Compute the TYPE_MODE for the record. */
2328 60683216 : compute_record_mode (rli->t);
2329 :
2330 : /* Perform any last tweaks to the TYPE_SIZE, etc. */
2331 60683216 : finalize_type_size (rli->t);
2332 :
2333 : /* Compute bitfield representatives. */
2334 60683216 : finish_bitfield_layout (rli->t);
2335 :
2336 : /* Propagate TYPE_PACKED and TYPE_REVERSE_STORAGE_ORDER to variants.
2337 : With C++ templates, it is too early to do this when the attribute
2338 : is being parsed. */
2339 134505737 : for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
2340 73822521 : variant = TYPE_NEXT_VARIANT (variant))
2341 : {
2342 73822521 : TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
2343 147645042 : TYPE_REVERSE_STORAGE_ORDER (variant)
2344 73822521 : = TYPE_REVERSE_STORAGE_ORDER (rli->t);
2345 : }
2346 :
2347 : /* Lay out any static members. This is done now because their type
2348 : may use the record's type. */
2349 74243649 : while (!vec_safe_is_empty (rli->pending_statics))
2350 13560433 : layout_decl (rli->pending_statics->pop (), 0);
2351 :
2352 : /* Clean up. */
2353 60683216 : if (free_p)
2354 : {
2355 60683216 : vec_free (rli->pending_statics);
2356 60683216 : free (rli);
2357 : }
2358 60683216 : }
2359 :
2360 :
2361 : /* Finish processing a builtin RECORD_TYPE type TYPE. It's name is
2362 : NAME, its fields are chained in reverse on FIELDS.
2363 :
2364 : If ALIGN_TYPE is non-null, it is given the same alignment as
2365 : ALIGN_TYPE. */
2366 :
2367 : void
2368 1149748 : finish_builtin_struct (tree type, const char *name, tree fields,
2369 : tree align_type)
2370 : {
2371 1149748 : tree tail, next;
2372 :
2373 3594770 : for (tail = NULL_TREE; fields; tail = fields, fields = next)
2374 : {
2375 2445022 : DECL_FIELD_CONTEXT (fields) = type;
2376 2445022 : next = DECL_CHAIN (fields);
2377 2445022 : DECL_CHAIN (fields) = tail;
2378 : }
2379 1149748 : TYPE_FIELDS (type) = tail;
2380 :
2381 1149748 : if (align_type)
2382 : {
2383 1015669 : SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type));
2384 1015669 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
2385 1015669 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2386 : TYPE_WARN_IF_NOT_ALIGN (align_type));
2387 : }
2388 :
2389 1149748 : layout_type (type);
2390 : #if 0 /* not yet, should get fixed properly later */
2391 : TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
2392 : #else
2393 1149748 : TYPE_NAME (type) = build_decl (BUILTINS_LOCATION,
2394 : TYPE_DECL, get_identifier (name), type);
2395 : #endif
2396 1149748 : TYPE_STUB_DECL (type) = TYPE_NAME (type);
2397 1149748 : layout_decl (TYPE_NAME (type), 0);
2398 1149748 : }
2399 :
2400 : /* Compute TYPE_MODE for TYPE (which is ARRAY_TYPE). */
2401 :
2402 80453765 : void compute_array_mode (tree type)
2403 : {
2404 80453765 : gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
2405 :
2406 80453765 : SET_TYPE_MODE (type, BLKmode);
2407 80453765 : if (TYPE_SIZE (type) != 0
2408 68818994 : && ! targetm.member_type_forces_blk (type, VOIDmode)
2409 : /* BLKmode elements force BLKmode aggregate;
2410 : else extract/store fields may lose. */
2411 149272759 : && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
2412 586572 : || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
2413 : {
2414 68232422 : SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),
2415 : TYPE_SIZE (type)));
2416 68232422 : if (TYPE_MODE (type) != BLKmode
2417 : && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
2418 : && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
2419 : {
2420 : TYPE_NO_FORCE_BLK (type) = 1;
2421 : SET_TYPE_MODE (type, BLKmode);
2422 : }
2423 : }
2424 80453765 : }
2425 :
/* Calculate the mode, size, and alignment for TYPE.
   For an array type, calculate the element separation as well.
   Record TYPE on the chain of permanent or temporary types
   so that dbxout will find out about it.

   TYPE_SIZE of a type is nonzero if the type has been laid out already.
   layout_type does nothing on such a type.

   If the type is incomplete, its TYPE_SIZE remains zero.  */

void
layout_type (tree type)
{
  gcc_assert (type);

  if (type == error_mark_node)
    return;

  /* We don't want finalize_type_size to copy an alignment attribute to
     variants that don't have it.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Do nothing if type has been laid out before.  */
  if (TYPE_SIZE (type))
    return;

  switch (TREE_CODE (type))
    {
    case LANG_TYPE:
      /* This kind of type is the responsibility
	 of the language-specific code.  */
      gcc_unreachable ();

    case BOOLEAN_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      {
	/* Integral types take the smallest integer mode that can hold
	   their precision; the size follows from the mode.  */
	scalar_int_mode mode
	  = smallest_int_mode_for_size (TYPE_PRECISION (type)).require ();
	SET_TYPE_MODE (type, mode);
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	/* Don't set TYPE_PRECISION here, as it may be set by a bitfield.  */
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	break;
      }

    case BITINT_TYPE:
      {
	struct bitint_info info;
	int cnt;
	/* Ask the target how _BitInt of this precision is represented.  */
	bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
	gcc_assert (ok);
	scalar_int_mode limb_mode
	  = as_a <scalar_int_mode> (info.abi_limb_mode);
	if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
	  {
	    /* Fits in a single limb: use the limb mode directly.  */
	    SET_TYPE_MODE (type, limb_mode);
	    gcc_assert (info.abi_limb_mode == info.limb_mode);
	    cnt = 1;
	  }
	else
	  {
	    /* Multi-limb: laid out like an array of limbs, BLKmode by
	       default.  */
	    SET_TYPE_MODE (type, BLKmode);
	    cnt = CEIL (TYPE_PRECISION (type), GET_MODE_PRECISION (limb_mode));
	    gcc_assert (info.abi_limb_mode == info.limb_mode
			|| !info.big_endian == !WORDS_BIG_ENDIAN);
	  }
	TYPE_SIZE (type) = bitsize_int (cnt * GET_MODE_BITSIZE (limb_mode));
	TYPE_SIZE_UNIT (type) = size_int (cnt * GET_MODE_SIZE (limb_mode));
	SET_TYPE_ALIGN (type, GET_MODE_ALIGNMENT (limb_mode));
	if (cnt > 1)
	  {
	    /* Use same mode as compute_record_mode would use for a structure
	       containing cnt limb_mode elements.  */
	    machine_mode mode = mode_for_size_tree (TYPE_SIZE (type),
						    MODE_INT, 1).else_blk ();
	    if (mode == BLKmode)
	      break;
	    finalize_type_size (type);
	    SET_TYPE_MODE (type, mode);
	    if (STRICT_ALIGNMENT
		&& !(TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
		     || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
	      {
		/* If this is the only reason this type is BLKmode, then
		   don't force containing types to be BLKmode.  */
		TYPE_NO_FORCE_BLK (type) = 1;
		SET_TYPE_MODE (type, BLKmode);
	      }
	    /* finalize_type_size was already called above, so propagate
	       the mode decision to all variants by hand and return early
	       to avoid calling it a second time.  */
	    if (TYPE_NEXT_VARIANT (type) || type != TYPE_MAIN_VARIANT (type))
	      for (tree variant = TYPE_MAIN_VARIANT (type);
		   variant != NULL_TREE;
		   variant = TYPE_NEXT_VARIANT (variant))
		{
		  SET_TYPE_MODE (variant, mode);
		  if (STRICT_ALIGNMENT
		      && !(TYPE_ALIGN (variant) >= BIGGEST_ALIGNMENT
			   || (TYPE_ALIGN (variant)
			       >= GET_MODE_ALIGNMENT (mode))))
		    {
		      TYPE_NO_FORCE_BLK (variant) = 1;
		      SET_TYPE_MODE (variant, BLKmode);
		    }
		}
	    return;
	  }
	break;
      }

    case REAL_TYPE:
      {
	/* Allow the caller to choose the type mode, which is how decimal
	   floats are distinguished from binary ones.  */
	if (TYPE_MODE (type) == VOIDmode)
	  SET_TYPE_MODE
	    (type, float_mode_for_size (TYPE_PRECISION (type)).require ());
	scalar_float_mode mode = as_a <scalar_float_mode> (TYPE_MODE (type));
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	break;
      }

    case FIXED_POINT_TYPE:
      {
	/* TYPE_MODE (type) has been set already.  */
	scalar_mode mode = SCALAR_TYPE_MODE (type);
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	break;
      }

    case COMPLEX_TYPE:
      TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
      /* A BLKmode component (only multi-limb _BitInt) means the complex
	 type is BLKmode too: twice the component size.  */
      if (TYPE_MODE (TREE_TYPE (type)) == BLKmode)
	{
	  gcc_checking_assert (TREE_CODE (TREE_TYPE (type)) == BITINT_TYPE);
	  SET_TYPE_MODE (type, BLKmode);
	  TYPE_SIZE (type)
	    = int_const_binop (MULT_EXPR, TYPE_SIZE (TREE_TYPE (type)),
			       bitsize_int (2));
	  TYPE_SIZE_UNIT (type)
	    = int_const_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (type)),
			       bitsize_int (2));
	  break;
	}
      SET_TYPE_MODE (type,
		     GET_MODE_COMPLEX_MODE (TYPE_MODE (TREE_TYPE (type))));

      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case VECTOR_TYPE:
      {
	poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type);
	tree innertype = TREE_TYPE (type);

	/* Find an appropriate mode for the vector type.  */
	if (TYPE_MODE (type) == VOIDmode)
	  SET_TYPE_MODE (type,
			 mode_for_vector (SCALAR_TYPE_MODE (innertype),
					  nunits).else_blk ());

	TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
	TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
	/* Several boolean vector elements may fit in a single unit.  */
	if (VECTOR_BOOLEAN_TYPE_P (type)
	    && type->type_common.mode != BLKmode)
	  {
	    /* Size comes from the mode, not elements * element size.  */
	    TYPE_SIZE_UNIT (type)
	      = size_int (GET_MODE_SIZE (type->type_common.mode));
	    TYPE_SIZE (type)
	      = bitsize_int (GET_MODE_BITSIZE (type->type_common.mode));
	  }
	else
	  {
	    TYPE_SIZE_UNIT (type)
	      = size_int (GET_MODE_SIZE (SCALAR_TYPE_MODE (innertype))
			  * nunits);
	    TYPE_SIZE (type)
	      = bitsize_int (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (innertype))
			     * nunits);
	  }

	/* For vector types, we do not default to the mode's alignment.
	   Instead, query a target hook, defaulting to natural alignment.
	   This prevents ABI changes depending on whether or not native
	   vector modes are supported.  */
	SET_TYPE_ALIGN (type, targetm.vector_alignment (type));

	/* However, if the underlying mode requires a bigger alignment than
	   what the target hook provides, we cannot use the mode.  For now,
	   simply reject that case.  */
	gcc_assert (TYPE_ALIGN (type)
		    >= GET_MODE_ALIGNMENT (TYPE_MODE (type)));
	break;
      }

    case VOID_TYPE:
      /* This is an incomplete type and so doesn't have a size.  */
      SET_TYPE_ALIGN (type, 1);
      TYPE_USER_ALIGN (type) = 0;
      SET_TYPE_MODE (type, VOIDmode);
      break;

    case OFFSET_TYPE:
      /* Pointer-to-member offsets are pointer-sized integers.  */
      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS);
      /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be
	 integral, which may be an __intN.  */
      SET_TYPE_MODE (type, int_mode_for_size (POINTER_SIZE, 0).require ());
      TYPE_PRECISION (type) = POINTER_SIZE;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* It's hard to see what the mode and size of a function ought to
	 be, but we do know the alignment is FUNCTION_BOUNDARY, so
	 make it consistent with that.  */
      SET_TYPE_MODE (type,
		     int_mode_for_size (FUNCTION_BOUNDARY, 0).else_blk ());
      TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
      TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* The mode was chosen when the type was created (it can vary
	   with address space); derive size and precision from it.  */
	scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	TYPE_UNSIGNED (type) = 1;
	TYPE_PRECISION (type) = GET_MODE_PRECISION (mode);
      }
      break;

    case ARRAY_TYPE:
      {
	tree index = TYPE_DOMAIN (type);
	tree element = TREE_TYPE (type);

	/* We need to know both bounds in order to compute the size.  */
	if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
	    && TYPE_SIZE (element))
	  {
	    tree ub = TYPE_MAX_VALUE (index);
	    tree lb = TYPE_MIN_VALUE (index);
	    tree element_size = TYPE_SIZE (element);
	    tree length;

	    /* Make sure that an array of zero-sized element is zero-sized
	       regardless of its extent.  */
	    if (integer_zerop (element_size))
	      length = size_zero_node;

	    /* The computation should happen in the original signedness so
	       that (possible) negative values are handled appropriately
	       when determining overflow.  */
	    else
	      {
		/* ??? When it is obvious that the range is signed
		   represent it using ssizetype.  */
		if (TREE_CODE (lb) == INTEGER_CST
		    && TREE_CODE (ub) == INTEGER_CST
		    && TYPE_UNSIGNED (TREE_TYPE (lb))
		    && tree_int_cst_lt (ub, lb))
		  {
		    /* ub < lb in an unsigned domain: reinterpret the
		       bounds as signed before computing the length.  */
		    lb = wide_int_to_tree (ssizetype,
					   offset_int::from (wi::to_wide (lb),
							     SIGNED));
		    ub = wide_int_to_tree (ssizetype,
					   offset_int::from (wi::to_wide (ub),
							     SIGNED));
		  }
		/* Number of elements is ub - lb + 1.  */
		length
		  = fold_convert (sizetype,
				  size_binop (PLUS_EXPR,
					      build_int_cst (TREE_TYPE (lb), 1),
					      size_binop (MINUS_EXPR, ub, lb)));
	      }

	    /* ??? We have no way to distinguish a null-sized array from an
	       array spanning the whole sizetype range, so we arbitrarily
	       decide that [0, -1] is the only valid representation.  */
	    if (integer_zerop (length)
		&& TREE_OVERFLOW (length)
		&& integer_zerop (lb))
	      length = size_zero_node;

	    TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
					   bits_from_bytes (length));

	    /* If we know the size of the element, calculate the total size
	       directly, rather than do some division thing below.  This
	       optimization helps Fortran assumed-size arrays (where the
	       size of the array is determined at runtime) substantially.  */
	    if (TYPE_SIZE_UNIT (element))
	      TYPE_SIZE_UNIT (type)
		= size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
	  }

	/* Now round the alignment and size,
	   using machine-dependent criteria if any.  */

	unsigned align = TYPE_ALIGN (element);
	if (TYPE_USER_ALIGN (type))
	  align = MAX (align, TYPE_ALIGN (type));
	else
	  TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
	if (!TYPE_WARN_IF_NOT_ALIGN (type))
	  SET_TYPE_WARN_IF_NOT_ALIGN (type,
				      TYPE_WARN_IF_NOT_ALIGN (element));
#ifdef ROUND_TYPE_ALIGN
	align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT);
#else
	align = MAX (align, BITS_PER_UNIT);
#endif
	SET_TYPE_ALIGN (type, align);
	compute_array_mode (type);
	if (AGGREGATE_TYPE_P (element))
	  TYPE_TYPELESS_STORAGE (type) = TYPE_TYPELESS_STORAGE (element);
	/* When the element size is constant, check that it is at least as
	   large as the element alignment.  */
	if (TYPE_SIZE_UNIT (element)
	    && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
	    /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
	       TYPE_ALIGN_UNIT.  */
	    && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
	    && !integer_zerop (TYPE_SIZE_UNIT (element)))
	  {
	    if (compare_tree_int (TYPE_SIZE_UNIT (element),
				  TYPE_ALIGN_UNIT (element)) < 0)
	      error ("alignment of array elements is greater than "
		     "element size");
	    else if (TYPE_ALIGN_UNIT (element) > 1
		     && (wi::zext (wi::to_wide (TYPE_SIZE_UNIT (element)),
				   ffs_hwi (TYPE_ALIGN_UNIT (element)) - 1)
			 != 0))
	      error ("size of array element is not a multiple of its "
		     "alignment");
	  }
	break;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	record_layout_info rli;

	/* Initialize the layout information.  */
	rli = start_record_layout (type);

	/* If this is a QUAL_UNION_TYPE, we want to process the fields
	   in the reverse order in building the COND_EXPR that denotes
	   its size.  We reverse them again later.  */
	if (TREE_CODE (type) == QUAL_UNION_TYPE)
	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

	/* Place all the fields.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  place_field (rli, field);

	if (TREE_CODE (type) == QUAL_UNION_TYPE)
	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

	/* Finish laying out the record.  */
	finish_record_layout (rli, /*free_p=*/true);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
     records and unions, finish_record_layout already called this
     function.  */
  if (!RECORD_OR_UNION_TYPE_P (type))
    finalize_type_size (type);

  /* We should never see alias sets on incomplete aggregates.  And we
     should not call layout_type on not incomplete aggregates.  */
  if (AGGREGATE_TYPE_P (type))
    gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
}
2813 :
2814 : /* Return the least alignment required for type TYPE. */
2815 :
2816 : unsigned int
2817 39561641 : min_align_of_type (tree type)
2818 : {
2819 39561641 : unsigned int align = TYPE_ALIGN (type);
2820 39561641 : if (!TYPE_USER_ALIGN (type))
2821 : {
2822 71791067 : align = MIN (align, BIGGEST_ALIGNMENT);
2823 : #ifdef BIGGEST_FIELD_ALIGNMENT
2824 : align = MIN (align, BIGGEST_FIELD_ALIGNMENT);
2825 : #endif
2826 36890858 : unsigned int field_align = align;
2827 : #ifdef ADJUST_FIELD_ALIGN
2828 36890858 : field_align = ADJUST_FIELD_ALIGN (NULL_TREE, type, field_align);
2829 : #endif
2830 36890858 : align = MIN (align, field_align);
2831 : }
2832 39561641 : return align / BITS_PER_UNIT;
2833 : }
2834 :
2835 : /* Create and return a type for signed integers of PRECISION bits. */
2836 :
2837 : tree
2838 2715241 : make_signed_type (int precision)
2839 : {
2840 2715241 : tree type = make_node (INTEGER_TYPE);
2841 :
2842 2715241 : TYPE_PRECISION (type) = precision;
2843 :
2844 2715241 : fixup_signed_type (type);
2845 2715241 : return type;
2846 : }
2847 :
2848 : /* Create and return a type for unsigned integers of PRECISION bits. */
2849 :
2850 : tree
2851 10040342 : make_unsigned_type (int precision)
2852 : {
2853 10040342 : tree type = make_node (INTEGER_TYPE);
2854 :
2855 10040342 : TYPE_PRECISION (type) = precision;
2856 :
2857 10040342 : fixup_unsigned_type (type);
2858 10040342 : return type;
2859 : }
2860 :
2861 : /* Create and return a type for fract of PRECISION bits, UNSIGNEDP,
2862 : and SATP. */
2863 :
2864 : tree
2865 5751720 : make_fract_type (int precision, int unsignedp, int satp)
2866 : {
2867 5751720 : tree type = make_node (FIXED_POINT_TYPE);
2868 :
2869 5751720 : TYPE_PRECISION (type) = precision;
2870 :
2871 5751720 : if (satp)
2872 2875860 : TYPE_SATURATING (type) = 1;
2873 :
2874 : /* Lay out the type: set its alignment, size, etc. */
2875 5751720 : TYPE_UNSIGNED (type) = unsignedp;
2876 5751720 : enum mode_class mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
2877 5751720 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2878 5751720 : layout_type (type);
2879 :
2880 5751720 : return type;
2881 : }
2882 :
2883 : /* Create and return a type for accum of PRECISION bits, UNSIGNEDP,
2884 : and SATP. */
2885 :
2886 : tree
2887 4601376 : make_accum_type (int precision, int unsignedp, int satp)
2888 : {
2889 4601376 : tree type = make_node (FIXED_POINT_TYPE);
2890 :
2891 4601376 : TYPE_PRECISION (type) = precision;
2892 :
2893 4601376 : if (satp)
2894 2300688 : TYPE_SATURATING (type) = 1;
2895 :
2896 : /* Lay out the type: set its alignment, size, etc. */
2897 4601376 : TYPE_UNSIGNED (type) = unsignedp;
2898 4601376 : enum mode_class mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
2899 4601376 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2900 4601376 : layout_type (type);
2901 :
2902 4601376 : return type;
2903 : }
2904 :
/* Initialize sizetypes so layout_type can use them.  Creates sizetype,
   bitsizetype and their signed variants ssizetype and sbitsizetype.
   The first two must be laid out by hand, since layout_type itself
   needs them to build size constants.  */

void
initialize_sizetypes (void)
{
  int precision, bprecision;

  /* Get sizetypes precision from the SIZE_TYPE target macro.  */
  if (strcmp (SIZETYPE, "unsigned int") == 0)
    precision = INT_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long unsigned int") == 0)
    precision = LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
    precision = LONG_LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "short unsigned int") == 0)
    precision = SHORT_TYPE_SIZE;
  else
    {
      int i;

      /* SIZETYPE names one of the target's __intN types; find it.  */
      precision = -1;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZETYPE) == 0
		|| strcmp (altname, SIZETYPE) == 0)
	      {
		precision = int_n_data[i].bitsize;
	      }
	  }
      /* SIZETYPE did not match any known type name.  */
      if (precision == -1)
	gcc_unreachable ();
    }

  /* bitsizetype must hold sizes measured in bits, so it needs
     LOG2_BITS_PER_UNIT extra bits plus one for the sign, capped at
     what the widest fixed mode (and double_int) can represent.  */
  bprecision
    = MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE);
  bprecision
    = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision).require ());
  if (bprecision > HOST_BITS_PER_DOUBLE_INT)
    bprecision = HOST_BITS_PER_DOUBLE_INT;

  /* Create stubs for sizetype and bitsizetype so we can create constants.  */
  sizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (sizetype) = get_identifier ("sizetype");
  TYPE_PRECISION (sizetype) = precision;
  TYPE_UNSIGNED (sizetype) = 1;
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
  TYPE_PRECISION (bitsizetype) = bprecision;
  TYPE_UNSIGNED (bitsizetype) = 1;

  /* Now layout both types manually.  */
  scalar_int_mode mode = smallest_int_mode_for_size (precision).require ();
  SET_TYPE_MODE (sizetype, mode);
  SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
  TYPE_SIZE (sizetype) = bitsize_int (precision);
  TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (mode));
  set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);

  mode = smallest_int_mode_for_size (bprecision).require ();
  SET_TYPE_MODE (bitsizetype, mode);
  SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
  TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
  TYPE_SIZE_UNIT (bitsizetype) = size_int (GET_MODE_SIZE (mode));
  set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);

  /* Create the signed variants of *sizetype.  */
  ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
  TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
  sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
  TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
}
2981 :
2982 : /* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE
2983 : or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2984 : for TYPE, based on the PRECISION and whether or not the TYPE
2985 : IS_UNSIGNED. PRECISION need not correspond to a width supported
2986 : natively by the hardware; for example, on a machine with 8-bit,
2987 : 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2988 : 61. */
2989 :
2990 : void
2991 16221029 : set_min_and_max_values_for_integral_type (tree type,
2992 : int precision,
2993 : signop sgn)
2994 : {
2995 : /* For bitfields with zero width we end up creating integer types
2996 : with zero precision. Don't assign any minimum/maximum values
2997 : to those types, they don't have any valid value. */
2998 16221029 : if (precision < 1)
2999 : return;
3000 :
3001 16220705 : gcc_assert (precision <= WIDE_INT_MAX_PRECISION);
3002 :
3003 16220705 : TYPE_MIN_VALUE (type)
3004 32441410 : = wide_int_to_tree (type, wi::min_value (precision, sgn));
3005 16220705 : TYPE_MAX_VALUE (type)
3006 32442807 : = wide_int_to_tree (type, wi::max_value (precision, sgn));
3007 : }
3008 :
3009 : /* Set the extreme values of TYPE based on its precision in bits,
3010 : then lay it out. Used when make_signed_type won't do
3011 : because the tree code is not INTEGER_TYPE. */
3012 :
3013 : void
3014 3278622 : fixup_signed_type (tree type)
3015 : {
3016 3278622 : int precision = TYPE_PRECISION (type);
3017 :
3018 3278622 : set_min_and_max_values_for_integral_type (type, precision, SIGNED);
3019 :
3020 : /* Lay out the type: set its alignment, size, etc. */
3021 3278622 : layout_type (type);
3022 3278622 : }
3023 :
3024 : /* Set the extreme values of TYPE based on its precision in bits,
3025 : then lay it out. This is used both in `make_unsigned_type'
3026 : and for enumeral types. */
3027 :
3028 : void
3029 10923118 : fixup_unsigned_type (tree type)
3030 : {
3031 10923118 : int precision = TYPE_PRECISION (type);
3032 :
3033 10923118 : TYPE_UNSIGNED (type) = 1;
3034 :
3035 10923118 : set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);
3036 :
3037 : /* Lay out the type: set its alignment, size, etc. */
3038 10923118 : layout_type (type);
3039 10923118 : }
3040 :
/* Construct an iterator for a bitfield that spans BITSIZE bits,
   starting at BITPOS.

   BITREGION_START is the bit position of the first bit in this
   sequence of bit fields.  BITREGION_END is the last bit in this
   sequence.  If these two fields are non-zero, we should restrict the
   memory access to that range.  Otherwise, we are allowed to touch
   any adjacent non bit-fields.

   ALIGN is the alignment of the underlying object in bits.
   VOLATILEP says whether the bitfield is volatile.

   Iteration starts from the narrowest integer mode; next_mode
   delivers successively wider candidates.  */

bit_field_mode_iterator
::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
			   poly_int64 bitregion_start,
			   poly_int64 bitregion_end,
			   unsigned int align, bool volatilep)
: m_mode (NARROWEST_INT_MODE), m_bitsize (bitsize),
  m_bitpos (bitpos), m_bitregion_start (bitregion_start),
  m_bitregion_end (bitregion_end), m_align (align),
  m_volatilep (volatilep), m_count (0)
{
  /* No explicit bit region given: synthesize one from the alignment,
     so next_mode has a bound to test candidate modes against.  */
  if (known_eq (m_bitregion_end, 0))
    {
      /* We can assume that any aligned chunk of ALIGN bits that overlaps
	 the bitfield is mapped and won't trap, provided that ALIGN isn't
	 too large.  The cap is the biggest required alignment for data,
	 or at least the word size.  And force one such chunk at least.  */
      unsigned HOST_WIDE_INT units
	= MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
      if (bitsize <= 0)
	bitsize = 1;
      /* End of the last UNITS-aligned chunk touched by the bitfield.  */
      HOST_WIDE_INT end = bitpos + bitsize + units - 1;
      m_bitregion_end = end - end % units - 1;
    }
}
3077 :
/* Calls to this function return successively larger modes that can be used
   to represent the bitfield.  Return true if another bitfield mode is
   available, storing it in *OUT_MODE if so.  Returns false once no wider
   acceptable mode exists; the iterator is then exhausted.  */

bool
bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
{
  scalar_int_mode mode;
  /* Resume from m_mode, which records where the previous call stopped.  */
  for (; m_mode.exists (&mode); m_mode = GET_MODE_WIDER_MODE (mode))
    {
      unsigned int unit = GET_MODE_BITSIZE (mode);

      /* Skip modes that don't have full precision.  */
      if (unit != GET_MODE_PRECISION (mode))
	continue;

      /* Stop if the mode is too wide to handle efficiently.  */
      if (unit > MAX_FIXED_MODE_SIZE)
	break;

      /* Don't deliver more than one multiword mode; the smallest one
	 should be used.  */
      if (m_count > 0 && unit > BITS_PER_WORD)
	break;

      /* Skip modes that are too small.  */
      unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
      unsigned HOST_WIDE_INT subend = substart + m_bitsize;
      if (subend > unit)
	continue;

      /* Stop if the mode goes outside the bitregion.  */
      HOST_WIDE_INT start = m_bitpos - substart;
      if (maybe_ne (m_bitregion_start, 0)
	  && maybe_lt (start, m_bitregion_start))
	break;
      HOST_WIDE_INT end = start + unit;
      if (maybe_gt (end, m_bitregion_end + 1))
	break;

      /* Stop if the mode requires too much alignment.  */
      if (GET_MODE_ALIGNMENT (mode) > m_align
	  && targetm.slow_unaligned_access (mode, m_align))
	break;

      /* Deliver this mode and remember the next wider one for the
	 following call.  */
      *out_mode = mode;
      m_mode = GET_MODE_WIDER_MODE (mode);
      m_count++;
      return true;
    }
  return false;
}
3130 :
3131 : /* Return true if smaller modes are generally preferred for this kind
3132 : of bitfield. */
3133 :
3134 : bool
3135 3342577 : bit_field_mode_iterator::prefer_smaller_modes ()
3136 : {
3137 3342577 : return (m_volatilep
3138 3342577 : ? targetm.narrow_volatile_bitfield ()
3139 3342577 : : !SLOW_BYTE_ACCESS);
3140 : }
3141 :
3142 : /* Find the best machine mode to use when referencing a bit field of length
3143 : BITSIZE bits starting at BITPOS.
3144 :
3145 : BITREGION_START is the bit position of the first bit in this
3146 : sequence of bit fields. BITREGION_END is the last bit in this
3147 : sequence. If these two fields are non-zero, we should restrict the
3148 : memory access to that range. Otherwise, we are allowed to touch
3149 : any adjacent non bit-fields.
3150 :
3151 : The chosen mode must have no more than LARGEST_MODE_BITSIZE bits.
3152 : INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller
3153 : doesn't want to apply a specific limit.
3154 :
3155 : If no mode meets all these conditions, we return VOIDmode.
3156 :
3157 : The underlying object is known to be aligned to a boundary of ALIGN bits.
3158 :
3159 : If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
3160 : smallest mode meeting these conditions.
3161 :
3162 : If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
3163 : largest mode (but a mode no wider than UNITS_PER_WORD) that meets
3164 : all the conditions.
3165 :
3166 : If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
3167 : decide which of the above modes should be used. */
3168 :
3169 : bool
3170 3183998 : get_best_mode (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3171 : poly_uint64 bitregion_start, poly_uint64 bitregion_end,
3172 : unsigned int align,
3173 : unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
3174 : scalar_int_mode *best_mode)
3175 : {
3176 3183998 : bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
3177 3183998 : bitregion_end, align, volatilep);
3178 3183998 : scalar_int_mode mode;
3179 3183998 : bool found = false;
3180 3183998 : while (iter.next_mode (&mode)
3181 : /* ??? For historical reasons, reject modes that would normally
3182 : receive greater alignment, even if unaligned accesses are
3183 : acceptable. This has both advantages and disadvantages.
3184 : Removing this check means that something like:
3185 :
3186 : struct s { unsigned int x; unsigned int y; };
3187 : int f (struct s *s) { return s->x == 0 && s->y == 0; }
3188 :
3189 : can be implemented using a single load and compare on
3190 : 64-bit machines that have no alignment restrictions.
3191 : For example, on powerpc64-linux-gnu, we would generate:
3192 :
3193 : ld 3,0(3)
3194 : cntlzd 3,3
3195 : srdi 3,3,6
3196 : blr
3197 :
3198 : rather than:
3199 :
3200 : lwz 9,0(3)
3201 : cmpwi 7,9,0
3202 : bne 7,.L3
3203 : lwz 3,4(3)
3204 : cntlzw 3,3
3205 : srwi 3,3,5
3206 : extsw 3,3
3207 : blr
3208 : .p2align 4,,15
3209 : .L3:
3210 : li 3,0
3211 : blr
3212 :
3213 : However, accessing more than one field can make life harder
3214 : for the gimple optimizers. For example, gcc.dg/vect/bb-slp-5.c
3215 : has a series of unsigned short copies followed by a series of
3216 : unsigned short comparisons. With this check, both the copies
3217 : and comparisons remain 16-bit accesses and FRE is able
3218 : to eliminate the latter. Without the check, the comparisons
3219 : can be done using 2 64-bit operations, which FRE isn't able
3220 : to handle in the same way.
3221 :
3222 : Either way, it would probably be worth disabling this check
3223 : during expand. One particular example where removing the
3224 : check would help is the get_best_mode call in store_bit_field.
3225 : If we are given a memory bitregion of 128 bits that is aligned
3226 : to a 64-bit boundary, and the bitfield we want to modify is
3227 : in the second half of the bitregion, this check causes
3228 : store_bitfield to turn the memory into a 64-bit reference
3229 : to the _first_ half of the region. We later use
3230 : adjust_bitfield_address to get a reference to the correct half,
3231 : but doing so looks to adjust_bitfield_address as though we are
3232 : moving past the end of the original object, so it drops the
3233 : associated MEM_EXPR and MEM_OFFSET. Removing the check
3234 : causes store_bit_field to keep a 128-bit memory reference,
3235 : so that the final bitfield reference still has a MEM_EXPR
3236 : and MEM_OFFSET. */
3237 3160043 : && GET_MODE_ALIGNMENT (mode) <= align
3238 6327356 : && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize)
3239 : {
3240 3141032 : *best_mode = mode;
3241 3141032 : found = true;
3242 3141032 : if (iter.prefer_smaller_modes ())
3243 : break;
3244 : }
3245 :
3246 3183998 : return found;
3247 : }
3248 :
3249 : /* Gets minimal and maximal values for MODE (signed or unsigned depending on
3250 : SIGN). The returned constants are made to be usable in TARGET_MODE. */
3251 :
3252 : void
3253 62615705 : get_mode_bounds (scalar_int_mode mode, int sign,
3254 : scalar_int_mode target_mode,
3255 : rtx *mmin, rtx *mmax)
3256 : {
3257 62615705 : unsigned size = GET_MODE_PRECISION (mode);
3258 62615705 : unsigned HOST_WIDE_INT min_val, max_val;
3259 :
3260 62615705 : gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
3261 :
3262 : /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */
3263 62615705 : if (mode == BImode)
3264 : {
3265 : if (STORE_FLAG_VALUE < 0)
3266 : {
3267 : min_val = STORE_FLAG_VALUE;
3268 : max_val = 0;
3269 : }
3270 : else
3271 : {
3272 : min_val = 0;
3273 : max_val = STORE_FLAG_VALUE;
3274 : }
3275 : }
3276 62615705 : else if (sign)
3277 : {
3278 55882271 : min_val = -(HOST_WIDE_INT_1U << (size - 1));
3279 55882271 : max_val = (HOST_WIDE_INT_1U << (size - 1)) - 1;
3280 : }
3281 : else
3282 : {
3283 6733434 : min_val = 0;
3284 6733434 : max_val = (HOST_WIDE_INT_1U << (size - 1) << 1) - 1;
3285 : }
3286 :
3287 62615705 : *mmin = gen_int_mode (min_val, target_mode);
3288 62615705 : *mmax = gen_int_mode (max_val, target_mode);
3289 62615705 : }
3290 :
3291 : #include "gt-stor-layout.h"
|