Line data Source code
1 : /* C-compiler utilities for types and variables storage layout
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "target.h"
25 : #include "function.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "memmodel.h"
29 : #include "tm_p.h"
30 : #include "stringpool.h"
31 : #include "regs.h"
32 : #include "emit-rtl.h"
33 : #include "cgraph.h"
34 : #include "diagnostic-core.h"
35 : #include "fold-const.h"
36 : #include "stor-layout.h"
37 : #include "varasm.h"
38 : #include "print-tree.h"
39 : #include "langhooks.h"
40 : #include "tree-inline.h"
41 : #include "dumpfile.h"
42 : #include "gimplify.h"
43 : #include "attribs.h"
44 : #include "debug.h"
45 : #include "calls.h"
46 :
47 : /* Data type for the expressions representing sizes of data types.
48 : It is the first integer type laid out. */
49 : tree sizetype_tab[(int) stk_type_kind_last];
50 :
51 : /* If nonzero, this is an upper limit on alignment of structure fields.
52 : The value is measured in bits. */
53 : unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
54 :
55 : static tree self_referential_size (tree);
56 : static void finalize_record_size (record_layout_info);
57 : static void finalize_type_size (tree);
58 : static void place_union_field (record_layout_info, tree);
59 : static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
60 : HOST_WIDE_INT, tree);
61 : extern void debug_rli (record_layout_info);
62 :
63 : /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
64 : to serve as the actual size-expression for a type or decl. */
65 :
66 : tree
67 662373 : variable_size (tree size)
68 : {
69 : /* Obviously. */
70 662373 : if (TREE_CONSTANT (size))
71 : return size;
72 :
73 : /* If the size is self-referential, we can't make a SAVE_EXPR (see
74 : save_expr for the rationale). But we can do something else. */
75 662311 : if (CONTAINS_PLACEHOLDER_P (size))
76 0 : return self_referential_size (size);
77 :
78 : /* If we are in the global binding level, we can't make a SAVE_EXPR
79 : since it may end up being shared across functions, so it is up
80 : to the front-end to deal with this case. */
81 662311 : if (lang_hooks.decls.global_bindings_p ())
82 : return size;
83 :
84 319344 : return save_expr (size);
85 : }
86 :
87 : /* An array of functions used for self-referential size computation. */
88 : static GTY(()) vec<tree, va_gc> *size_functions;
89 :
90 : /* Return true if T is a self-referential component reference. */
91 :
92 : static bool
93 0 : self_referential_component_ref_p (tree t)
94 : {
95 0 : if (TREE_CODE (t) != COMPONENT_REF)
96 : return false;
97 :
98 0 : while (REFERENCE_CLASS_P (t))
99 0 : t = TREE_OPERAND (t, 0);
100 :
101 0 : return (TREE_CODE (t) == PLACEHOLDER_EXPR);
102 : }
103 :
104 : /* Similar to copy_tree_r but do not copy component references involving
105 : PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr
106 : and substituted in substitute_in_expr. */
107 :
108 : static tree
109 0 : copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
110 : {
111 0 : enum tree_code code = TREE_CODE (*tp);
112 :
113 : /* Stop at types, decls, constants like copy_tree_r. */
114 0 : if (TREE_CODE_CLASS (code) == tcc_type
115 : || TREE_CODE_CLASS (code) == tcc_declaration
116 0 : || TREE_CODE_CLASS (code) == tcc_constant)
117 : {
118 0 : *walk_subtrees = 0;
119 0 : return NULL_TREE;
120 : }
121 :
122 : /* This is the pattern built in ada/make_aligning_type. */
123 0 : else if (code == ADDR_EXPR
124 0 : && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
125 : {
126 0 : *walk_subtrees = 0;
127 0 : return NULL_TREE;
128 : }
129 :
130 : /* Default case: the component reference. */
131 0 : else if (self_referential_component_ref_p (*tp))
132 : {
133 0 : *walk_subtrees = 0;
134 0 : return NULL_TREE;
135 : }
136 :
137 : /* We're not supposed to have them in self-referential size trees
138 : because we wouldn't properly control when they are evaluated.
139 : However, not creating superfluous SAVE_EXPRs requires accurate
140 : tracking of readonly-ness all the way down to here, which we
141 : cannot always guarantee in practice. So punt in this case. */
142 0 : else if (code == SAVE_EXPR)
143 0 : return error_mark_node;
144 :
145 0 : else if (code == STATEMENT_LIST)
146 0 : gcc_unreachable ();
147 :
148 0 : return copy_tree_r (tp, walk_subtrees, data);
149 : }
150 :
/* Given a SIZE expression that is self-referential (i.e. refers via
   PLACEHOLDER_EXPRs to the object whose size is being computed), return
   an equivalent expression to serve as the actual size expression for a
   type.  The self-references are factored out into parameters of a
   compiler-generated "size function" and the result is a call to that
   function; the generated functions are compiled at the end of the
   translation unit by finalize_size_functions.  */

static tree
self_referential_size (tree size)
{
  /* Counter used to give each generated size function a unique name.  */
  static unsigned HOST_WIDE_INT fnno = 0;
  vec<tree> self_refs = vNULL;
  tree param_type_list = NULL, param_decl_list = NULL;
  tree t, ref, return_type, fntype, fnname, fndecl;
  unsigned int i;
  char buf[128];
  vec<tree, va_gc> *args = NULL;

  /* Do not factor out simple operations.  */
  t = skip_simple_constant_arithmetic (size);
  if (TREE_CODE (t) == CALL_EXPR || self_referential_component_ref_p (t))
    return size;

  /* Collect the list of self-references in the expression.  */
  find_placeholder_in_expr (size, &self_refs);
  gcc_assert (self_refs.length () > 0);

  /* Obtain a private copy of the expression.  A non-null return from
     walk_tree means the copier hit a SAVE_EXPR (error_mark_node), in
     which case we give up and return the original expression.  */
  t = size;
  if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
    return size;
  size = t;

  /* Build the parameter and argument lists in parallel; also
     substitute the former for the latter in the expression.  */
  vec_alloc (args, self_refs.length ());
  FOR_EACH_VEC_ELT (self_refs, i, ref)
    {
      tree subst, param_name, param_type, param_decl;

      if (DECL_P (ref))
	{
	  /* We shouldn't have true variables here.  */
	  gcc_assert (TREE_READONLY (ref));
	  subst = ref;
	}
      /* This is the pattern built in ada/make_aligning_type.  */
      else if (TREE_CODE (ref) == ADDR_EXPR)
	subst = ref;
      /* Default case: the component reference.  Substitute on the
	 FIELD_DECL so every reference to that field is replaced.  */
      else
	subst = TREE_OPERAND (ref, 1);

      /* Parameters are named p0, p1, ...  */
      sprintf (buf, "p%d", i);
      param_name = get_identifier (buf);
      param_type = TREE_TYPE (ref);
      param_decl
	= build_decl (input_location, PARM_DECL, param_name, param_type);
      DECL_ARG_TYPE (param_decl) = param_type;
      DECL_ARTIFICIAL (param_decl) = 1;
      TREE_READONLY (param_decl) = 1;

      size = substitute_in_expr (size, subst, param_decl);

      param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
      param_decl_list = chainon (param_decl, param_decl_list);
      args->quick_push (ref);
    }

  self_refs.release ();

  /* Append 'void' to indicate that the number of parameters is fixed.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The 3 lists have been created in reverse order.  */
  param_type_list = nreverse (param_type_list);
  param_decl_list = nreverse (param_decl_list);

  /* Build the function type.  */
  return_type = TREE_TYPE (size);
  fntype = build_function_type (return_type, param_type_list);

  /* Build the function declaration.  */
  sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
  fnname = get_file_function_name (buf);
  fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = fndecl;
  DECL_ARGUMENTS (fndecl) = param_decl_list;
  DECL_RESULT (fndecl)
    = build_decl (input_location, RESULT_DECL, 0, return_type);
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The function has been created by the compiler and we don't
     want to emit debug info for it.  */
  DECL_ARTIFICIAL (fndecl) = 1;
  DECL_IGNORED_P (fndecl) = 1;

  /* It is supposed to be "const" and never throw.  */
  TREE_READONLY (fndecl) = 1;
  TREE_NOTHROW (fndecl) = 1;

  /* We want it to be inlined when this is deemed profitable, as
     well as discarded if every call has been integrated.  */
  DECL_DECLARED_INLINE_P (fndecl) = 1;

  /* It is made up of a unique return statement.  */
  DECL_INITIAL (fndecl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
  t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
  DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
  TREE_STATIC (fndecl) = 1;

  /* Put it onto the list of size functions.  */
  vec_safe_push (size_functions, fndecl);

  /* Replace the original expression with a call to the size function,
     passing the original self-references as arguments.  */
  return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
}
266 :
267 : /* Take, queue and compile all the size functions. It is essential that
268 : the size functions be gimplified at the very end of the compilation
269 : in order to guarantee transparent handling of self-referential sizes.
270 : Otherwise the GENERIC inliner would not be able to inline them back
271 : at each of their call sites, thus creating artificial non-constant
272 : size expressions which would trigger nasty problems later on. */
273 :
274 : void
275 255697 : finalize_size_functions (void)
276 : {
277 255697 : unsigned int i;
278 255697 : tree fndecl;
279 :
280 255697 : for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
281 : {
282 0 : allocate_struct_function (fndecl, false);
283 0 : set_cfun (NULL);
284 0 : dump_function (TDI_original, fndecl);
285 :
286 : /* As these functions are used to describe the layout of variable-length
287 : structures, debug info generation needs their implementation. */
288 0 : debug_hooks->size_function (fndecl);
289 0 : gimplify_function_tree (fndecl);
290 0 : cgraph_node::finalize_function (fndecl, false);
291 : }
292 :
293 255697 : vec_free (size_functions);
294 255697 : }
295 :
296 : /* Return a machine mode of class MCLASS with SIZE bits of precision,
297 : if one exists. The mode may have padding bits as well as the SIZE
298 : value bits. If LIMIT is nonzero, disregard modes wider than
299 : MAX_FIXED_MODE_SIZE. */
300 :
301 : opt_machine_mode
302 1215045411 : mode_for_size (poly_uint64 size, enum mode_class mclass, int limit)
303 : {
304 1215045411 : machine_mode mode;
305 1215045411 : int i;
306 :
307 1386114227 : if (limit && maybe_gt (size, (unsigned int) MAX_FIXED_MODE_SIZE))
308 38021243 : return opt_machine_mode ();
309 :
310 : /* Get the first mode which has this size, in the specified class. */
311 1741701430 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
312 1696820059 : if (known_eq (GET_MODE_PRECISION (mode), size))
313 1132142797 : return mode;
314 :
315 44881371 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
316 89732614 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
317 44866307 : if (known_eq (int_n_data[i].bitsize, size)
318 44866307 : && int_n_enabled_p[i])
319 0 : return int_n_data[i].m;
320 :
321 44881371 : return opt_machine_mode ();
322 : }
323 :
324 : /* Similar, except passed a tree node. */
325 :
326 : opt_machine_mode
327 164342715 : mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
328 : {
329 164342715 : unsigned HOST_WIDE_INT uhwi;
330 164342715 : unsigned int ui;
331 :
332 164342715 : if (!tree_fits_uhwi_p (size))
333 285763 : return opt_machine_mode ();
334 164056952 : uhwi = tree_to_uhwi (size);
335 164056952 : ui = uhwi;
336 164056952 : if (uhwi != ui)
337 565 : return opt_machine_mode ();
338 164056387 : return mode_for_size (ui, mclass, limit);
339 : }
340 :
341 : /* Return the narrowest mode of class MCLASS that contains at least
342 : SIZE bits, if such a mode exists. */
343 :
344 : opt_machine_mode
345 20993728 : smallest_mode_for_size (poly_uint64 size, enum mode_class mclass)
346 : {
347 20993728 : machine_mode mode = VOIDmode;
348 20993728 : int i;
349 :
350 : /* Get the first mode which has at least this size, in the
351 : specified class. */
352 69418426 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
353 69418426 : if (known_ge (GET_MODE_PRECISION (mode), size))
354 : break;
355 :
356 20993728 : if (mode == VOIDmode)
357 0 : return opt_machine_mode ();
358 :
359 20993728 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
360 41987456 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
361 20993728 : if (known_ge (int_n_data[i].bitsize, size)
362 20993238 : && known_lt (int_n_data[i].bitsize, GET_MODE_PRECISION (mode))
363 20993728 : && int_n_enabled_p[i])
364 0 : mode = int_n_data[i].m;
365 :
366 20993728 : return mode;
367 : }
368 :
369 : /* Return an integer mode of exactly the same size as MODE, if one exists. */
370 :
371 : opt_scalar_int_mode
372 1708706 : int_mode_for_mode (machine_mode mode)
373 : {
374 1708706 : switch (GET_MODE_CLASS (mode))
375 : {
376 1285517 : case MODE_INT:
377 1285517 : case MODE_PARTIAL_INT:
378 1285517 : return as_a <scalar_int_mode> (mode);
379 :
380 261281 : case MODE_COMPLEX_INT:
381 261281 : case MODE_COMPLEX_FLOAT:
382 261281 : case MODE_FLOAT:
383 261281 : case MODE_DECIMAL_FLOAT:
384 261281 : case MODE_FRACT:
385 261281 : case MODE_ACCUM:
386 261281 : case MODE_UFRACT:
387 261281 : case MODE_UACCUM:
388 261281 : case MODE_VECTOR_BOOL:
389 261281 : case MODE_VECTOR_INT:
390 261281 : case MODE_VECTOR_FLOAT:
391 261281 : case MODE_VECTOR_FRACT:
392 261281 : case MODE_VECTOR_ACCUM:
393 261281 : case MODE_VECTOR_UFRACT:
394 261281 : case MODE_VECTOR_UACCUM:
395 522562 : return int_mode_for_size (GET_MODE_BITSIZE (mode), 0);
396 :
397 0 : case MODE_OPAQUE:
398 0 : return opt_scalar_int_mode ();
399 :
400 161908 : case MODE_RANDOM:
401 161908 : if (mode == BLKmode)
402 161908 : return opt_scalar_int_mode ();
403 :
404 : /* fall through */
405 :
406 0 : case MODE_CC:
407 0 : default:
408 0 : gcc_unreachable ();
409 : }
410 : }
411 :
/* Find a mode that can be used for efficient bitwise operations on MODE,
   if one exists.  */

opt_machine_mode
bitwise_mode_for_mode (machine_mode mode)
{
  /* Quick exit if we already have a suitable mode: a scalar integer
     mode no wider than MAX_FIXED_MODE_SIZE can be used as-is.  */
  scalar_int_mode int_mode;
  if (is_a <scalar_int_mode> (mode, &int_mode)
      && GET_MODE_BITSIZE (int_mode) <= MAX_FIXED_MODE_SIZE)
    return int_mode;

  /* Reuse the sanity checks from int_mode_for_mode.  The comma
     expression runs the call purely for its internal assertions;
     the result is discarded.  */
  gcc_checking_assert ((int_mode_for_mode (mode), true));

  poly_int64 bitsize = GET_MODE_BITSIZE (mode);

  /* Try to replace complex modes with complex modes.  In general we
     expect both components to be processed independently, so we only
     care whether there is a register for the inner mode.  */
  if (COMPLEX_MODE_P (mode))
    {
      machine_mode trial = mode;
      if ((GET_MODE_CLASS (trial) == MODE_COMPLEX_INT
	   || mode_for_size (bitsize, MODE_COMPLEX_INT, false).exists (&trial))
	  && have_regs_of_mode[GET_MODE_INNER (trial)])
	return trial;
    }

  /* Try to replace vector modes with vector modes.  Also try using vector
     modes if an integer mode would be too big.  */
  if (VECTOR_MODE_P (mode)
      || maybe_gt (bitsize, MAX_FIXED_MODE_SIZE))
    {
      machine_mode trial = mode;
      if ((GET_MODE_CLASS (trial) == MODE_VECTOR_INT
	   || mode_for_size (bitsize, MODE_VECTOR_INT, 0).exists (&trial))
	  && have_regs_of_mode[trial]
	  && targetm.vector_mode_supported_p (trial))
	return trial;
    }

  /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE.  */
  return mode_for_size (bitsize, MODE_INT, true);
}
457 :
458 : /* Find a type that can be used for efficient bitwise operations on MODE.
459 : Return null if no such mode exists. */
460 :
461 : tree
462 24159 : bitwise_type_for_mode (machine_mode mode)
463 : {
464 24159 : if (!bitwise_mode_for_mode (mode).exists (&mode))
465 120 : return NULL_TREE;
466 :
467 24039 : unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
468 24039 : tree inner_type = build_nonstandard_integer_type (inner_size, true);
469 :
470 24039 : if (VECTOR_MODE_P (mode))
471 244 : return build_vector_type_for_mode (inner_type, mode);
472 :
473 23795 : if (COMPLEX_MODE_P (mode))
474 247 : return build_complex_type (inner_type);
475 :
476 47096 : gcc_checking_assert (GET_MODE_INNER (mode) == mode);
477 : return inner_type;
478 : }
479 :
480 : /* Find a mode that can be used for efficient bitwise operations on SIZE
481 : bits, if one exists. */
482 :
483 : opt_machine_mode
484 22763 : bitwise_mode_for_size (poly_uint64 size)
485 : {
486 45526 : if (known_le (size, (unsigned int) MAX_FIXED_MODE_SIZE))
487 22516 : return mode_for_size (size, MODE_INT, true);
488 :
489 : machine_mode mode, ret = VOIDmode;
490 4369 : FOR_EACH_MODE_FROM (mode, MIN_MODE_VECTOR_INT)
491 8730 : if (known_eq (GET_MODE_BITSIZE (mode), size)
492 263 : && (ret == VOIDmode || GET_MODE_INNER (mode) == QImode)
493 263 : && have_regs_of_mode[mode]
494 4608 : && targetm.vector_mode_supported_p (mode))
495 : {
496 486 : if (GET_MODE_INNER (mode) == QImode)
497 243 : return mode;
498 0 : else if (ret == VOIDmode)
499 4122 : ret = mode;
500 : }
501 4 : if (ret != VOIDmode)
502 0 : return ret;
503 4 : return opt_machine_mode ();
504 : }
505 :
506 : /* Find a mode that is suitable for representing a vector with NUNITS
507 : elements of mode INNERMODE, if one exists. The returned mode can be
508 : either an integer mode or a vector mode. */
509 :
510 : opt_machine_mode
511 69151971 : mode_for_vector (scalar_mode innermode, poly_uint64 nunits)
512 : {
513 69151971 : machine_mode mode;
514 :
515 : /* First, look for a supported vector type. */
516 69151971 : if (SCALAR_FLOAT_MODE_P (innermode))
517 : mode = MIN_MODE_VECTOR_FLOAT;
518 65339348 : else if (SCALAR_FRACT_MODE_P (innermode))
519 : mode = MIN_MODE_VECTOR_FRACT;
520 65339348 : else if (SCALAR_UFRACT_MODE_P (innermode))
521 : mode = MIN_MODE_VECTOR_UFRACT;
522 65339348 : else if (SCALAR_ACCUM_MODE_P (innermode))
523 : mode = MIN_MODE_VECTOR_ACCUM;
524 65339348 : else if (SCALAR_UACCUM_MODE_P (innermode))
525 : mode = MIN_MODE_VECTOR_UACCUM;
526 : else
527 69151971 : mode = MIN_MODE_VECTOR_INT;
528 :
529 : /* Only check the broader vector_mode_supported_any_target_p here.
530 : We'll filter through target-specific availability and
531 : vector_mode_supported_p later in vector_type_mode. */
532 827052959 : FOR_EACH_MODE_FROM (mode, mode)
533 1653508040 : if (known_eq (GET_MODE_NUNITS (mode), nunits)
534 378329006 : && GET_MODE_INNER (mode) == innermode
535 895607052 : && targetm.vector_mode_supported_any_target_p (mode))
536 68853032 : return mode;
537 :
538 : /* For integers, try mapping it to a same-sized scalar mode. */
539 298939 : if (GET_MODE_CLASS (innermode) == MODE_INT)
540 : {
541 16956 : poly_uint64 nbits = nunits * GET_MODE_BITSIZE (innermode);
542 16956 : if (int_mode_for_size (nbits, 0).exists (&mode)
543 16214 : && have_regs_of_mode[mode])
544 16214 : return mode;
545 : }
546 :
547 282725 : return opt_machine_mode ();
548 : }
549 :
550 : /* If a piece of code is using vector mode VECTOR_MODE and also wants
551 : to operate on elements of mode ELEMENT_MODE, return the vector mode
552 : it should use for those elements. If NUNITS is nonzero, ensure that
553 : the mode has exactly NUNITS elements, otherwise pick whichever vector
554 : size pairs the most naturally with VECTOR_MODE; this may mean choosing
555 : a mode with a different size and/or number of elements, depending on
556 : what the target prefers. Return an empty opt_machine_mode if there
557 : is no supported vector mode with the required properties.
558 :
559 : Unlike mode_for_vector, any returned mode is guaranteed to satisfy
560 : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
561 :
opt_machine_mode
related_vector_mode (machine_mode vector_mode, scalar_mode element_mode,
		     poly_uint64 nunits)
{
  /* Only vector modes are meaningful here; the choice itself is
     delegated entirely to the target's vectorize hook.  */
  gcc_assert (VECTOR_MODE_P (vector_mode));
  return targetm.vectorize.related_mode (vector_mode, element_mode, nunits);
}
569 :
570 : /* If a piece of code is using vector mode VECTOR_MODE and also wants
571 : to operate on integer vectors with the same element size and number
572 : of elements, return the vector mode it should use. Return an empty
573 : opt_machine_mode if there is no supported vector mode with the
574 : required properties.
575 :
576 : Unlike mode_for_vector, any returned mode is guaranteed to satisfy
577 : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
578 :
579 : opt_machine_mode
580 27715 : related_int_vector_mode (machine_mode vector_mode)
581 : {
582 27715 : gcc_assert (VECTOR_MODE_P (vector_mode));
583 27715 : scalar_int_mode int_mode;
584 55430 : if (int_mode_for_mode (GET_MODE_INNER (vector_mode)).exists (&int_mode))
585 55430 : return related_vector_mode (vector_mode, int_mode,
586 27715 : GET_MODE_NUNITS (vector_mode));
587 0 : return opt_machine_mode ();
588 : }
589 :
590 : /* Return the alignment of MODE. This will be bounded by 1 and
591 : BIGGEST_ALIGNMENT. */
592 :
593 : unsigned int
594 1669399475 : get_mode_alignment (machine_mode mode)
595 : {
596 3242360142 : return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
597 : }
598 :
599 : /* Return the natural mode of an array, given that it is SIZE bytes in
600 : total and has elements of type ELEM_TYPE. */
601 :
602 : static machine_mode
603 68157914 : mode_for_array (tree elem_type, tree size)
604 : {
605 68157914 : tree elem_size;
606 68157914 : poly_uint64 int_size, int_elem_size;
607 68157914 : unsigned HOST_WIDE_INT num_elems;
608 68157914 : bool limit_p;
609 :
610 : /* One-element arrays get the component type's mode. */
611 68157914 : elem_size = TYPE_SIZE (elem_type);
612 68157914 : if (simple_cst_equal (size, elem_size))
613 4655452 : return TYPE_MODE (elem_type);
614 :
615 63502462 : limit_p = true;
616 63502462 : if (poly_int_tree_p (size, &int_size)
617 63216699 : && poly_int_tree_p (elem_size, &int_elem_size)
618 63216699 : && maybe_ne (int_elem_size, 0U)
619 63502462 : && constant_multiple_p (int_size, int_elem_size, &num_elems))
620 : {
621 63216699 : machine_mode elem_mode = TYPE_MODE (elem_type);
622 63216699 : machine_mode mode;
623 63216699 : if (targetm.array_mode (elem_mode, num_elems).exists (&mode))
624 0 : return mode;
625 63216699 : if (targetm.array_mode_supported_p (elem_mode, num_elems))
626 63502462 : limit_p = false;
627 : }
628 63502462 : return mode_for_size_tree (size, MODE_INT, limit_p).else_blk ();
629 : }
630 :
631 : /* Subroutine of layout_decl: Force alignment required for the data type.
632 : But if the decl itself wants greater alignment, don't override that. */
633 :
634 : static inline void
635 2017849743 : do_type_align (tree type, tree decl)
636 : {
637 2017849743 : if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
638 : {
639 1898886469 : SET_DECL_ALIGN (decl, TYPE_ALIGN (type));
640 1898886469 : if (TREE_CODE (decl) == FIELD_DECL)
641 63284510 : DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
642 : }
643 2017849743 : if (TYPE_WARN_IF_NOT_ALIGN (type) > DECL_WARN_IF_NOT_ALIGN (decl))
644 51 : SET_DECL_WARN_IF_NOT_ALIGN (decl, TYPE_WARN_IF_NOT_ALIGN (type));
645 2017849743 : }
646 :
/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;
  location_t loc = DECL_SOURCE_LOCATION (decl);

  /* CONST_DECLs need no layout at all.  */
  if (code == CONST_DECL)
    return;

  gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
	      || code == TYPE_DECL || code == FIELD_DECL);

  rtl = DECL_RTL_IF_SET (decl);

  /* Recover from erroneous types by laying out as void.  */
  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    SET_DECL_MODE (decl, TYPE_MODE (type));

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    /* Derive the byte size from the bit size, rounding up.  */
    DECL_SIZE_UNIT (decl)
      = fold_convert_loc (loc, sizetype,
			  size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
					  bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      bool old_user_align = DECL_USER_ALIGN (decl);
      bool zero_bitfield = false;
      bool packed_p = DECL_PACKED (decl);
      unsigned int mfa;

      if (DECL_BIT_FIELD (decl))
	{
	  DECL_BIT_FIELD_TYPE (decl) = type;

	  /* A zero-length bit-field affects the alignment of the next
	     field.  In essence such bit-fields are not influenced by
	     any packing due to #pragma pack or attribute packed.  */
	  if (integer_zerop (DECL_SIZE (decl))
	      && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
	    {
	      zero_bitfield = true;
	      packed_p = false;
	      if (PCC_BITFIELD_TYPE_MATTERS)
		do_type_align (type, decl);
	      else
		{
#ifdef EMPTY_FIELD_BOUNDARY
		  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
		    {
		      SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY);
		      DECL_USER_ALIGN (decl) = 0;
		    }
#endif
		}
	    }

	  /* See if we can use an ordinary integer mode for a bit-field.
	     Conditions are: a fixed size that is correct for another mode,
	     occupying a complete byte or bytes on proper boundary.  */
	  if (TYPE_SIZE (type) != 0
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
	    {
	      machine_mode xmode;
	      if (mode_for_size_tree (DECL_SIZE (decl),
				      MODE_INT, 1).exists (&xmode))
		{
		  unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
		  /* Don't promote a packed field past byte alignment,
		     and only promote when the known alignment of the
		     position suffices for the candidate mode.  */
		  if (!(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
		      && (known_align == 0 || known_align >= xalign))
		    {
		      SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)));
		      SET_DECL_MODE (decl, xmode);
		      DECL_BIT_FIELD (decl) = 0;
		    }
		}
	    }

	  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
	  if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
	      && known_align >= TYPE_ALIGN (type)
	      && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
	    DECL_BIT_FIELD (decl) = 0;
	}
      else if (packed_p && DECL_USER_ALIGN (decl))
	/* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
	   round up; we'll reduce it again below.  We want packing to
	   supersede USER_ALIGN inherited from the type, but defer to
	   alignment explicitly specified on the field decl.  */;
      else
	do_type_align (type, decl);

      /* If the field is packed and not explicitly aligned, give it the
	 minimum alignment.  Note that do_type_align may set
	 DECL_USER_ALIGN, so we need to check old_user_align instead.  */
      if (packed_p
	  && !old_user_align)
	SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT));

      if (! packed_p && ! DECL_USER_ALIGN (decl))
	{
	  /* Some targets (i.e. i386, VMS) limit struct field alignment
	     to a lower boundary than alignment of variables unless
	     it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
	  SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),
				     (unsigned) BIGGEST_FIELD_ALIGNMENT));
#endif
#ifdef ADJUST_FIELD_ALIGN
	  SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, TREE_TYPE (decl),
						    DECL_ALIGN (decl)));
#endif
	}

      /* Zero-length bit-fields ignore #pragma pack; use the initial
	 maximum field alignment instead of the current one.  */
      if (zero_bitfield)
	mfa = initial_max_fld_align * BITS_PER_UNIT;
      else
	mfa = maximum_field_alignment;
      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (mfa != 0)
	SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa));
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if ((code == PARM_DECL || (code == VAR_DECL && !DECL_NONLOCAL_FRAME (decl)))
      && !DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST)
	{
	  /* -Wlarger-than= argument of HOST_WIDE_INT_MAX is treated
	     as if PTRDIFF_MAX had been specified, with the value
	     being that on the target rather than the host.  */
	  unsigned HOST_WIDE_INT max_size = warn_larger_than_size;
	  if (max_size == HOST_WIDE_INT_MAX)
	    max_size = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

	  if (compare_tree_int (size, max_size) > 0)
	    warning (OPT_Wlarger_than_, "size of %q+D %E bytes exceeds "
		     "maximum object size %wu",
		     decl, size, max_size);
	}
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      if (MEM_P (rtl))
	set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
847 :
848 : /* Given a VAR_DECL, PARM_DECL, RESULT_DECL, or FIELD_DECL, clears the
849 : results of a previous call to layout_decl and calls it again. */
850 :
851 : void
852 487642274 : relayout_decl (tree decl)
853 : {
854 487642274 : DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
855 487642274 : SET_DECL_MODE (decl, VOIDmode);
856 487642274 : if (!DECL_USER_ALIGN (decl))
857 487641092 : SET_DECL_ALIGN (decl, 0);
858 487642274 : if (DECL_RTL_SET_P (decl))
859 0 : SET_DECL_RTL (decl, 0);
860 :
861 487642274 : layout_decl (decl, 0);
862 487642274 : }
863 :
864 : /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
865 : QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
866 : is to be passed to all other layout functions for this record. It is the
867 : responsibility of the caller to call `free' for the storage returned.
868 : Note that garbage collection is not permitted until we finish laying
869 : out the record. */
870 :
871 : record_layout_info
872 60930906 : start_record_layout (tree t)
873 : {
874 60930906 : record_layout_info rli = XNEW (struct record_layout_info_s);
875 :
876 60930906 : rli->t = t;
877 :
878 : /* If the type has a minimum specified alignment (via an attribute
879 : declaration, for example) use it -- otherwise, start with a
880 : one-byte alignment. */
881 60930906 : rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
882 60930906 : rli->unpacked_align = rli->record_align;
883 121550748 : rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
884 :
885 : #ifdef STRUCTURE_SIZE_BOUNDARY
886 : /* Packed structures don't need to have minimum size. */
887 : if (! TYPE_PACKED (t))
888 : {
889 : unsigned tmp;
890 :
891 : /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */
892 : tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
893 : if (maximum_field_alignment != 0)
894 : tmp = MIN (tmp, maximum_field_alignment);
895 : rli->record_align = MAX (rli->record_align, tmp);
896 : }
897 : #endif
898 :
899 60930906 : rli->offset = size_zero_node;
900 60930906 : rli->bitpos = bitsize_zero_node;
901 60930906 : rli->prev_field = 0;
902 60930906 : rli->pending_statics = 0;
903 60930906 : rli->packed_maybe_necessary = 0;
904 60930906 : rli->remaining_in_alignment = 0;
905 :
906 60930906 : return rli;
907 : }
908 :
909 : /* Fold sizetype value X to bitsizetype, given that X represents a type
910 : size or offset. */
911 :
912 : static tree
913 374035758 : bits_from_bytes (tree x)
914 : {
915 374035758 : if (POLY_INT_CST_P (x))
916 : /* The runtime calculation isn't allowed to overflow sizetype;
917 : increasing the runtime values must always increase the size
918 : or offset of the object. This means that the object imposes
919 : a maximum value on the runtime parameters, but we don't record
920 : what that is. */
921 : return build_poly_int_cst
922 : (bitsizetype,
923 : poly_wide_int::from (poly_int_cst_value (x),
924 : TYPE_PRECISION (bitsizetype),
925 : TYPE_SIGN (TREE_TYPE (x))));
926 374035758 : x = fold_convert (bitsizetype, x);
927 374035758 : gcc_checking_assert (x);
928 374035758 : return x;
929 : }
930 :
931 : /* Return the combined bit position for the byte offset OFFSET and the
932 : bit position BITPOS.
933 :
934 : These functions operate on byte and bit positions present in FIELD_DECLs
935 : and assume that these expressions result in no (intermediate) overflow.
936 : This assumption is necessary to fold the expressions as much as possible,
937 : so as to avoid creating artificially variable-sized types in languages
938 : supporting variable-sized types like Ada. */
939 :
940 : tree
941 305293951 : bit_from_pos (tree offset, tree bitpos)
942 : {
943 305293951 : return size_binop (PLUS_EXPR, bitpos,
944 : size_binop (MULT_EXPR, bits_from_bytes (offset),
945 : bitsize_unit_node));
946 : }
947 :
948 : /* Return the combined truncated byte position for the byte offset OFFSET and
949 : the bit position BITPOS. */
950 :
951 : tree
952 327711991 : byte_from_pos (tree offset, tree bitpos)
953 : {
954 327711991 : tree bytepos;
955 327711991 : if (TREE_CODE (bitpos) == MULT_EXPR
956 327711991 : && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
957 0 : bytepos = TREE_OPERAND (bitpos, 0);
958 : else
959 327711991 : bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
960 327711991 : return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
961 : }
962 :
963 : /* Split the bit position POS into a byte offset *POFFSET and a bit
964 : position *PBITPOS with the byte offset aligned to OFF_ALIGN bits. */
965 :
966 : void
967 71444525 : pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
968 : tree pos)
969 : {
970 71444525 : tree toff_align = bitsize_int (off_align);
971 71444525 : if (TREE_CODE (pos) == MULT_EXPR
972 71444525 : && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
973 : {
974 0 : *poffset = size_binop (MULT_EXPR,
975 : fold_convert (sizetype, TREE_OPERAND (pos, 0)),
976 : size_int (off_align / BITS_PER_UNIT));
977 0 : *pbitpos = bitsize_zero_node;
978 : }
979 : else
980 : {
981 71444525 : *poffset = size_binop (MULT_EXPR,
982 : fold_convert (sizetype,
983 : size_binop (FLOOR_DIV_EXPR, pos,
984 : toff_align)),
985 : size_int (off_align / BITS_PER_UNIT));
986 71444525 : *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
987 : }
988 71444525 : }
989 :
990 : /* Given a pointer to bit and byte offsets and an offset alignment,
991 : normalize the offsets so they are within the alignment. */
992 :
993 : void
994 219105910 : normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
995 : {
996 : /* If the bit position is now larger than it should be, adjust it
997 : downwards. */
998 219105910 : if (compare_tree_int (*pbitpos, off_align) >= 0)
999 : {
1000 71444525 : tree offset, bitpos;
1001 71444525 : pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
1002 71444525 : *poffset = size_binop (PLUS_EXPR, *poffset, offset);
1003 71444525 : *pbitpos = bitpos;
1004 : }
1005 219105910 : }
1006 :
/* Print debugging information about the information in RLI to stderr.  */

DEBUG_FUNCTION void
debug_rli (record_layout_info rli)
{
  /* The type being laid out, and the position reached so far as a byte
     offset plus a bit offset.  */
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
	   rli->record_align, rli->unpacked_align,
	   rli->offset_align);

  /* The ms_struct layout code is the only code that uses this field.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);

  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  /* Static members queued by place_field, to be laid out after the
     record itself.  */
  if (!vec_safe_is_empty (rli->pending_statics))
    {
      fprintf (stderr, "pending statics:\n");
      debug (rli->pending_statics);
    }
}
1033 :
/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  Whole
   OFFSET_ALIGN-sized chunks of the bit position are folded into the
   byte offset.  */

void
normalize_rli (record_layout_info rli)
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}
1042 :
/* Returns the size in bytes allocated so far, i.e. RLI's current byte
   offset plus its bit position truncated down to whole bytes.  */

tree
rli_size_unit_so_far (record_layout_info rli)
{
  return byte_from_pos (rli->offset, rli->bitpos);
}
1050 :
/* Returns the size in bits allocated so far, i.e. RLI's byte offset
   scaled to bits plus its current bit position.  */

tree
rli_size_so_far (record_layout_info rli)
{
  return bit_from_pos (rli->offset, rli->bitpos);
}
1058 :
/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location within the record is given by KNOWN_ALIGN.
   Update the variable alignment fields in RLI, and return the alignment
   to give the FIELD.  Returns 0 for an ERROR_MARK type.  */

unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
			    unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Do not attempt to align an ERROR_MARK node.  */
  if (TREE_CODE (type) == ERROR_MARK)
    return 0;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  /* A bit-field with a nonzero-sized type counts as a bitfield here;
     zero-sized types are handled by the branches below.  */
  is_bitfield = (type != error_mark_node
		 && DECL_BIT_FIELD_TYPE (field)
		 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
	 affect the alignment of a record; even a zero-sized field
	 can do this.  The alignment should be to the alignment of
	 the type, except that for zero-size bitfields this only
	 applies if there was an immediately prior, nonzero-size
	 bitfield.  (That's the way it is, experimentally.) */
      if (!is_bitfield
	  || ((DECL_SIZE (field) == NULL_TREE
	       || !integer_zerop (DECL_SIZE (field)))
	      ? !DECL_PACKED (field)
	      : (rli->prev_field
		 && DECL_BIT_FIELD_TYPE (rli->prev_field)
		 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
	{
	  unsigned int type_align = TYPE_ALIGN (type);
	  /* Packed non-bitfields use the field's own (packed) alignment;
	     everything else uses the stricter of type and field.  */
	  if (!is_bitfield && DECL_PACKED (field))
	    type_align = desired_align;
	  else
	    type_align = MAX (type_align, desired_align);
	  /* #pragma pack / -fpack-struct caps the result.  */
	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  rli->record_align = MAX (rli->record_align, type_align);
	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	}
    }
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
	 alignment implied by their type.  Some targets also apply the same
	 rules to unnamed bitfields.  */
      if (DECL_NAME (field) != 0
	  || targetm.align_anon_bitfield ())
	{
	  unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
	  /* Let the target adjust the type alignment unless the user
	     aligned the type explicitly.  */
	  if (! TYPE_USER_ALIGN (type))
	    type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
#endif

	  /* Targets might choose to handle unnamed and hence possibly
	     zero-width bitfields.  Those are not influenced by #pragmas
	     or packed attributes.  */
	  if (integer_zerop (DECL_SIZE (field)))
	    {
	      if (initial_max_fld_align)
		type_align = MIN (type_align,
				  initial_max_fld_align * BITS_PER_UNIT);
	    }
	  else if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  else if (DECL_PACKED (field))
	    type_align = MIN (type_align, BITS_PER_UNIT);

	  /* The alignment of the record is increased to the maximum
	     of the current alignment, the alignment indicated on the
	     field (i.e., the alignment specified by an __aligned__
	     attribute), and the alignment indicated by the type of
	     the field.  */
	  rli->record_align = MAX (rli->record_align, desired_align);
	  rli->record_align = MAX (rli->record_align, type_align);

	  /* unpacked_align is only consulted for -Wpacked warnings.  */
	  if (warn_packed)
	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	  user_align |= TYPE_USER_ALIGN (type);
	}
    }
  else
    {
      /* Ordinary (non-bitfield) case: record and unpacked alignment
	 both absorb the field's requirements.  */
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  /* Propagate any explicit user alignment up to the record type.  */
  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}
1171 :
/* Issue a warning if the record alignment, RECORD_ALIGN, is less than
   the field alignment of FIELD or FIELD isn't aligned.  Handles both
   -Wif-not-aligned (warn_if_not_aligned attribute) and
   -Wpacked-not-aligned (aligned attribute on the field's type).  */

static void
handle_warn_if_not_align (tree field, unsigned int record_align)
{
  tree type = TREE_TYPE (field);

  if (type == error_mark_node)
    return;

  /* Required alignment in bits; stays 0 if no warning applies.  */
  unsigned int warn_if_not_align = 0;

  /* Which -W option to attribute the warning to.  */
  int opt_w = 0;

  if (warn_if_not_aligned)
    {
      /* The decl's own warn_if_not_aligned value takes precedence over
	 the one on its type.  */
      warn_if_not_align = DECL_WARN_IF_NOT_ALIGN (field);
      if (!warn_if_not_align)
	warn_if_not_align = TYPE_WARN_IF_NOT_ALIGN (type);
      if (warn_if_not_align)
	opt_w = OPT_Wif_not_aligned;
    }

  /* Otherwise, with -Wpacked-not-aligned, an explicit aligned attribute
     on the field's type also establishes a required alignment.  */
  if (!warn_if_not_align
      && warn_packed_not_aligned
      && lookup_attribute ("aligned", TYPE_ATTRIBUTES (type)))
    {
      warn_if_not_align = TYPE_ALIGN (type);
      opt_w = OPT_Wpacked_not_aligned;
    }

  if (!warn_if_not_align)
    return;

  tree context = DECL_CONTEXT (field);

  /* Both warnings report in bytes.  */
  warn_if_not_align /= BITS_PER_UNIT;
  record_align /= BITS_PER_UNIT;
  if ((record_align % warn_if_not_align) != 0)
    warning (opt_w, "alignment %u of %qT is less than %u",
	     record_align, context, warn_if_not_align);

  /* Also check that the field's byte offset is a multiple of the
     required alignment; a variable offset gets a softer "may not be"
     wording.  */
  tree off = byte_position (field);
  if (!multiple_of_p (TREE_TYPE (off), off, size_int (warn_if_not_align)))
    {
      if (TREE_CODE (off) == INTEGER_CST)
	warning (opt_w, "%q+D offset %E in %qT isn%'t aligned to %u",
		 field, off, context, warn_if_not_align);
      else
	warning (opt_w, "%q+D offset %E in %qT may not be aligned to %u",
		 field, off, context, warn_if_not_align);
    }
}
1226 :
/* Called from place_field to handle unions: every field of a UNION_TYPE
   or QUAL_UNION_TYPE is placed at offset zero, and RLI's running size
   accumulates the maximum (or qualifier-selected) field size.  */

static void
place_union_field (record_layout_info rli, tree field)
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  /* All union members start at the beginning of the union.  */
  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
  handle_warn_if_not_align (field, rli->record_align);

  /* If this is an ERROR_MARK return *after* having set the
     field at the start of the union.  This helps when parsing
     invalid fields.  */
  if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
    return;

  /* A typeless-storage member makes the whole union typeless.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (field))
      && TYPE_TYPELESS_STORAGE (TREE_TYPE (field)))
    TYPE_TYPELESS_STORAGE (rli->t) = 1;

  /* We might see a flexible array member field (with no DECL_SIZE_UNIT), use
     zero size for such field.  */
  tree field_size_unit = DECL_SIZE_UNIT (field)
			 ? DECL_SIZE_UNIT (field)
			 : build_int_cst (sizetype, 0);
  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, field_size_unit);
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
			       field_size_unit, rli->offset);
}
1262 :
1263 : /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
1264 : at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
1265 : units of alignment than the underlying TYPE. */
1266 : static int
1267 533089 : excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
1268 : HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
1269 : {
1270 : /* Note that the calculation of OFFSET might overflow; we calculate it so
1271 : that we still get the right result as long as ALIGN is a power of two. */
1272 533089 : unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
1273 :
1274 533089 : offset = offset % align;
1275 533089 : return ((offset + size + align - 1) / align
1276 533089 : > tree_to_uhwi (TYPE_SIZE (type)) / align);
1277 : }
1278 :
1279 : /* RLI contains information about the layout of a RECORD_TYPE. FIELD
1280 : is a FIELD_DECL to be added after those fields already present in
1281 : T. (FIELD is not actually added to the TYPE_FIELDS list here;
1282 : callers that desire that behavior must manually perform that step.) */
1283 :
1284 : void
1285 405049549 : place_field (record_layout_info rli, tree field)
1286 : {
1287 : /* The alignment required for FIELD. */
1288 405049549 : unsigned int desired_align;
1289 : /* The alignment FIELD would have if we just dropped it into the
1290 : record as it presently stands. */
1291 405049549 : unsigned int known_align;
1292 405049549 : unsigned int actual_align;
1293 : /* The type of this field. */
1294 405049549 : tree type = TREE_TYPE (field);
1295 :
1296 405049549 : gcc_assert (TREE_CODE (field) != ERROR_MARK);
1297 :
1298 : /* If FIELD is static, then treat it like a separate variable, not
1299 : really like a structure field. If it is a FUNCTION_DECL, it's a
1300 : method. In both cases, all we do is lay out the decl, and we do
1301 : it *after* the record is laid out. */
1302 405049549 : if (VAR_P (field))
1303 : {
1304 13649939 : vec_safe_push (rli->pending_statics, field);
1305 13649939 : return;
1306 : }
1307 :
1308 : /* Enumerators and enum types which are local to this class need not
1309 : be laid out. Likewise for initialized constant fields. */
1310 391399610 : else if (TREE_CODE (field) != FIELD_DECL)
1311 : return;
1312 :
1313 : /* Unions are laid out very differently than records, so split
1314 : that code off to another function. */
1315 67148273 : else if (TREE_CODE (rli->t) != RECORD_TYPE)
1316 : {
1317 2259996 : place_union_field (rli, field);
1318 2259996 : return;
1319 : }
1320 :
1321 64888277 : else if (TREE_CODE (type) == ERROR_MARK)
1322 : {
1323 : /* Place this field at the current allocation position, so we
1324 : maintain monotonicity. */
1325 38 : DECL_FIELD_OFFSET (field) = rli->offset;
1326 38 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1327 38 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1328 38 : handle_warn_if_not_align (field, rli->record_align);
1329 38 : return;
1330 : }
1331 :
1332 64888239 : if (AGGREGATE_TYPE_P (type)
1333 64888239 : && TYPE_TYPELESS_STORAGE (type))
1334 1264307 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1335 :
1336 : /* Work out the known alignment so far. Note that A & (-A) is the
1337 : value of the least-significant bit in A that is one. */
1338 64888239 : if (! integer_zerop (rli->bitpos))
1339 18781639 : known_align = least_bit_hwi (tree_to_uhwi (rli->bitpos));
1340 46106600 : else if (integer_zerop (rli->offset))
1341 : known_align = 0;
1342 8634941 : else if (tree_fits_uhwi_p (rli->offset))
1343 8634174 : known_align = (BITS_PER_UNIT
1344 8634174 : * least_bit_hwi (tree_to_uhwi (rli->offset)));
1345 : else
1346 767 : known_align = rli->offset_align;
1347 :
1348 64888239 : desired_align = update_alignment_for_field (rli, field, known_align);
1349 64888239 : if (known_align == 0)
1350 74819532 : known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1351 :
1352 64888239 : if (warn_packed && DECL_PACKED (field))
1353 : {
1354 3 : if (known_align >= TYPE_ALIGN (type))
1355 : {
1356 3 : if (TYPE_ALIGN (type) > desired_align)
1357 : {
1358 3 : if (STRICT_ALIGNMENT)
1359 : warning (OPT_Wattributes, "packed attribute causes "
1360 : "inefficient alignment for %q+D", field);
1361 : /* Don't warn if DECL_PACKED was set by the type. */
1362 3 : else if (!TYPE_PACKED (rli->t))
1363 0 : warning (OPT_Wattributes, "packed attribute is "
1364 : "unnecessary for %q+D", field);
1365 : }
1366 : }
1367 : else
1368 0 : rli->packed_maybe_necessary = 1;
1369 : }
1370 :
1371 : /* Does this field automatically have alignment it needs by virtue
1372 : of the fields that precede it and the record's own alignment? */
1373 64888239 : if (known_align < desired_align
1374 64888239 : && (! targetm.ms_bitfield_layout_p (rli->t)
1375 17 : || rli->prev_field == NULL))
1376 : {
1377 : /* No, we need to skip space before this field.
1378 : Bump the cumulative size to multiple of field alignment. */
1379 :
1380 1448105 : if (!targetm.ms_bitfield_layout_p (rli->t)
1381 1448102 : && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION
1382 2881326 : && !TYPE_ARTIFICIAL (rli->t))
1383 1433190 : warning (OPT_Wpadded, "padding struct to align %q+D", field);
1384 :
1385 : /* If the alignment is still within offset_align, just align
1386 : the bit position. */
1387 1448105 : if (desired_align < rli->offset_align)
1388 1413609 : rli->bitpos = round_up (rli->bitpos, desired_align);
1389 : else
1390 : {
1391 : /* First adjust OFFSET by the partial bits, then align. */
1392 34496 : rli->offset
1393 34496 : = size_binop (PLUS_EXPR, rli->offset,
1394 : fold_convert (sizetype,
1395 : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1396 : bitsize_unit_node)));
1397 34496 : rli->bitpos = bitsize_zero_node;
1398 :
1399 34496 : rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
1400 : }
1401 :
1402 1448105 : if (! TREE_CONSTANT (rli->offset))
1403 416 : rli->offset_align = desired_align;
1404 : }
1405 :
1406 : /* Handle compatibility with PCC. Note that if the record has any
1407 : variable-sized fields, we need not worry about compatibility. */
1408 64888239 : if (PCC_BITFIELD_TYPE_MATTERS
1409 64888239 : && ! targetm.ms_bitfield_layout_p (rli->t)
1410 64888038 : && TREE_CODE (field) == FIELD_DECL
1411 64888038 : && type != error_mark_node
1412 64888038 : && DECL_BIT_FIELD (field)
1413 537208 : && (! DECL_PACKED (field)
1414 : /* Enter for these packed fields only to issue a warning. */
1415 2725 : || TYPE_ALIGN (type) <= BITS_PER_UNIT)
1416 534682 : && maximum_field_alignment == 0
1417 534448 : && ! integer_zerop (DECL_SIZE (field))
1418 533103 : && tree_fits_uhwi_p (DECL_SIZE (field))
1419 533103 : && tree_fits_uhwi_p (rli->offset)
1420 65421328 : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1421 : {
1422 533089 : unsigned int type_align = TYPE_ALIGN (type);
1423 533089 : tree dsize = DECL_SIZE (field);
1424 533089 : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1425 533089 : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1426 533089 : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1427 :
1428 : #ifdef ADJUST_FIELD_ALIGN
1429 533089 : if (! TYPE_USER_ALIGN (type))
1430 528100 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1431 : #endif
1432 :
1433 : /* A bit field may not span more units of alignment of its type
1434 : than its type itself. Advance to next boundary if necessary. */
1435 533089 : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1436 : {
1437 45649 : if (DECL_PACKED (field))
1438 : {
1439 22 : if (warn_packed_bitfield_compat == 1)
1440 14 : inform
1441 14 : (input_location,
1442 : "offset of packed bit-field %qD has changed in GCC 4.4",
1443 : field);
1444 : }
1445 : else
1446 45627 : rli->bitpos = round_up (rli->bitpos, type_align);
1447 : }
1448 :
1449 533089 : if (! DECL_PACKED (field))
1450 532896 : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1451 :
1452 533089 : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1453 : TYPE_WARN_IF_NOT_ALIGN (type));
1454 : }
1455 :
1456 : #ifdef BITFIELD_NBYTES_LIMITED
1457 : if (BITFIELD_NBYTES_LIMITED
1458 : && ! targetm.ms_bitfield_layout_p (rli->t)
1459 : && TREE_CODE (field) == FIELD_DECL
1460 : && type != error_mark_node
1461 : && DECL_BIT_FIELD_TYPE (field)
1462 : && ! DECL_PACKED (field)
1463 : && ! integer_zerop (DECL_SIZE (field))
1464 : && tree_fits_uhwi_p (DECL_SIZE (field))
1465 : && tree_fits_uhwi_p (rli->offset)
1466 : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1467 : {
1468 : unsigned int type_align = TYPE_ALIGN (type);
1469 : tree dsize = DECL_SIZE (field);
1470 : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1471 : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1472 : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1473 :
1474 : #ifdef ADJUST_FIELD_ALIGN
1475 : if (! TYPE_USER_ALIGN (type))
1476 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1477 : #endif
1478 :
1479 : if (maximum_field_alignment != 0)
1480 : type_align = MIN (type_align, maximum_field_alignment);
1481 : /* ??? This test is opposite the test in the containing if
1482 : statement, so this code is unreachable currently. */
1483 : else if (DECL_PACKED (field))
1484 : type_align = MIN (type_align, BITS_PER_UNIT);
1485 :
1486 : /* A bit field may not span the unit of alignment of its type.
1487 : Advance to next boundary if necessary. */
1488 : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1489 : rli->bitpos = round_up (rli->bitpos, type_align);
1490 :
1491 : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1492 : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1493 : TYPE_WARN_IF_NOT_ALIGN (type));
1494 : }
1495 : #endif
1496 :
1497 : /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1498 : A subtlety:
1499 : When a bit field is inserted into a packed record, the whole
1500 : size of the underlying type is used by one or more same-size
1501 : adjacent bitfields. (That is, if its long:3, 32 bits is
1502 : used in the record, and any additional adjacent long bitfields are
1503 : packed into the same chunk of 32 bits. However, if the size
1504 : changes, a new field of that size is allocated.) In an unpacked
1505 : record, this is the same as using alignment, but not equivalent
1506 : when packing.
1507 :
1508 : Note: for compatibility, we use the type size, not the type alignment
1509 : to determine alignment, since that matches the documentation */
1510 :
1511 64888239 : if (targetm.ms_bitfield_layout_p (rli->t))
1512 : {
1513 201 : tree prev_saved = rli->prev_field;
1514 283 : tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1515 :
1516 : /* This is a bitfield if it exists. */
1517 201 : if (rli->prev_field)
1518 : {
1519 82 : bool realign_p = known_align < desired_align;
1520 :
1521 : /* If both are bitfields, nonzero, and the same size, this is
1522 : the middle of a run. Zero declared size fields are special
1523 : and handled as "end of run". (Note: it's nonzero declared
1524 : size, but equal type sizes!) (Since we know that both
1525 : the current and previous fields are bitfields by the
1526 : time we check it, DECL_SIZE must be present for both.) */
1527 82 : if (DECL_BIT_FIELD_TYPE (field)
1528 64 : && !integer_zerop (DECL_SIZE (field))
1529 56 : && !integer_zerop (DECL_SIZE (rli->prev_field))
1530 54 : && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
1531 54 : && tree_fits_uhwi_p (TYPE_SIZE (type))
1532 136 : && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1533 : {
1534 : /* We're in the middle of a run of equal type size fields; make
1535 : sure we realign if we run out of bits. (Not decl size,
1536 : type size!) */
1537 52 : HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
1538 :
1539 52 : if (rli->remaining_in_alignment < bitsize)
1540 : {
1541 1 : HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
1542 :
1543 : /* out of bits; bump up to next 'word'. */
1544 1 : rli->bitpos
1545 1 : = size_binop (PLUS_EXPR, rli->bitpos,
1546 : bitsize_int (rli->remaining_in_alignment));
1547 1 : rli->prev_field = field;
1548 1 : if (typesize < bitsize)
1549 0 : rli->remaining_in_alignment = 0;
1550 : else
1551 1 : rli->remaining_in_alignment = typesize - bitsize;
1552 : }
1553 : else
1554 : {
1555 51 : rli->remaining_in_alignment -= bitsize;
1556 51 : realign_p = false;
1557 : }
1558 : }
1559 : else
1560 : {
1561 : /* End of a run: if leaving a run of bitfields of the same type
1562 : size, we have to "use up" the rest of the bits of the type
1563 : size.
1564 :
1565 : Compute the new position as the sum of the size for the prior
1566 : type and where we first started working on that type.
1567 : Note: since the beginning of the field was aligned then
1568 : of course the end will be too. No round needed. */
1569 :
1570 30 : if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1571 : {
1572 20 : rli->bitpos
1573 20 : = size_binop (PLUS_EXPR, rli->bitpos,
1574 : bitsize_int (rli->remaining_in_alignment));
1575 : }
1576 : else
1577 : /* We "use up" size zero fields; the code below should behave
1578 : as if the prior field was not a bitfield. */
1579 : prev_saved = NULL;
1580 :
1581 : /* Cause a new bitfield to be captured, either this time (if
1582 : currently a bitfield) or next time we see one. */
1583 30 : if (!DECL_BIT_FIELD_TYPE (field)
1584 30 : || integer_zerop (DECL_SIZE (field)))
1585 26 : rli->prev_field = NULL;
1586 : }
1587 :
1588 : /* Does this field automatically have alignment it needs by virtue
1589 : of the fields that precede it and the record's own alignment? */
1590 82 : if (realign_p)
1591 : {
1592 : /* If the alignment is still within offset_align, just align
1593 : the bit position. */
1594 13 : if (desired_align < rli->offset_align)
1595 11 : rli->bitpos = round_up (rli->bitpos, desired_align);
1596 : else
1597 : {
1598 : /* First adjust OFFSET by the partial bits, then align. */
1599 2 : tree d = size_binop (CEIL_DIV_EXPR, rli->bitpos,
1600 : bitsize_unit_node);
1601 2 : rli->offset = size_binop (PLUS_EXPR, rli->offset,
1602 : fold_convert (sizetype, d));
1603 2 : rli->bitpos = bitsize_zero_node;
1604 :
1605 2 : rli->offset = round_up (rli->offset,
1606 : desired_align / BITS_PER_UNIT);
1607 : }
1608 :
1609 13 : if (! TREE_CONSTANT (rli->offset))
1610 0 : rli->offset_align = desired_align;
1611 : }
1612 :
1613 82 : normalize_rli (rli);
1614 : }
1615 :
1616 : /* If we're starting a new run of same type size bitfields
1617 : (or a run of non-bitfields), set up the "first of the run"
1618 : fields.
1619 :
1620 : That is, if the current field is not a bitfield, or if there
1621 : was a prior bitfield the type sizes differ, or if there wasn't
1622 : a prior bitfield the size of the current field is nonzero.
1623 :
1624 : Note: we must be sure to test ONLY the type size if there was
1625 : a prior bitfield and ONLY for the current field being zero if
1626 : there wasn't. */
1627 :
1628 201 : if (!DECL_BIT_FIELD_TYPE (field)
1629 261 : || (prev_saved != NULL
1630 129 : ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1631 69 : : !integer_zerop (DECL_SIZE (field))))
1632 : {
1633 : /* Never smaller than a byte for compatibility. */
1634 143 : unsigned int type_align = BITS_PER_UNIT;
1635 :
1636 : /* (When not a bitfield), we could be seeing a flex array (with
1637 : no DECL_SIZE). Since we won't be using remaining_in_alignment
1638 : until we see a bitfield (and come by here again) we just skip
1639 : calculating it. */
1640 143 : if (DECL_SIZE (field) != NULL
1641 143 : && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field)))
1642 285 : && tree_fits_uhwi_p (DECL_SIZE (field)))
1643 : {
1644 142 : unsigned HOST_WIDE_INT bitsize
1645 142 : = tree_to_uhwi (DECL_SIZE (field));
1646 142 : unsigned HOST_WIDE_INT typesize
1647 142 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
1648 :
1649 142 : if (typesize < bitsize)
1650 0 : rli->remaining_in_alignment = 0;
1651 : else
1652 142 : rli->remaining_in_alignment = typesize - bitsize;
1653 : }
1654 :
1655 : /* Now align (conventionally) for the new type. */
1656 143 : if (! DECL_PACKED (field))
1657 137 : type_align = TYPE_ALIGN (TREE_TYPE (field));
1658 :
1659 143 : if (maximum_field_alignment != 0)
1660 56 : type_align = MIN (type_align, maximum_field_alignment);
1661 :
1662 143 : rli->bitpos = round_up (rli->bitpos, type_align);
1663 :
1664 : /* If we really aligned, don't allow subsequent bitfields
1665 : to undo that. */
1666 143 : rli->prev_field = NULL;
1667 : }
1668 : }
1669 :
1670 : /* Offset so far becomes the position of this field after normalizing. */
1671 64888239 : normalize_rli (rli);
1672 64888239 : DECL_FIELD_OFFSET (field) = rli->offset;
1673 64888239 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1674 64888239 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1675 64888239 : handle_warn_if_not_align (field, rli->record_align);
1676 :
1677 : /* Evaluate nonconstant offsets only once, either now or as soon as safe. */
1678 64888239 : if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST)
1679 1171 : DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field));
1680 :
1681 : /* If this field ended up more aligned than we thought it would be (we
1682 : approximate this by seeing if its position changed), lay out the field
1683 : again; perhaps we can use an integral mode for it now. */
1684 64888239 : if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1685 18212471 : actual_align = least_bit_hwi (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
1686 46675768 : else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1687 74819508 : actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1688 9204109 : else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1689 9202955 : actual_align = (BITS_PER_UNIT
1690 9202955 : * least_bit_hwi (tree_to_uhwi (DECL_FIELD_OFFSET (field))));
1691 : else
1692 1154 : actual_align = DECL_OFFSET_ALIGN (field);
1693 : /* ACTUAL_ALIGN is still the actual alignment *within the record* .
1694 : store / extract bit field operations will check the alignment of the
1695 : record against the mode of bit fields. */
1696 :
1697 64888239 : if (known_align != actual_align)
1698 1493038 : layout_decl (field, actual_align);
1699 :
1700 64888239 : if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1701 160591 : rli->prev_field = field;
1702 :
1703 : /* Now add size of this field to the size of the record. If the size is
1704 : not constant, treat the field as being a multiple of bytes and just
1705 : adjust the offset, resetting the bit position. Otherwise, apportion the
1706 : size amongst the bit position and offset. First handle the case of an
1707 : unspecified size, which can happen when we have an invalid nested struct
1708 : definition, such as struct j { struct j { int i; } }. The error message
1709 : is printed in finish_struct. */
1710 64888239 : if (DECL_SIZE (field) == 0)
1711 : /* Do nothing. */;
1712 64797639 : else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1713 64797639 : || TREE_OVERFLOW (DECL_SIZE (field)))
1714 : {
1715 1058 : rli->offset
1716 1058 : = size_binop (PLUS_EXPR, rli->offset,
1717 : fold_convert (sizetype,
1718 : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1719 : bitsize_unit_node)));
1720 1058 : rli->offset
1721 1058 : = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1722 1058 : rli->bitpos = bitsize_zero_node;
1723 1058 : rli->offset_align = MIN (rli->offset_align, desired_align);
1724 :
1725 1058 : if (!multiple_of_p (bitsizetype, DECL_SIZE (field),
1726 2116 : bitsize_int (rli->offset_align)))
1727 : {
1728 304 : tree type = strip_array_types (TREE_TYPE (field));
1729 : /* The above adjusts offset_align just based on the start of the
1730 : field. The field might not have a size that is a multiple of
1731 : that offset_align though. If the field is an array of fixed
1732 : sized elements, assume there can be any multiple of those
1733 : sizes. If it is a variable length aggregate or array of
1734 : variable length aggregates, assume worst that the end is
1735 : just BITS_PER_UNIT aligned. */
1736 304 : if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
1737 : {
1738 304 : if (TREE_INT_CST_LOW (TYPE_SIZE (type)))
1739 : {
1740 304 : unsigned HOST_WIDE_INT sz
1741 304 : = least_bit_hwi (TREE_INT_CST_LOW (TYPE_SIZE (type)));
1742 304 : rli->offset_align = MIN (rli->offset_align, sz);
1743 : }
1744 : }
1745 : else
1746 0 : rli->offset_align = MIN (rli->offset_align, BITS_PER_UNIT);
1747 : }
1748 : }
1749 64796581 : else if (targetm.ms_bitfield_layout_p (rli->t))
1750 : {
1751 201 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1752 :
1753 : /* If FIELD is the last field and doesn't end at the full length
1754 : of the type then pad the struct out to the full length of the
1755 : last type. */
1756 201 : if (DECL_BIT_FIELD_TYPE (field)
1757 201 : && !integer_zerop (DECL_SIZE (field)))
1758 : {
1759 : /* We have to scan, because non-field DECLS are also here. */
1760 : tree probe = field;
1761 182 : while ((probe = DECL_CHAIN (probe)))
1762 135 : if (TREE_CODE (probe) == FIELD_DECL)
1763 : break;
1764 119 : if (!probe)
1765 47 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1766 : bitsize_int (rli->remaining_in_alignment));
1767 : }
1768 :
1769 201 : normalize_rli (rli);
1770 : }
1771 : else
1772 : {
1773 64796380 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1774 64796380 : normalize_rli (rli);
1775 : }
1776 : }
1777 :
1778 : /* Assuming that all the fields have been laid out, this function uses
1779 : RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1780 : indicated by RLI. */
1781 :
static void
finalize_record_size (record_layout_info rli)
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
					    rli->record_align));
#else
  SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align));
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  /* A leftover partial byte counts as one more whole byte.  */
  if (! integer_zerop (rli->bitpos))
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Round the size up to be a multiple of the required alignment.  */
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));

  /* Warn under -Wpadded when rounding to the alignment actually grew
     the type.  Suppressed for builtins and artificial (e.g.
     compiler-generated) types.  */
  if (TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
      && input_location != BUILTINS_LOCATION
      && !TYPE_ARTIFICIAL (rli->t))
    {
      tree pad_size
	= size_binop (MINUS_EXPR, TYPE_SIZE_UNIT (rli->t), unpadded_size_unit);
      warning (OPT_Wpadded,
	       "padding struct size to alignment boundary with %E bytes", pad_size);
    }

  /* Warn under -Wpacked when the packed attribute had no effect, i.e.
     laying the record out with its natural (unpacked) alignment would
     have produced the same size.  */
  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

      /* Compute the alignment the record would have had without
	 the packed attribute.  */
#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
	= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
	{
	  if (TYPE_NAME (rli->t))
	    {
	      tree name;

	      /* TYPE_NAME can be either an IDENTIFIER_NODE or a
		 TYPE_DECL; extract the identifier either way.  */
	      if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
		name = TYPE_NAME (rli->t);
	      else
		name = DECL_NAME (TYPE_NAME (rli->t));

	      if (STRICT_ALIGNMENT)
		warning (OPT_Wpacked, "packed attribute causes inefficient "
			 "alignment for %qE", name);
	      else
		warning (OPT_Wpacked,
			 "packed attribute is unnecessary for %qE", name);
	    }
	  else
	    {
	      if (STRICT_ALIGNMENT)
		warning (OPT_Wpacked,
			 "packed attribute causes inefficient alignment");
	      else
		warning (OPT_Wpacked, "packed attribute is unnecessary");
	    }
	}
    }
}
1868 :
1869 : /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1870 :
void
114959133 : compute_record_mode (tree type)
{
  tree field;
  machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  SET_TYPE_MODE (type, BLKmode);

  /* A variable-sized record can never have a scalar mode.  */
  poly_uint64 type_size;
  if (!poly_int_tree_p (TYPE_SIZE (type), &type_size))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      /* Give up (keeping BLKmode) if the field is erroneous, is
	 BLKmode for a reason other than alignment (zero-size BLKmode
	 fields are tolerated), or has a non-constant position or
	 size.  */
      poly_uint64 field_size;
      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
	  || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
	      && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
	      && !(TYPE_SIZE (TREE_TYPE (field)) != 0
		   && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
	  || !tree_fits_poly_uint64_p (bit_position (field))
	  || DECL_SIZE (field) == 0
	  || !poly_int_tree_p (DECL_SIZE (field), &field_size))
	return;

      /* If this field is the whole struct, remember its mode so
	 that, say, we can put a double in a class into a DF
	 register instead of forcing it to live in the stack.  */
      if (known_eq (field_size, type_size)
	  /* Partial int types (e.g. __int20) may have TYPE_SIZE equal to
	     wider types (e.g. int32), despite precision being less.  Ensure
	     that the TYPE_MODE of the struct does not get set to the partial
	     int mode if there is a wider type also in the struct.  */
	  && known_gt (GET_MODE_PRECISION (DECL_MODE (field)),
		       GET_MODE_PRECISION (mode)))
	mode = DECL_MODE (field);

      /* With some targets, it is sub-optimal to access an aligned
	 BLKmode structure as a scalar.  */
      if (targetm.member_type_forces_blk (field, mode))
	return;
    }

  /* If we only have one real field; use its mode if that mode's size
     matches the type's size.  This generally only applies to RECORD_TYPE.
     For UNION_TYPE, if the widest field is MODE_INT then use that mode.
     If the widest field is MODE_PARTIAL_INT, and the union will be passed
     by reference, then use that mode.  */
  if ((TREE_CODE (type) == RECORD_TYPE
       || (TREE_CODE (type) == UNION_TYPE
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		   && (targetm.calls.pass_by_reference
		       (pack_cumulative_args (0),
			function_arg_info (type, mode, /*named=*/false)))))))
      && mode != VOIDmode
      && known_eq (GET_MODE_BITSIZE (mode), type_size))
    ;
  else
    /* Otherwise look for an integer mode of exactly the record's
       size; if none exists, stay BLKmode.  */
    mode = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1).else_blk ();

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (mode != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
	    || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
    {
      /* If this is the only reason this type is BLKmode, then
	 don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      mode = BLKmode;
    }

  SET_TYPE_MODE (type, mode);
}
1957 :
1958 : /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1959 : out. */
1960 :
static void
finalize_type_size (tree type)
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */
  bool tua_cleared_p = false;
  if (TYPE_MODE (type) != BLKmode
      && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT || !AGGREGATE_TYPE_P (type)))
    {
      unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));

      /* Don't override a larger alignment requirement coming from a user
	 alignment of one of the fields.  */
      if (mode_align >= TYPE_ALIGN (type))
	{
	  SET_TYPE_ALIGN (type, mode_align);
	  /* Remember that we're about to reset this flag, so that the
	     variant loop below can propagate the reset.  */
	  tua_cleared_p = TYPE_USER_ALIGN (type);
	  TYPE_USER_ALIGN (type) = false;
	}
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  SET_TYPE_ALIGN (type,
		  ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT));
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = fold_convert (sizetype,
		      size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
				  bitsize_unit_node));

  /* Round both sizes up to the type's alignment.  */
  if (TYPE_SIZE (type) != 0)
    {
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type));
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Handle empty records as per the x86-64 psABI.  */
  TYPE_EMPTY_P (type) = targetm.calls.empty_record_p (type);

  /* Also layout any other variants of the type.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int precision = TYPE_PRECISION (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      machine_mode mode = TYPE_MODE (type);
      bool empty_p = TYPE_EMPTY_P (type);
      bool typeless = AGGREGATE_TYPE_P (type) && TYPE_TYPELESS_STORAGE (type);

      /* Copy it into all variants, starting from the main variant so
	 every member of the variant chain agrees on layout.  */
      for (variant = TYPE_MAIN_VARIANT (type);
	   variant != NULL_TREE;
	   variant = TYPE_NEXT_VARIANT (variant))
	{
	  TYPE_SIZE (variant) = size;
	  TYPE_SIZE_UNIT (variant) = size_unit;
	  unsigned valign = align;
	  /* A variant with its own user alignment keeps the larger of
	     the two alignments.  */
	  if (TYPE_USER_ALIGN (variant))
	    {
	      valign = MAX (valign, TYPE_ALIGN (variant));
	      /* If we reset TYPE_USER_ALIGN on the main variant, we might
		 need to reset it on the variants too.  TYPE_MODE will be set
		 to MODE in this variant, so we can use that.  */
	      if (tua_cleared_p && GET_MODE_ALIGNMENT (mode) >= valign)
		TYPE_USER_ALIGN (variant) = false;
	    }
	  else
	    TYPE_USER_ALIGN (variant) = user_align;
	  SET_TYPE_ALIGN (variant, valign);
	  TYPE_PRECISION (variant) = precision;
	  SET_TYPE_MODE (variant, mode);
	  TYPE_EMPTY_P (variant) = empty_p;
	  if (AGGREGATE_TYPE_P (variant))
	    TYPE_TYPELESS_STORAGE (variant) = typeless;
	}
    }
}
2063 :
2064 : /* Return a new underlying object for a bitfield started with FIELD. */
2065 :
2066 : static tree
2067 166121 : start_bitfield_representative (tree field)
2068 : {
2069 166121 : tree repr = make_node (FIELD_DECL);
2070 166121 : DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
2071 : /* Force the representative to begin at a BITS_PER_UNIT aligned
2072 : boundary - C++ may use tail-padding of a base object to
2073 : continue packing bits so the bitfield region does not start
2074 : at bit zero (see g++.dg/abi/bitfield5.C for example).
2075 : Unallocated bits may happen for other reasons as well,
2076 : for example Ada which allows explicit bit-granular structure layout. */
2077 332242 : DECL_FIELD_BIT_OFFSET (repr)
2078 166121 : = size_binop (BIT_AND_EXPR,
2079 : DECL_FIELD_BIT_OFFSET (field),
2080 : bitsize_int (~(BITS_PER_UNIT - 1)));
2081 166121 : SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
2082 166121 : DECL_SIZE (repr) = DECL_SIZE (field);
2083 166121 : DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
2084 166121 : DECL_PACKED (repr) = DECL_PACKED (field);
2085 166121 : DECL_CONTEXT (repr) = DECL_CONTEXT (field);
2086 : /* There are no indirect accesses to this field. If we introduce
2087 : some then they have to use the record alias set. This makes
2088 : sure to properly conflict with [indirect] accesses to addressable
2089 : fields of the bitfield group. */
2090 166121 : DECL_NONADDRESSABLE_P (repr) = 1;
2091 166121 : return repr;
2092 : }
2093 :
2094 : /* Finish up a bitfield group that was started by creating the underlying
2095 : object REPR with the last field in the bitfield group FIELD. */
2096 :
static void
finish_bitfield_representative (tree repr, tree field)
{
  unsigned HOST_WIDE_INT bitsize, maxbitsize;
  tree nextf, size;

  /* Distance in bytes from the start of the representative to the
     start of the last field of the group.  */
  size = size_diffop (DECL_FIELD_OFFSET (field),
		      DECL_FIELD_OFFSET (repr));
  while (TREE_CODE (size) == COMPOUND_EXPR)
    size = TREE_OPERAND (size, 1);
  gcc_assert (tree_fits_uhwi_p (size));
  /* Total extent of the group in bits: byte distance, plus the bit
     offset delta, plus the size of the final bitfield itself.  */
  bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
	     + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
	     - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
	     + tree_to_uhwi (DECL_SIZE (field)));

  /* Round up bitsize to multiples of BITS_PER_UNIT.  */
  bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);

  /* Now nothing tells us how to pad out bitsize ...  Find the next
     FIELD_DECL after the group (RECORD_TYPE only; union members all
     start at offset zero so there is no "next" field).  */
  if (TREE_CODE (DECL_CONTEXT (field)) == RECORD_TYPE)
    {
      nextf = DECL_CHAIN (field);
      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
	nextf = DECL_CHAIN (nextf);
    }
  else
    nextf = NULL_TREE;
  if (nextf)
    {
      tree maxsize;
      /* If there was an error, the field may be not laid out
	 correctly.  Don't bother to do anything.  */
      if (TREE_TYPE (nextf) == error_mark_node)
	{
	  TREE_TYPE (repr) = error_mark_node;
	  return;
	}
      /* The group may grow at most up to the start of the following
	 field.  */
      maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
			     DECL_FIELD_OFFSET (repr));
      if (tree_fits_uhwi_p (maxsize))
	{
	  maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
			+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
			- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	  /* If the group ends within a bitfield nextf does not need to be
	     aligned to BITS_PER_UNIT.  Thus round up.  */
	  maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
	}
      else
	maxbitsize = bitsize;
    }
  else
    {
      /* Note that if the C++ FE sets up tail-padding to be re-used it
	 creates a as-base variant of the type with TYPE_SIZE adjusted
	 accordingly.  So it is safe to include tail-padding here.  */
      tree aggsize = lang_hooks.types.unit_size_without_reusable_padding
		       (DECL_CONTEXT (field));
      tree maxsize = size_diffop (aggsize, DECL_FIELD_OFFSET (repr));
      /* We cannot generally rely on maxsize to fold to an integer constant,
	 so use bitsize as fallback for this case.  */
      if (tree_fits_uhwi_p (maxsize))
	maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
		      - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      else
	maxbitsize = bitsize;
    }

  /* Only if we don't artificially break up the representative in
     the middle of a large bitfield with different possibly
     overlapping representatives.  And all representatives start
     at byte offset.  */
  gcc_assert (maxbitsize % BITS_PER_UNIT == 0);

  /* Find the smallest nice mode to use.  */
  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    if (GET_MODE_BITSIZE (mode_iter.require ()) >= bitsize)
      break;

  scalar_int_mode mode;
  if (!mode_iter.exists (&mode)
      || GET_MODE_BITSIZE (mode) > maxbitsize
      || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)
    {
      if (TREE_CODE (TREE_TYPE (field)) == BITINT_TYPE)
	{
	  struct bitint_info info;
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (field));
	  bool ok = targetm.c.bitint_type_info (prec, &info);
	  gcc_assert (ok);
	  scalar_int_mode limb_mode
	    = as_a <scalar_int_mode> (info.abi_limb_mode);
	  unsigned lprec = GET_MODE_PRECISION (limb_mode);
	  if (prec > lprec)
	    {
	      /* For middle/large/huge _BitInt prefer bitsize being a multiple
		 of limb precision.  */
	      unsigned HOST_WIDE_INT bsz = CEIL (bitsize, lprec) * lprec;
	      if (bsz <= maxbitsize)
		bitsize = bsz;
	    }
	}
      /* We really want a BLKmode representative only as a last resort,
	 considering the member b in
	   struct { int a : 7; int b : 17; int c; } __attribute__((packed));
	 Otherwise we simply want to split the representative up
	 allowing for overlaps within the bitfield region as required for
	   struct { int a : 7; int b : 7;
		    int c : 10; int d; } __attribute__((packed));
	 [0, 15] HImode for a and b, [8, 23] HImode for c.  */
      DECL_SIZE (repr) = bitsize_int (bitsize);
      DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
      SET_DECL_MODE (repr, BLKmode);
      TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
						 bitsize / BITS_PER_UNIT);
    }
  else
    {
      /* A suitable integer mode exists; size the representative to it.  */
      unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
      DECL_SIZE (repr) = bitsize_int (modesize);
      DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
      SET_DECL_MODE (repr, mode);
      TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
    }

  /* Remember whether the bitfield group is at the end of the
     structure or not.  */
  DECL_CHAIN (repr) = nextf;
}
2228 :
2229 : /* Compute and set FIELD_DECLs for the underlying objects we should
2230 : use for bitfield access for the structure T. */
2231 :
void
finish_bitfield_layout (tree t)
{
  tree field, prev;
  /* REPR is the currently-open representative, or NULL_TREE when no
     bitfield group is in progress.  */
  tree repr = NULL_TREE;

  if (TREE_CODE (t) == QUAL_UNION_TYPE)
    return;

  /* PREV tracks the last FIELD_DECL seen (RECORD_TYPE only) so a group
     can be closed off at the field that ended it.  */
  for (prev = NULL_TREE, field = TYPE_FIELDS (t);
       field; field = DECL_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      /* In the C++ memory model, consecutive bit fields in a structure are
	 considered one memory location and updating a memory location
	 may not store into adjacent memory locations.  */
      if (!repr
	  && DECL_BIT_FIELD_TYPE (field))
	{
	  /* Start new representative.  */
	  repr = start_bitfield_representative (field);
	}
      else if (repr
	       && ! DECL_BIT_FIELD_TYPE (field))
	{
	  /* Finish off new representative.  */
	  finish_bitfield_representative (repr, prev);
	  repr = NULL_TREE;
	}
      else if (DECL_BIT_FIELD_TYPE (field))
	{
	  gcc_assert (repr != NULL_TREE);

	  /* Zero-size bitfields finish off a representative and
	     do not have a representative themselves.  This is
	     required by the C++ memory model.  */
	  if (integer_zerop (DECL_SIZE (field)))
	    {
	      finish_bitfield_representative (repr, prev);
	      repr = NULL_TREE;
	    }

	  /* We assume that either DECL_FIELD_OFFSET of the representative
	     and each bitfield member is a constant or they are equal.
	     This is because we need to be able to compute the bit-offset
	     of each field relative to the representative in get_bit_range
	     during RTL expansion.
	     If these constraints are not met, simply force a new
	     representative to be generated.  That will at most
	     generate worse code but still maintain correctness with
	     respect to the C++ memory model.  */
	  else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))
		      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
		     || operand_equal_p (DECL_FIELD_OFFSET (repr),
					 DECL_FIELD_OFFSET (field), 0)))
	    {
	      finish_bitfield_representative (repr, prev);
	      repr = start_bitfield_representative (field);
	    }
	}
      else
	continue;

      /* Attach the open representative (if any) to this field.  */
      if (repr)
	DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;

      if (TREE_CODE (t) == RECORD_TYPE)
	prev = field;
      else if (repr)
	{
	  /* In a union every member starts a group of its own, so
	     close the representative immediately.  */
	  finish_bitfield_representative (repr, field);
	  repr = NULL_TREE;
	}
    }

  /* Close a group that ran to the end of the field list.  */
  if (repr)
    finish_bitfield_representative (repr, prev);
}
2312 :
2313 : /* Do all of the work required to layout the type indicated by RLI,
2314 : once the fields have been laid out. This function will call `free'
2315 : for RLI, unless FREE_P is false. Passing a value other than false
2316 : for FREE_P is bad practice; this option only exists to support the
2317 : G++ 3.2 ABI. */
2318 :
2319 : void
2320 60930906 : finish_record_layout (record_layout_info rli, int free_p)
2321 : {
2322 60930906 : tree variant;
2323 :
2324 : /* Compute the final size. */
2325 60930906 : finalize_record_size (rli);
2326 :
2327 : /* Compute the TYPE_MODE for the record. */
2328 60930906 : compute_record_mode (rli->t);
2329 :
2330 : /* Perform any last tweaks to the TYPE_SIZE, etc. */
2331 60930906 : finalize_type_size (rli->t);
2332 :
2333 : /* Compute bitfield representatives. */
2334 60930906 : finish_bitfield_layout (rli->t);
2335 :
2336 : /* Propagate TYPE_PACKED and TYPE_REVERSE_STORAGE_ORDER to variants.
2337 : With C++ templates, it is too early to do this when the attribute
2338 : is being parsed. */
2339 135243529 : for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
2340 74312623 : variant = TYPE_NEXT_VARIANT (variant))
2341 : {
2342 74312623 : TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
2343 148625246 : TYPE_REVERSE_STORAGE_ORDER (variant)
2344 74312623 : = TYPE_REVERSE_STORAGE_ORDER (rli->t);
2345 : }
2346 :
2347 : /* Lay out any static members. This is done now because their type
2348 : may use the record's type. */
2349 74580845 : while (!vec_safe_is_empty (rli->pending_statics))
2350 13649939 : layout_decl (rli->pending_statics->pop (), 0);
2351 :
2352 : /* Clean up. */
2353 60930906 : if (free_p)
2354 : {
2355 60930906 : vec_free (rli->pending_statics);
2356 60930906 : free (rli);
2357 : }
2358 60930906 : }
2359 :
2360 :
2361 : /* Finish processing a builtin RECORD_TYPE type TYPE. It's name is
2362 : NAME, its fields are chained in reverse on FIELDS.
2363 :
2364 : If ALIGN_TYPE is non-null, it is given the same alignment as
2365 : ALIGN_TYPE. */
2366 :
2367 : void
2368 1131507 : finish_builtin_struct (tree type, const char *name, tree fields,
2369 : tree align_type)
2370 : {
2371 1131507 : tree tail, next;
2372 :
2373 3422873 : for (tail = NULL_TREE; fields; tail = fields, fields = next)
2374 : {
2375 2291366 : DECL_FIELD_CONTEXT (fields) = type;
2376 2291366 : next = DECL_CHAIN (fields);
2377 2291366 : DECL_CHAIN (fields) = tail;
2378 : }
2379 1131507 : TYPE_FIELDS (type) = tail;
2380 :
2381 1131507 : if (align_type)
2382 : {
2383 1020003 : SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type));
2384 1020003 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
2385 1020003 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2386 : TYPE_WARN_IF_NOT_ALIGN (align_type));
2387 : }
2388 :
2389 1131507 : layout_type (type);
2390 : #if 0 /* not yet, should get fixed properly later */
2391 : TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
2392 : #else
2393 1131507 : TYPE_NAME (type) = build_decl (BUILTINS_LOCATION,
2394 : TYPE_DECL, get_identifier (name), type);
2395 : #endif
2396 1131507 : TYPE_STUB_DECL (type) = TYPE_NAME (type);
2397 1131507 : layout_decl (TYPE_NAME (type), 0);
2398 1131507 : }
2399 :
2400 : /* Compute TYPE_MODE for TYPE (which is ARRAY_TYPE). */
2401 :
void compute_array_mode (tree type)
{
  gcc_assert (TREE_CODE (type) == ARRAY_TYPE);

  /* Default to BLKmode; only complete arrays whose element type has a
     usable (non-BLK) mode can be given a scalar-ish mode below.  */
  SET_TYPE_MODE (type, BLKmode);
  if (TYPE_SIZE (type) != 0
      && ! targetm.member_type_forces_blk (type, VOIDmode)
      /* BLKmode elements force BLKmode aggregate;
	 else extract/store fields may lose.  */
      && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
	  || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
    {
      SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),
					   TYPE_SIZE (type)));
      /* On strict-alignment targets, reject a mode that would require
	 more alignment than the array actually has.  Setting
	 TYPE_NO_FORCE_BLK records that alignment was the *only* reason
	 for BLKmode, so containing types need not be forced to BLKmode
	 in turn (see the check above).  */
      if (TYPE_MODE (type) != BLKmode
	  && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
	  && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	{
	  TYPE_NO_FORCE_BLK (type) = 1;
	  SET_TYPE_MODE (type, BLKmode);
	}
    }
}
2425 :
2426 : /* Calculate the mode, size, and alignment for TYPE.
2427 : For an array type, calculate the element separation as well.
2428 : Record TYPE on the chain of permanent or temporary types
2429 : so that dbxout will find out about it.
2430 :
2431 : TYPE_SIZE of a type is nonzero if the type has been laid out already.
2432 : layout_type does nothing on such a type.
2433 :
2434 : If the type is incomplete, its TYPE_SIZE remains zero. */
2435 :
void
layout_type (tree type)
{
  gcc_assert (type);

  if (type == error_mark_node)
    return;

  /* We don't want finalize_type_size to copy an alignment attribute to
     variants that don't have it.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Do nothing if type has been laid out before.  */
  if (TYPE_SIZE (type))
    return;

  switch (TREE_CODE (type))
    {
    case LANG_TYPE:
      /* This kind of type is the responsibility
	 of the language-specific code.  */
      gcc_unreachable ();

    case BOOLEAN_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      {
	/* Integral types take the narrowest integer mode that holds
	   their precision; size follows the mode, not the precision.  */
	scalar_int_mode mode
	  = smallest_int_mode_for_size (TYPE_PRECISION (type)).require ();
	SET_TYPE_MODE (type, mode);
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	/* Don't set TYPE_PRECISION here, as it may be set by a bitfield.  */
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	break;
      }

    case BITINT_TYPE:
      {
	struct bitint_info info;
	int cnt;
	bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
	gcc_assert (ok);
	scalar_int_mode limb_mode
	  = as_a <scalar_int_mode> (info.abi_limb_mode);
	/* A _BitInt no wider than one limb uses the limb mode itself;
	   otherwise it is laid out as CNT limbs in BLKmode.  */
	if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
	  {
	    SET_TYPE_MODE (type, limb_mode);
	    gcc_assert (info.abi_limb_mode == info.limb_mode);
	    cnt = 1;
	  }
	else
	  {
	    SET_TYPE_MODE (type, BLKmode);
	    cnt = CEIL (TYPE_PRECISION (type), GET_MODE_PRECISION (limb_mode));
	    gcc_assert (info.abi_limb_mode == info.limb_mode
			|| !info.big_endian == !WORDS_BIG_ENDIAN);
	  }
	TYPE_SIZE (type) = bitsize_int (cnt * GET_MODE_BITSIZE (limb_mode));
	TYPE_SIZE_UNIT (type) = size_int (cnt * GET_MODE_SIZE (limb_mode));
	SET_TYPE_ALIGN (type, GET_MODE_ALIGNMENT (limb_mode));
	if (cnt > 1)
	  {
	    /* Use same mode as compute_record_mode would use for a structure
	       containing cnt limb_mode elements.  */
	    machine_mode mode = mode_for_size_tree (TYPE_SIZE (type),
						    MODE_INT, 1).else_blk ();
	    if (mode == BLKmode)
	      break;
	    finalize_type_size (type);
	    SET_TYPE_MODE (type, mode);
	    if (STRICT_ALIGNMENT
		&& !(TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
		     || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
	      {
		/* If this is the only reason this type is BLKmode, then
		   don't force containing types to be BLKmode.  */
		TYPE_NO_FORCE_BLK (type) = 1;
		SET_TYPE_MODE (type, BLKmode);
	      }
	    /* Propagate the mode decision to any variants, since
	       finalize_type_size has already been called above and the
	       common path at the bottom is skipped by the RETURN.  */
	    if (TYPE_NEXT_VARIANT (type) || type != TYPE_MAIN_VARIANT (type))
	      for (tree variant = TYPE_MAIN_VARIANT (type);
		   variant != NULL_TREE;
		   variant = TYPE_NEXT_VARIANT (variant))
		{
		  SET_TYPE_MODE (variant, mode);
		  if (STRICT_ALIGNMENT
		      && !(TYPE_ALIGN (variant) >= BIGGEST_ALIGNMENT
			   || (TYPE_ALIGN (variant)
			       >= GET_MODE_ALIGNMENT (mode))))
		    {
		      TYPE_NO_FORCE_BLK (variant) = 1;
		      SET_TYPE_MODE (variant, BLKmode);
		    }
		}
	    return;
	  }
	break;
      }

    case REAL_TYPE:
      {
	/* Allow the caller to choose the type mode, which is how decimal
	   floats are distinguished from binary ones.  */
	if (TYPE_MODE (type) == VOIDmode)
	  SET_TYPE_MODE
	    (type, float_mode_for_size (TYPE_PRECISION (type)).require ());
	scalar_float_mode mode = as_a <scalar_float_mode> (TYPE_MODE (type));
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	break;
      }

    case FIXED_POINT_TYPE:
      {
	/* TYPE_MODE (type) has been set already.  */
	scalar_mode mode = SCALAR_TYPE_MODE (type);
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	break;
      }

    case COMPLEX_TYPE:
      TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
      /* A BLKmode component can only be a large _BitInt; the complex
	 type is then BLKmode too, twice the component size.  */
      if (TYPE_MODE (TREE_TYPE (type)) == BLKmode)
	{
	  gcc_checking_assert (TREE_CODE (TREE_TYPE (type)) == BITINT_TYPE);
	  SET_TYPE_MODE (type, BLKmode);
	  TYPE_SIZE (type)
	    = int_const_binop (MULT_EXPR, TYPE_SIZE (TREE_TYPE (type)),
			       bitsize_int (2));
	  TYPE_SIZE_UNIT (type)
	    = int_const_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (type)),
			       bitsize_int (2));
	  break;
	}
      SET_TYPE_MODE (type,
		     GET_MODE_COMPLEX_MODE (TYPE_MODE (TREE_TYPE (type))));

      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case VECTOR_TYPE:
      {
	poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type);
	tree innertype = TREE_TYPE (type);

	/* Find an appropriate mode for the vector type.  */
	if (TYPE_MODE (type) == VOIDmode)
	  SET_TYPE_MODE (type,
			 mode_for_vector (SCALAR_TYPE_MODE (innertype),
					  nunits).else_blk ());

	TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
	TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
	/* Several boolean vector elements may fit in a single unit.
	   NOTE(review): the raw type_common.mode field is read here
	   rather than TYPE_MODE -- presumably to avoid indirection
	   through the vector-mode hook; confirm before relying on it.  */
	if (VECTOR_BOOLEAN_TYPE_P (type)
	    && type->type_common.mode != BLKmode)
	  {
	    TYPE_SIZE_UNIT (type)
	      = size_int (GET_MODE_SIZE (type->type_common.mode));
	    TYPE_SIZE (type)
	      = bitsize_int (GET_MODE_BITSIZE (type->type_common.mode));
	  }
	else
	  {
	    TYPE_SIZE_UNIT (type)
	      = size_int (GET_MODE_SIZE (SCALAR_TYPE_MODE (innertype))
			  * nunits);
	    TYPE_SIZE (type)
	      = bitsize_int (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (innertype))
			     * nunits);
	  }

	/* For vector types, we do not default to the mode's alignment.
	   Instead, query a target hook, defaulting to natural alignment.
	   This prevents ABI changes depending on whether or not native
	   vector modes are supported.  */
	SET_TYPE_ALIGN (type, targetm.vector_alignment (type));

	/* However, if the underlying mode requires a bigger alignment than
	   what the target hook provides, we cannot use the mode.  For now,
	   simply reject that case.  */
	gcc_assert (TYPE_ALIGN (type)
		    >= GET_MODE_ALIGNMENT (TYPE_MODE (type)));
	break;
      }

    case VOID_TYPE:
      /* This is an incomplete type and so doesn't have a size.  */
      SET_TYPE_ALIGN (type, 1);
      TYPE_USER_ALIGN (type) = 0;
      SET_TYPE_MODE (type, VOIDmode);
      break;

    case OFFSET_TYPE:
      /* Pointer-to-member offsets are laid out as pointer-sized
	 integers.  */
      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS);
      /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be
	 integral, which may be an __intN.  */
      SET_TYPE_MODE (type, int_mode_for_size (POINTER_SIZE, 0).require ());
      TYPE_PRECISION (type) = POINTER_SIZE;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* It's hard to see what the mode and size of a function ought to
	 be, but we do know the alignment is FUNCTION_BOUNDARY, so
	 make it consistent with that.  */
      SET_TYPE_MODE (type,
		     int_mode_for_size (FUNCTION_BOUNDARY, 0).else_blk ());
      TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
      TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* The mode was chosen when the type was created; derive size
	   and precision from it.  Pointers are always unsigned.  */
	scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	TYPE_UNSIGNED (type) = 1;
	TYPE_PRECISION (type) = GET_MODE_PRECISION (mode);
      }
      break;

    case ARRAY_TYPE:
      {
	tree index = TYPE_DOMAIN (type);
	tree element = TREE_TYPE (type);

	/* We need to know both bounds in order to compute the size.  */
	if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
	    && TYPE_SIZE (element))
	  {
	    tree ub = TYPE_MAX_VALUE (index);
	    tree lb = TYPE_MIN_VALUE (index);
	    tree element_size = TYPE_SIZE (element);
	    tree length;

	    /* Make sure that an array of zero-sized element is zero-sized
	       regardless of its extent.  */
	    if (integer_zerop (element_size))
	      length = size_zero_node;

	    /* The computation should happen in the original signedness so
	       that (possible) negative values are handled appropriately
	       when determining overflow.  */
	    else
	      {
		/* ??? When it is obvious that the range is signed
		   represent it using ssizetype.  */
		if (TREE_CODE (lb) == INTEGER_CST
		    && TREE_CODE (ub) == INTEGER_CST
		    && TYPE_UNSIGNED (TREE_TYPE (lb))
		    && tree_int_cst_lt (ub, lb))
		  {
		    /* UB < LB in an unsigned domain means the bounds are
		       really signed; reinterpret both in ssizetype.  */
		    lb = wide_int_to_tree (ssizetype,
					   offset_int::from (wi::to_wide (lb),
							     SIGNED));
		    ub = wide_int_to_tree (ssizetype,
					   offset_int::from (wi::to_wide (ub),
							     SIGNED));
		  }
		/* Number of elements is UB - LB + 1, computed in the
		   bounds' own type, then converted to sizetype.  */
		length
		  = fold_convert (sizetype,
				  size_binop (PLUS_EXPR,
					      build_int_cst (TREE_TYPE (lb), 1),
					      size_binop (MINUS_EXPR, ub, lb)));
	      }

	    /* ??? We have no way to distinguish a null-sized array from an
	       array spanning the whole sizetype range, so we arbitrarily
	       decide that [0, -1] is the only valid representation.  */
	    if (integer_zerop (length)
		&& TREE_OVERFLOW (length)
		&& integer_zerop (lb))
	      length = size_zero_node;

	    TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
					   bits_from_bytes (length));

	    /* If we know the size of the element, calculate the total size
	       directly, rather than do some division thing below.  This
	       optimization helps Fortran assumed-size arrays (where the
	       size of the array is determined at runtime) substantially.  */
	    if (TYPE_SIZE_UNIT (element))
	      TYPE_SIZE_UNIT (type)
		= size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
	  }

	/* Now round the alignment and size,
	   using machine-dependent criteria if any.  */

	unsigned align = TYPE_ALIGN (element);
	if (TYPE_USER_ALIGN (type))
	  align = MAX (align, TYPE_ALIGN (type));
	else
	  TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
	if (!TYPE_WARN_IF_NOT_ALIGN (type))
	  SET_TYPE_WARN_IF_NOT_ALIGN (type,
				      TYPE_WARN_IF_NOT_ALIGN (element));
#ifdef ROUND_TYPE_ALIGN
	align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT);
#else
	align = MAX (align, BITS_PER_UNIT);
#endif
	SET_TYPE_ALIGN (type, align);
	compute_array_mode (type);
	if (AGGREGATE_TYPE_P (element))
	  TYPE_TYPELESS_STORAGE (type) = TYPE_TYPELESS_STORAGE (element);
	/* When the element size is constant, check that it is at least as
	   large as the element alignment.  */
	if (TYPE_SIZE_UNIT (element)
	    && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
	    /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
	       TYPE_ALIGN_UNIT.  */
	    && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
	    && !integer_zerop (TYPE_SIZE_UNIT (element)))
	  {
	    if (compare_tree_int (TYPE_SIZE_UNIT (element),
				  TYPE_ALIGN_UNIT (element)) < 0)
	      error ("alignment of array elements is greater than "
		     "element size");
	    /* The size must also be a multiple of the alignment, or
	       successive elements would be misaligned.  */
	    else if (TYPE_ALIGN_UNIT (element) > 1
		     && (wi::zext (wi::to_wide (TYPE_SIZE_UNIT (element)),
				   ffs_hwi (TYPE_ALIGN_UNIT (element)) - 1)
			 != 0))
	      error ("size of array element is not a multiple of its "
		     "alignment");
	  }
	break;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	record_layout_info rli;

	/* Initialize the layout information.  */
	rli = start_record_layout (type);

	/* If this is a QUAL_UNION_TYPE, we want to process the fields
	   in the reverse order in building the COND_EXPR that denotes
	   its size.  We reverse them again later.  */
	if (TREE_CODE (type) == QUAL_UNION_TYPE)
	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

	/* Place all the fields.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  place_field (rli, field);

	if (TREE_CODE (type) == QUAL_UNION_TYPE)
	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

	/* Finish laying out the record.  */
	finish_record_layout (rli, /*free_p=*/true);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
     records and unions, finish_record_layout already called this
     function.  */
  if (!RECORD_OR_UNION_TYPE_P (type))
    finalize_type_size (type);

  /* We should never see alias sets on incomplete aggregates.  And we
     should not call layout_type on not incomplete aggregates.  */
  if (AGGREGATE_TYPE_P (type))
    gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
}
2813 :
2814 : /* Return the least alignment required for type TYPE. */
2815 :
2816 : unsigned int
2817 39731317 : min_align_of_type (tree type)
2818 : {
2819 39731317 : unsigned int align = TYPE_ALIGN (type);
2820 39731317 : if (!TYPE_USER_ALIGN (type))
2821 : {
2822 72117407 : align = MIN (align, BIGGEST_ALIGNMENT);
2823 : #ifdef BIGGEST_FIELD_ALIGNMENT
2824 : align = MIN (align, BIGGEST_FIELD_ALIGNMENT);
2825 : #endif
2826 37054968 : unsigned int field_align = align;
2827 : #ifdef ADJUST_FIELD_ALIGN
2828 37054968 : field_align = ADJUST_FIELD_ALIGN (NULL_TREE, type, field_align);
2829 : #endif
2830 37054968 : align = MIN (align, field_align);
2831 : }
2832 39731317 : return align / BITS_PER_UNIT;
2833 : }
2834 :
2835 : /* Create and return a type for signed integers of PRECISION bits. */
2836 :
2837 : tree
2838 2693058 : make_signed_type (int precision)
2839 : {
2840 2693058 : tree type = make_node (INTEGER_TYPE);
2841 :
2842 2693058 : TYPE_PRECISION (type) = precision;
2843 :
2844 2693058 : fixup_signed_type (type);
2845 2693058 : return type;
2846 : }
2847 :
2848 : /* Create and return a type for unsigned integers of PRECISION bits. */
2849 :
2850 : tree
2851 10098033 : make_unsigned_type (int precision)
2852 : {
2853 10098033 : tree type = make_node (INTEGER_TYPE);
2854 :
2855 10098033 : TYPE_PRECISION (type) = precision;
2856 :
2857 10098033 : fixup_unsigned_type (type);
2858 10098033 : return type;
2859 : }
2860 :
2861 : /* Create and return a type for fract of PRECISION bits, UNSIGNEDP,
2862 : and SATP. */
2863 :
2864 : tree
2865 5708440 : make_fract_type (int precision, int unsignedp, int satp)
2866 : {
2867 5708440 : tree type = make_node (FIXED_POINT_TYPE);
2868 :
2869 5708440 : TYPE_PRECISION (type) = precision;
2870 :
2871 5708440 : if (satp)
2872 2854220 : TYPE_SATURATING (type) = 1;
2873 :
2874 : /* Lay out the type: set its alignment, size, etc. */
2875 5708440 : TYPE_UNSIGNED (type) = unsignedp;
2876 5708440 : enum mode_class mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
2877 5708440 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2878 5708440 : layout_type (type);
2879 :
2880 5708440 : return type;
2881 : }
2882 :
2883 : /* Create and return a type for accum of PRECISION bits, UNSIGNEDP,
2884 : and SATP. */
2885 :
2886 : tree
2887 4566752 : make_accum_type (int precision, int unsignedp, int satp)
2888 : {
2889 4566752 : tree type = make_node (FIXED_POINT_TYPE);
2890 :
2891 4566752 : TYPE_PRECISION (type) = precision;
2892 :
2893 4566752 : if (satp)
2894 2283376 : TYPE_SATURATING (type) = 1;
2895 :
2896 : /* Lay out the type: set its alignment, size, etc. */
2897 4566752 : TYPE_UNSIGNED (type) = unsignedp;
2898 4566752 : enum mode_class mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
2899 4566752 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2900 4566752 : layout_type (type);
2901 :
2902 4566752 : return type;
2903 : }
2904 :
2905 : /* Initialize sizetypes so layout_type can use them. */
2906 :
void
initialize_sizetypes (void)
{
  int precision, bprecision;

  /* Get sizetypes precision from the SIZE_TYPE target macro.  */
  if (strcmp (SIZETYPE, "unsigned int") == 0)
    precision = INT_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long unsigned int") == 0)
    precision = LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
    precision = LONG_LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "short unsigned int") == 0)
    precision = SHORT_TYPE_SIZE;
  else
    {
      /* SIZETYPE may name one of the enabled __intN types instead of a
	 standard C type; scan for a match.  */
      int i;

      precision = -1;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZETYPE) == 0
		|| strcmp (altname, SIZETYPE) == 0)
	      {
		precision = int_n_data[i].bitsize;
	      }
	  }
      if (precision == -1)
	gcc_unreachable ();
    }

  /* bitsizetype must be able to hold byte sizes scaled to bits, hence
     the extra LOG2_BITS_PER_UNIT (+1) bits, capped by what the target
     and the host double-int representation can handle.  */
  bprecision
    = MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE);
  bprecision
    = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision).require ());
  if (bprecision > HOST_BITS_PER_DOUBLE_INT)
    bprecision = HOST_BITS_PER_DOUBLE_INT;

  /* Create stubs for sizetype and bitsizetype so we can create constants.  */
  sizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (sizetype) = get_identifier ("sizetype");
  TYPE_PRECISION (sizetype) = precision;
  TYPE_UNSIGNED (sizetype) = 1;
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
  TYPE_PRECISION (bitsizetype) = bprecision;
  TYPE_UNSIGNED (bitsizetype) = 1;

  /* Now layout both types manually.  (layout_type itself needs these
     types to build size constants, so it cannot be used here.)  */
  scalar_int_mode mode = smallest_int_mode_for_size (precision).require ();
  SET_TYPE_MODE (sizetype, mode);
  SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
  TYPE_SIZE (sizetype) = bitsize_int (precision);
  TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (mode));
  set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);

  mode = smallest_int_mode_for_size (bprecision).require ();
  SET_TYPE_MODE (bitsizetype, mode);
  SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
  TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
  TYPE_SIZE_UNIT (bitsizetype) = size_int (GET_MODE_SIZE (mode));
  set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);

  /* Create the signed variants of *sizetype.  */
  ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
  TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
  sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
  TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
}
2981 :
2982 : /* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE
2983 : or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2984 : for TYPE, based on the PRECISION and whether or not the TYPE
2985 : IS_UNSIGNED. PRECISION need not correspond to a width supported
2986 : natively by the hardware; for example, on a machine with 8-bit,
2987 : 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2988 : 61. */
2989 :
2990 : void
2991 16249705 : set_min_and_max_values_for_integral_type (tree type,
2992 : int precision,
2993 : signop sgn)
2994 : {
2995 : /* For bitfields with zero width we end up creating integer types
2996 : with zero precision. Don't assign any minimum/maximum values
2997 : to those types, they don't have any valid value. */
2998 16249705 : if (precision < 1)
2999 : return;
3000 :
3001 16249381 : gcc_assert (precision <= WIDE_INT_MAX_PRECISION);
3002 :
3003 16249381 : TYPE_MIN_VALUE (type)
3004 32498762 : = wide_int_to_tree (type, wi::min_value (precision, sgn));
3005 16249381 : TYPE_MAX_VALUE (type)
3006 32500042 : = wide_int_to_tree (type, wi::max_value (precision, sgn));
3007 : }
3008 :
3009 : /* Set the extreme values of TYPE based on its precision in bits,
3010 : then lay it out. Used when make_signed_type won't do
3011 : because the tree code is not INTEGER_TYPE. */
3012 :
3013 : void
3014 3244298 : fixup_signed_type (tree type)
3015 : {
3016 3244298 : int precision = TYPE_PRECISION (type);
3017 :
3018 3244298 : set_min_and_max_values_for_integral_type (type, precision, SIGNED);
3019 :
3020 : /* Lay out the type: set its alignment, size, etc. */
3021 3244298 : layout_type (type);
3022 3244298 : }
3023 :
3024 : /* Set the extreme values of TYPE based on its precision in bits,
3025 : then lay it out. This is used both in `make_unsigned_type'
3026 : and for enumeral types. */
3027 :
3028 : void
3029 10958772 : fixup_unsigned_type (tree type)
3030 : {
3031 10958772 : int precision = TYPE_PRECISION (type);
3032 :
3033 10958772 : TYPE_UNSIGNED (type) = 1;
3034 :
3035 10958772 : set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);
3036 :
3037 : /* Lay out the type: set its alignment, size, etc. */
3038 10958772 : layout_type (type);
3039 10958772 : }
3040 :
3041 : /* Construct an iterator for a bitfield that spans BITSIZE bits,
3042 : starting at BITPOS.
3043 :
3044 : BITREGION_START is the bit position of the first bit in this
3045 : sequence of bit fields. BITREGION_END is the last bit in this
3046 : sequence. If these two fields are non-zero, we should restrict the
3047 : memory access to that range. Otherwise, we are allowed to touch
3048 : any adjacent non bit-fields.
3049 :
3050 : ALIGN is the alignment of the underlying object in bits.
3051 : VOLATILEP says whether the bitfield is volatile. */
3052 :
3053 3305278 : bit_field_mode_iterator
3054 : ::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3055 : poly_int64 bitregion_start,
3056 : poly_int64 bitregion_end,
3057 3305278 : unsigned int align, bool volatilep)
3058 3305278 : : m_mode (NARROWEST_INT_MODE), m_bitsize (bitsize),
3059 3305278 : m_bitpos (bitpos), m_bitregion_start (bitregion_start),
3060 3305278 : m_bitregion_end (bitregion_end), m_align (align),
3061 3305278 : m_volatilep (volatilep), m_count (0)
3062 : {
3063 3305278 : if (known_eq (m_bitregion_end, 0))
3064 : {
3065 : /* We can assume that any aligned chunk of ALIGN bits that overlaps
3066 : the bitfield is mapped and won't trap, provided that ALIGN isn't
3067 : too large. The cap is the biggest required alignment for data,
3068 : or at least the word size. And force one such chunk at least. */
3069 293830 : unsigned HOST_WIDE_INT units
3070 1068592 : = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
3071 293830 : if (bitsize <= 0)
3072 : bitsize = 1;
3073 293830 : HOST_WIDE_INT end = bitpos + bitsize + units - 1;
3074 293830 : m_bitregion_end = end - end % units - 1;
3075 : }
3076 3305278 : }
3077 :
3078 : /* Calls to this function return successively larger modes that can be used
3079 : to represent the bitfield. Return true if another bitfield mode is
3080 : available, storing it in *OUT_MODE if so. */
3081 :
bool
bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
{
  scalar_int_mode mode;
  /* Resume from wherever the previous call left off (m_mode); a
     successful return advances m_mode past the delivered mode.  */
  for (; m_mode.exists (&mode); m_mode = GET_MODE_WIDER_MODE (mode))
    {
      unsigned int unit = GET_MODE_BITSIZE (mode);

      /* Skip modes that don't have full precision.  */
      if (unit != GET_MODE_PRECISION (mode))
	continue;

      /* Stop if the mode is too wide to handle efficiently.  */
      if (unit > MAX_FIXED_MODE_SIZE)
	break;

      /* Don't deliver more than one multiword mode; the smallest one
	 should be used.  */
      if (m_count > 0 && unit > BITS_PER_WORD)
	break;

      /* Skip modes that are too small.  SUBSTART/SUBEND locate the
	 bitfield within a UNIT-sized chunk; it must fit entirely.  */
      unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
      unsigned HOST_WIDE_INT subend = substart + m_bitsize;
      if (subend > unit)
	continue;

      /* Stop if the mode goes outside the bitregion.  Wider modes
	 would too, so these are terminating conditions, not skips.  */
      HOST_WIDE_INT start = m_bitpos - substart;
      if (maybe_ne (m_bitregion_start, 0)
	  && maybe_lt (start, m_bitregion_start))
	break;
      HOST_WIDE_INT end = start + unit;
      if (maybe_gt (end, m_bitregion_end + 1))
	break;

      /* Stop if the mode requires too much alignment.  */
      if (GET_MODE_ALIGNMENT (mode) > m_align
	  && targetm.slow_unaligned_access (mode, m_align))
	break;

      *out_mode = mode;
      m_mode = GET_MODE_WIDER_MODE (mode);
      m_count++;
      return true;
    }
  return false;
}
3130 :
3131 : /* Return true if smaller modes are generally preferred for this kind
3132 : of bitfield. */
3133 :
3134 : bool
3135 3255859 : bit_field_mode_iterator::prefer_smaller_modes ()
3136 : {
3137 3255859 : return (m_volatilep
3138 3255859 : ? targetm.narrow_volatile_bitfield ()
3139 3255859 : : !SLOW_BYTE_ACCESS);
3140 : }
3141 :
3142 : /* Find the best machine mode to use when referencing a bit field of length
3143 : BITSIZE bits starting at BITPOS.
3144 :
3145 : BITREGION_START is the bit position of the first bit in this
3146 : sequence of bit fields. BITREGION_END is the last bit in this
3147 : sequence. If these two fields are non-zero, we should restrict the
3148 : memory access to that range. Otherwise, we are allowed to touch
3149 : any adjacent non bit-fields.
3150 :
3151 : The chosen mode must have no more than LARGEST_MODE_BITSIZE bits.
3152 : INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller
3153 : doesn't want to apply a specific limit.
3154 :
3155 : If no mode meets all these conditions, we return VOIDmode.
3156 :
3157 : The underlying object is known to be aligned to a boundary of ALIGN bits.
3158 :
3159 : If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
3160 : smallest mode meeting these conditions.
3161 :
3162 : If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
3163 : largest mode (but a mode no wider than UNITS_PER_WORD) that meets
3164 : all the conditions.
3165 :
3166 : If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
3167 : decide which of the above modes should be used. */
3168 :
bool
get_best_mode (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	       poly_uint64 bitregion_start, poly_uint64 bitregion_end,
	       unsigned int align,
	       unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
	       scalar_int_mode *best_mode)
{
  bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
				bitregion_end, align, volatilep);
  scalar_int_mode mode;
  bool found = false;
  /* The iterator yields candidate modes from narrowest to widest;
     record each acceptable one, stopping at the first when smaller
     modes are preferred, otherwise keeping the last (widest).  */
  while (iter.next_mode (&mode)
	 /* ??? For historical reasons, reject modes that would normally
	    receive greater alignment, even if unaligned accesses are
	    acceptable.  This has both advantages and disadvantages.
	    Removing this check means that something like:

	       struct s { unsigned int x; unsigned int y; };
	       int f (struct s *s) { return s->x == 0 && s->y == 0; }

	    can be implemented using a single load and compare on
	    64-bit machines that have no alignment restrictions.
	    For example, on powerpc64-linux-gnu, we would generate:

		    ld 3,0(3)
		    cntlzd 3,3
		    srdi 3,3,6
		    blr

	    rather than:

		    lwz 9,0(3)
		    cmpwi 7,9,0
		    bne 7,.L3
		    lwz 3,4(3)
		    cntlzw 3,3
		    srwi 3,3,5
		    extsw 3,3
		    blr
		    .p2align 4,,15
	    .L3:
		    li 3,0
		    blr

	    However, accessing more than one field can make life harder
	    for the gimple optimizers.  For example, gcc.dg/vect/bb-slp-5.c
	    has a series of unsigned short copies followed by a series of
	    unsigned short comparisons.  With this check, both the copies
	    and comparisons remain 16-bit accesses and FRE is able
	    to eliminate the latter.  Without the check, the comparisons
	    can be done using 2 64-bit operations, which FRE isn't able
	    to handle in the same way.

	    Either way, it would probably be worth disabling this check
	    during expand.  One particular example where removing the
	    check would help is the get_best_mode call in store_bit_field.
	    If we are given a memory bitregion of 128 bits that is aligned
	    to a 64-bit boundary, and the bitfield we want to modify is
	    in the second half of the bitregion, this check causes
	    store_bitfield to turn the memory into a 64-bit reference
	    to the _first_ half of the region.  We later use
	    adjust_bitfield_address to get a reference to the correct half,
	    but doing so looks to adjust_bitfield_address as though we are
	    moving past the end of the original object, so it drops the
	    associated MEM_EXPR and MEM_OFFSET.  Removing the check
	    causes store_bit_field to keep a 128-bit memory reference,
	    so that the final bitfield reference still has a MEM_EXPR
	    and MEM_OFFSET.  */
	 && GET_MODE_ALIGNMENT (mode) <= align
	 && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize)
    {
      *best_mode = mode;
      found = true;
      if (iter.prefer_smaller_modes ())
	break;
    }

  return found;
}
3248 :
3249 : /* Gets minimal and maximal values for MODE (signed or unsigned depending on
3250 : SIGN). The returned constants are made to be usable in TARGET_MODE. */
3251 :
3252 : void
3253 62694615 : get_mode_bounds (scalar_int_mode mode, int sign,
3254 : scalar_int_mode target_mode,
3255 : rtx *mmin, rtx *mmax)
3256 : {
3257 62694615 : unsigned size = GET_MODE_PRECISION (mode);
3258 62694615 : unsigned HOST_WIDE_INT min_val, max_val;
3259 :
3260 62694615 : gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
3261 :
3262 : /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */
3263 62694615 : if (mode == BImode)
3264 : {
3265 : if (STORE_FLAG_VALUE < 0)
3266 : {
3267 : min_val = STORE_FLAG_VALUE;
3268 : max_val = 0;
3269 : }
3270 : else
3271 : {
3272 : min_val = 0;
3273 : max_val = STORE_FLAG_VALUE;
3274 : }
3275 : }
3276 62694615 : else if (sign)
3277 : {
3278 55924271 : min_val = -(HOST_WIDE_INT_1U << (size - 1));
3279 55924271 : max_val = (HOST_WIDE_INT_1U << (size - 1)) - 1;
3280 : }
3281 : else
3282 : {
3283 6770344 : min_val = 0;
3284 6770344 : max_val = (HOST_WIDE_INT_1U << (size - 1) << 1) - 1;
3285 : }
3286 :
3287 62694615 : *mmin = gen_int_mode (min_val, target_mode);
3288 62694615 : *mmax = gen_int_mode (max_val, target_mode);
3289 62694615 : }
3290 :
3291 : #include "gt-stor-layout.h"
|