Branch data Line data Source code
1 : : /* C-compiler utilities for type and variable storage layout
2 : : Copyright (C) 1987-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "target.h"
25 : : #include "function.h"
26 : : #include "rtl.h"
27 : : #include "tree.h"
28 : : #include "memmodel.h"
29 : : #include "tm_p.h"
30 : : #include "stringpool.h"
31 : : #include "regs.h"
32 : : #include "emit-rtl.h"
33 : : #include "cgraph.h"
34 : : #include "diagnostic-core.h"
35 : : #include "fold-const.h"
36 : : #include "stor-layout.h"
37 : : #include "varasm.h"
38 : : #include "print-tree.h"
39 : : #include "langhooks.h"
40 : : #include "tree-inline.h"
41 : : #include "dumpfile.h"
42 : : #include "gimplify.h"
43 : : #include "attribs.h"
44 : : #include "debug.h"
45 : : #include "calls.h"
46 : :
47 : : /* Data types for the expressions representing sizes of data types.
48 : : The first of them, sizetype, is laid out before any other integer type. */
49 : : tree sizetype_tab[(int) stk_type_kind_last];
50 : :
51 : : /* If nonzero, this is an upper limit on alignment of structure fields.
52 : : The value is measured in bits. */
53 : : unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
54 : :
55 : : static tree self_referential_size (tree);
56 : : static void finalize_record_size (record_layout_info);
57 : : static void finalize_type_size (tree);
58 : : static void place_union_field (record_layout_info, tree);
59 : : static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
60 : : HOST_WIDE_INT, tree);
61 : : extern void debug_rli (record_layout_info);
62 : :
63 : : /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
64 : : to serve as the actual size-expression for a type or decl. */
65 : :
66 : : tree
67 : 605197 : variable_size (tree size)
68 : : {
69 : : /* Obviously. */
70 : 605197 : if (TREE_CONSTANT (size))
71 : : return size;
72 : :
73 : : /* If the size is self-referential, we can't make a SAVE_EXPR (see
74 : : save_expr for the rationale). But we can do something else. */
75 : 605135 : if (CONTAINS_PLACEHOLDER_P (size))
76 : 0 : return self_referential_size (size);
77 : :
78 : : /* If we are in the global binding level, we can't make a SAVE_EXPR
79 : : since it may end up being shared across functions, so it is up
80 : : to the front-end to deal with this case. */
81 : 605135 : if (lang_hooks.decls.global_bindings_p ())
82 : : return size;
83 : :
84 : 305220 : return save_expr (size);
85 : : }
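
/* A minimal usage sketch (editor's illustration; the example_* helper is
   hypothetical, everything else is this file's own API): a front end
   that computes a non-constant byte size funnels it through
   variable_size so the expression is evaluated only once. */
static tree
example_set_array_size (tree array_type, tree nelts)
{
  tree elt_size = TYPE_SIZE_UNIT (TREE_TYPE (array_type));
  tree size = size_binop (MULT_EXPR, fold_convert (sizetype, nelts),
                          elt_size);
  /* Constants and global-scope sizes come back unchanged; anything
     else is wrapped in a SAVE_EXPR. */
  TYPE_SIZE_UNIT (array_type) = variable_size (size);
  return TYPE_SIZE_UNIT (array_type);
}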
86 : :
87 : : /* An array of functions used for self-referential size computation. */
88 : : static GTY(()) vec<tree, va_gc> *size_functions;
89 : :
90 : : /* Return true if T is a self-referential component reference. */
91 : :
92 : : static bool
93 : 0 : self_referential_component_ref_p (tree t)
94 : : {
95 : 0 : if (TREE_CODE (t) != COMPONENT_REF)
96 : : return false;
97 : :
98 : 0 : while (REFERENCE_CLASS_P (t))
99 : 0 : t = TREE_OPERAND (t, 0);
100 : :
101 : 0 : return (TREE_CODE (t) == PLACEHOLDER_EXPR);
102 : : }
103 : :
104 : : /* Similar to copy_tree_r but do not copy component references involving
105 : : PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr
106 : : and substituted in substitute_in_expr. */
107 : :
108 : : static tree
109 : 0 : copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
110 : : {
111 : 0 : enum tree_code code = TREE_CODE (*tp);
112 : :
113 : : /* Stop at types, decls, constants like copy_tree_r. */
114 : 0 : if (TREE_CODE_CLASS (code) == tcc_type
115 : : || TREE_CODE_CLASS (code) == tcc_declaration
116 : 0 : || TREE_CODE_CLASS (code) == tcc_constant)
117 : : {
118 : 0 : *walk_subtrees = 0;
119 : 0 : return NULL_TREE;
120 : : }
121 : :
122 : : /* This is the pattern built in ada/make_aligning_type. */
123 : 0 : else if (code == ADDR_EXPR
124 : 0 : && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
125 : : {
126 : 0 : *walk_subtrees = 0;
127 : 0 : return NULL_TREE;
128 : : }
129 : :
130 : : /* Default case: the component reference. */
131 : 0 : else if (self_referential_component_ref_p (*tp))
132 : : {
133 : 0 : *walk_subtrees = 0;
134 : 0 : return NULL_TREE;
135 : : }
136 : :
137 : : /* We're not supposed to have them in self-referential size trees
138 : : because we wouldn't properly control when they are evaluated.
139 : : However, not creating superfluous SAVE_EXPRs requires accurate
140 : : tracking of readonly-ness all the way down to here, which we
141 : : cannot always guarantee in practice. So punt in this case. */
142 : 0 : else if (code == SAVE_EXPR)
143 : 0 : return error_mark_node;
144 : :
145 : 0 : else if (code == STATEMENT_LIST)
146 : 0 : gcc_unreachable ();
147 : :
148 : 0 : return copy_tree_r (tp, walk_subtrees, data);
149 : : }
150 : :
151 : : /* Given a SIZE expression that is self-referential, return an equivalent
152 : : expression to serve as the actual size expression for a type. */
153 : :
154 : : static tree
155 : 0 : self_referential_size (tree size)
156 : : {
157 : 0 : static unsigned HOST_WIDE_INT fnno = 0;
158 : 0 : vec<tree> self_refs = vNULL;
159 : 0 : tree param_type_list = NULL, param_decl_list = NULL;
160 : 0 : tree t, ref, return_type, fntype, fnname, fndecl;
161 : 0 : unsigned int i;
162 : 0 : char buf[128];
163 : 0 : vec<tree, va_gc> *args = NULL;
164 : :
165 : : /* Do not factor out simple operations. */
166 : 0 : t = skip_simple_constant_arithmetic (size);
167 : 0 : if (TREE_CODE (t) == CALL_EXPR || self_referential_component_ref_p (t))
168 : : return size;
169 : :
170 : : /* Collect the list of self-references in the expression. */
171 : 0 : find_placeholder_in_expr (size, &self_refs);
172 : 0 : gcc_assert (self_refs.length () > 0);
173 : :
174 : : /* Obtain a private copy of the expression. */
175 : 0 : t = size;
176 : 0 : if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
177 : : return size;
178 : 0 : size = t;
179 : :
180 : : /* Build the parameter and argument lists in parallel; also
181 : : substitute the former for the latter in the expression. */
182 : 0 : vec_alloc (args, self_refs.length ());
183 : 0 : FOR_EACH_VEC_ELT (self_refs, i, ref)
184 : : {
185 : 0 : tree subst, param_name, param_type, param_decl;
186 : :
187 : 0 : if (DECL_P (ref))
188 : : {
189 : : /* We shouldn't have true variables here. */
190 : 0 : gcc_assert (TREE_READONLY (ref));
191 : : subst = ref;
192 : : }
193 : : /* This is the pattern built in ada/make_aligning_type. */
194 : 0 : else if (TREE_CODE (ref) == ADDR_EXPR)
195 : : subst = ref;
196 : : /* Default case: the component reference. */
197 : : else
198 : 0 : subst = TREE_OPERAND (ref, 1);
199 : :
200 : 0 : sprintf (buf, "p%d", i);
201 : 0 : param_name = get_identifier (buf);
202 : 0 : param_type = TREE_TYPE (ref);
203 : 0 : param_decl
204 : 0 : = build_decl (input_location, PARM_DECL, param_name, param_type);
205 : 0 : DECL_ARG_TYPE (param_decl) = param_type;
206 : 0 : DECL_ARTIFICIAL (param_decl) = 1;
207 : 0 : TREE_READONLY (param_decl) = 1;
208 : :
209 : 0 : size = substitute_in_expr (size, subst, param_decl);
210 : :
211 : 0 : param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
212 : 0 : param_decl_list = chainon (param_decl, param_decl_list);
213 : 0 : args->quick_push (ref);
214 : : }
215 : :
216 : 0 : self_refs.release ();
217 : :
218 : : /* Append 'void' to indicate that the number of parameters is fixed. */
219 : 0 : param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
220 : :
221 : : /* The 3 lists have been created in reverse order. */
222 : 0 : param_type_list = nreverse (param_type_list);
223 : 0 : param_decl_list = nreverse (param_decl_list);
224 : :
225 : : /* Build the function type. */
226 : 0 : return_type = TREE_TYPE (size);
227 : 0 : fntype = build_function_type (return_type, param_type_list);
228 : :
229 : : /* Build the function declaration. */
230 : 0 : sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
231 : 0 : fnname = get_file_function_name (buf);
232 : 0 : fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
233 : 0 : for (t = param_decl_list; t; t = DECL_CHAIN (t))
234 : 0 : DECL_CONTEXT (t) = fndecl;
235 : 0 : DECL_ARGUMENTS (fndecl) = param_decl_list;
236 : 0 : DECL_RESULT (fndecl)
237 : 0 : = build_decl (input_location, RESULT_DECL, 0, return_type);
238 : 0 : DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
239 : :
240 : : /* The function has been created by the compiler and we don't
241 : : want to emit debug info for it. */
242 : 0 : DECL_ARTIFICIAL (fndecl) = 1;
243 : 0 : DECL_IGNORED_P (fndecl) = 1;
244 : :
245 : : /* It is supposed to be "const" and never throw. */
246 : 0 : TREE_READONLY (fndecl) = 1;
247 : 0 : TREE_NOTHROW (fndecl) = 1;
248 : :
249 : : /* We want it to be inlined when this is deemed profitable, as
250 : : well as discarded if every call has been integrated. */
251 : 0 : DECL_DECLARED_INLINE_P (fndecl) = 1;
252 : :
253 : : /* It is made up of a single return statement. */
254 : 0 : DECL_INITIAL (fndecl) = make_node (BLOCK);
255 : 0 : BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
256 : 0 : t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
257 : 0 : DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
258 : 0 : TREE_STATIC (fndecl) = 1;
259 : :
260 : : /* Put it onto the list of size functions. */
261 : 0 : vec_safe_push (size_functions, fndecl);
262 : :
263 : : /* Replace the original expression with a call to the size function. */
264 : 0 : return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
265 : : }
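
/* Illustration (editor's sketch) of the transformation performed above.
   A self-referential size, say an Ada record whose size reads one of
   its own discriminants through a PLACEHOLDER_EXPR:

     size = (PLACEHOLDER_EXPR).n * 8 + 32

   is rewritten into an artificial "const" size function plus a call.
   Schematically (parameter and return types follow the field and size
   expression; the SZ0 name really comes from get_file_function_name):

     static SZ0 (p0) { return p0 * 8 + 32; }
     size = SZ0 ((PLACEHOLDER_EXPR).n)

   The PLACEHOLDER_EXPR now appears only as a call argument, where it
   can later be substituted object by object, and the function body
   itself is placeholder-free. */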
266 : :
267 : : /* Take, queue and compile all the size functions. It is essential that
268 : : the size functions be gimplified at the very end of the compilation
269 : : in order to guarantee transparent handling of self-referential sizes.
270 : : Otherwise the GENERIC inliner would not be able to inline them back
271 : : at each of their call sites, thus creating artificial non-constant
272 : : size expressions which would trigger nasty problems later on. */
273 : :
274 : : void
275 : 254293 : finalize_size_functions (void)
276 : : {
277 : 254293 : unsigned int i;
278 : 254293 : tree fndecl;
279 : :
280 : 254293 : for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
281 : : {
282 : 0 : allocate_struct_function (fndecl, false);
283 : 0 : set_cfun (NULL);
284 : 0 : dump_function (TDI_original, fndecl);
285 : :
286 : : /* As these functions are used to describe the layout of variable-length
287 : : structures, debug info generation needs their implementation. */
288 : 0 : debug_hooks->size_function (fndecl);
289 : 0 : gimplify_function_tree (fndecl);
290 : 0 : cgraph_node::finalize_function (fndecl, false);
291 : : }
292 : :
293 : 254293 : vec_free (size_functions);
294 : 254293 : }
295 : :
296 : : /* Return a machine mode of class MCLASS with SIZE bits of precision,
297 : : if one exists. The mode may have padding bits as well as the SIZE
298 : : value bits. If LIMIT is nonzero, disregard modes wider than
299 : : MAX_FIXED_MODE_SIZE. */
300 : :
301 : : opt_machine_mode
302 : 1051529994 : mode_for_size (poly_uint64 size, enum mode_class mclass, int limit)
303 : : {
304 : 1051529994 : machine_mode mode;
305 : 1051529994 : int i;
306 : :
307 : 1181861782 : if (limit && maybe_gt (size, (unsigned int) MAX_FIXED_MODE_SIZE))
308 : 33855570 : return opt_machine_mode ();
309 : :
310 : : /* Get the first mode which has this size, in the specified class. */
311 : 1468626090 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
312 : 1438887387 : if (known_eq (GET_MODE_PRECISION (mode), size))
313 : 987935721 : return mode;
314 : :
315 : 29738703 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
316 : 59446630 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
317 : 29723315 : if (known_eq (int_n_data[i].bitsize, size)
318 : 29723315 : && int_n_enabled_p[i])
319 : 0 : return int_n_data[i].m;
320 : :
321 : 29738703 : return opt_machine_mode ();
322 : : }
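
/* Usage sketch (editor's illustration; example_* is hypothetical): an
   opt_machine_mode result must be tested with exists () before use,
   the same pattern used throughout this file. */
static machine_mode
example_int_mode_for_bits (poly_uint64 bits)
{
  machine_mode mode;
  if (mode_for_size (bits, MODE_INT, /*limit=*/1).exists (&mode))
    return mode;   /* e.g. 32 bits gives SImode on typical targets. */
  return BLKmode;  /* No integer mode with that exact precision. */
}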
323 : :
324 : : /* Similar, except passed a tree node. */
325 : :
326 : : opt_machine_mode
327 : 123733005 : mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
328 : : {
329 : 123733005 : unsigned HOST_WIDE_INT uhwi;
330 : 123733005 : unsigned int ui;
331 : :
332 : 123733005 : if (!tree_fits_uhwi_p (size))
333 : 258428 : return opt_machine_mode ();
334 : 123474577 : uhwi = tree_to_uhwi (size);
335 : 123474577 : ui = uhwi;
336 : 123474577 : if (uhwi != ui)
337 : 539 : return opt_machine_mode ();
338 : 123474038 : return mode_for_size (ui, mclass, limit);
339 : : }
340 : :
341 : : /* Return the narrowest mode of class MCLASS that contains at least
342 : : SIZE bits, if such a mode exists. */
343 : :
344 : : opt_machine_mode
345 : 18451102 : smallest_mode_for_size (poly_uint64 size, enum mode_class mclass)
346 : : {
347 : 18451102 : machine_mode mode = VOIDmode;
348 : 18451102 : int i;
349 : :
350 : : /* Get the first mode which has at least this size, in the
351 : : specified class. */
352 : 61448260 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
353 : 61448260 : if (known_ge (GET_MODE_PRECISION (mode), size))
354 : : break;
355 : :
356 : 18451102 : if (mode == VOIDmode)
357 : 0 : return opt_machine_mode ();
358 : :
359 : 18451102 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
360 : 36902204 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
361 : 18451102 : if (known_ge (int_n_data[i].bitsize, size)
362 : 18451027 : && known_lt (int_n_data[i].bitsize, GET_MODE_PRECISION (mode))
363 : 18451102 : && int_n_enabled_p[i])
364 : 0 : mode = int_n_data[i].m;
365 : :
366 : 18451102 : return mode;
367 : : }
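
/* Editor's sketch contrasting the two lookups (example_* is
   hypothetical): mode_for_size demands an exact precision match,
   while smallest_mode_for_size rounds up. */
static machine_mode
example_container_mode (unsigned int bits)
{
  /* For bits == 17 this typically yields SImode, even though
     mode_for_size (17, MODE_INT, 1) finds no exact match;
     require () aborts if no mode is wide enough. */
  return smallest_mode_for_size (bits, MODE_INT).require ();
}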
368 : :
369 : : /* Return an integer mode of exactly the same size as MODE, if one exists. */
370 : :
371 : : opt_scalar_int_mode
372 : 1767758 : int_mode_for_mode (machine_mode mode)
373 : : {
374 : 1767758 : switch (GET_MODE_CLASS (mode))
375 : : {
376 : 1326601 : case MODE_INT:
377 : 1326601 : case MODE_PARTIAL_INT:
378 : 1326601 : return as_a <scalar_int_mode> (mode);
379 : :
380 : 259102 : case MODE_COMPLEX_INT:
381 : 259102 : case MODE_COMPLEX_FLOAT:
382 : 259102 : case MODE_FLOAT:
383 : 259102 : case MODE_DECIMAL_FLOAT:
384 : 259102 : case MODE_FRACT:
385 : 259102 : case MODE_ACCUM:
386 : 259102 : case MODE_UFRACT:
387 : 259102 : case MODE_UACCUM:
388 : 259102 : case MODE_VECTOR_BOOL:
389 : 259102 : case MODE_VECTOR_INT:
390 : 259102 : case MODE_VECTOR_FLOAT:
391 : 259102 : case MODE_VECTOR_FRACT:
392 : 259102 : case MODE_VECTOR_ACCUM:
393 : 259102 : case MODE_VECTOR_UFRACT:
394 : 259102 : case MODE_VECTOR_UACCUM:
395 : 518204 : return int_mode_for_size (GET_MODE_BITSIZE (mode), 0);
396 : :
397 : 0 : case MODE_OPAQUE:
398 : 0 : return opt_scalar_int_mode ();
399 : :
400 : 182055 : case MODE_RANDOM:
401 : 182055 : if (mode == BLKmode)
402 : 182055 : return opt_scalar_int_mode ();
403 : :
404 : : /* fall through */
405 : :
406 : 0 : case MODE_CC:
407 : 0 : default:
408 : 0 : gcc_unreachable ();
409 : : }
410 : : }
411 : :
412 : : /* Find a mode that can be used for efficient bitwise operations on MODE,
413 : : if one exists. */
414 : :
415 : : opt_machine_mode
416 : 21122 : bitwise_mode_for_mode (machine_mode mode)
417 : : {
418 : : /* Quick exit if we already have a suitable mode. */
419 : 21122 : scalar_int_mode int_mode;
420 : 21122 : if (is_a <scalar_int_mode> (mode, &int_mode)
421 : 38584 : && GET_MODE_BITSIZE (int_mode) <= MAX_FIXED_MODE_SIZE)
422 : 19292 : return int_mode;
423 : :
424 : : /* Reuse the sanity checks from int_mode_for_mode. */
425 : 1830 : gcc_checking_assert ((int_mode_for_mode (mode), true));
426 : :
427 : 3660 : poly_int64 bitsize = GET_MODE_BITSIZE (mode);
428 : :
429 : : /* Try to replace complex modes with complex modes. In general we
430 : : expect both components to be processed independently, so we only
431 : : care whether there is a register for the inner mode. */
432 : 1830 : if (COMPLEX_MODE_P (mode))
433 : : {
434 : 400 : machine_mode trial = mode;
435 : 400 : if ((GET_MODE_CLASS (trial) == MODE_COMPLEX_INT
436 : 460 : || mode_for_size (bitsize, MODE_COMPLEX_INT, false).exists (&trial))
437 : 1072 : && have_regs_of_mode[GET_MODE_INNER (trial)])
438 : 276 : return trial;
439 : : }
440 : :
441 : : /* Try to replace vector modes with vector modes. Also try using vector
442 : : modes if an integer mode would be too big. */
443 : 1345 : if (VECTOR_MODE_P (mode)
444 : 4242 : || maybe_gt (bitsize, MAX_FIXED_MODE_SIZE))
445 : : {
446 : 347 : machine_mode trial = mode;
447 : 347 : if ((GET_MODE_CLASS (trial) == MODE_VECTOR_INT
448 : 211 : || mode_for_size (bitsize, MODE_VECTOR_INT, 0).exists (&trial))
449 : 283 : && have_regs_of_mode[trial]
450 : 557 : && targetm.vector_mode_supported_p (trial))
451 : 210 : return trial;
452 : : }
453 : :
454 : : /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE. */
455 : 1344 : return mode_for_size (bitsize, MODE_INT, true);
456 : : }
457 : :
458 : : /* Find a type that can be used for efficient bitwise operations on MODE.
459 : : Return null if no such mode exists. */
460 : :
461 : : tree
462 : 21122 : bitwise_type_for_mode (machine_mode mode)
463 : : {
464 : 21122 : if (!bitwise_mode_for_mode (mode).exists (&mode))
465 : 137 : return NULL_TREE;
466 : :
467 : 20985 : unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
468 : 20985 : tree inner_type = build_nonstandard_integer_type (inner_size, true);
469 : :
470 : 20985 : if (VECTOR_MODE_P (mode))
471 : 210 : return build_vector_type_for_mode (inner_type, mode);
472 : :
473 : 20775 : if (COMPLEX_MODE_P (mode))
474 : 276 : return build_complex_type (inner_type);
475 : :
476 : 40998 : gcc_checking_assert (GET_MODE_INNER (mode) == mode);
477 : : return inner_type;
478 : : }
479 : :
480 : : /* Find a mode that can be used for efficient bitwise operations on SIZE
481 : : bits, if one exists. */
482 : :
483 : : opt_machine_mode
484 : 19461 : bitwise_mode_for_size (poly_uint64 size)
485 : : {
486 : 38922 : if (known_le (size, (unsigned int) MAX_FIXED_MODE_SIZE))
487 : 19248 : return mode_for_size (size, MODE_INT, true);
488 : :
489 : : machine_mode mode, ret = VOIDmode;
490 : 3715 : FOR_EACH_MODE_FROM (mode, MIN_MODE_VECTOR_INT)
491 : 7422 : if (known_eq (GET_MODE_BITSIZE (mode), size)
492 : 229 : && (ret == VOIDmode || GET_MODE_INNER (mode) == QImode)
493 : 229 : && have_regs_of_mode[mode]
494 : 3920 : && targetm.vector_mode_supported_p (mode))
495 : : {
496 : 418 : if (GET_MODE_INNER (mode) == QImode)
497 : 209 : return mode;
498 : 0 : else if (ret == VOIDmode)
499 : 3502 : ret = mode;
500 : : }
501 : 4 : if (ret != VOIDmode)
502 : 0 : return ret;
503 : 4 : return opt_machine_mode ();
504 : : }
505 : :
506 : : /* Find a mode that is suitable for representing a vector with NUNITS
507 : : elements of mode INNERMODE, if one exists. The returned mode can be
508 : : either an integer mode or a vector mode. */
509 : :
510 : : opt_machine_mode
511 : 68597346 : mode_for_vector (scalar_mode innermode, poly_uint64 nunits)
512 : : {
513 : 68597346 : machine_mode mode;
514 : :
515 : : /* First, look for a supported vector type. */
516 : 68597346 : if (SCALAR_FLOAT_MODE_P (innermode))
517 : : mode = MIN_MODE_VECTOR_FLOAT;
518 : 64637227 : else if (SCALAR_FRACT_MODE_P (innermode))
519 : : mode = MIN_MODE_VECTOR_FRACT;
520 : 64637227 : else if (SCALAR_UFRACT_MODE_P (innermode))
521 : : mode = MIN_MODE_VECTOR_UFRACT;
522 : 64637227 : else if (SCALAR_ACCUM_MODE_P (innermode))
523 : : mode = MIN_MODE_VECTOR_ACCUM;
524 : 64637227 : else if (SCALAR_UACCUM_MODE_P (innermode))
525 : : mode = MIN_MODE_VECTOR_UACCUM;
526 : : else
527 : 68597346 : mode = MIN_MODE_VECTOR_INT;
528 : :
529 : : /* Only check the broader vector_mode_supported_any_target_p here.
530 : : We'll filter through target-specific availability and
531 : : vector_mode_supported_p later in vector_type_mode. */
532 : 796619013 : FOR_EACH_MODE_FROM (mode, mode)
533 : 1592561218 : if (known_eq (GET_MODE_NUNITS (mode), nunits)
534 : 369302544 : && GET_MODE_INNER (mode) == innermode
535 : 864539551 : && targetm.vector_mode_supported_any_target_p (mode))
536 : 68258942 : return mode;
537 : :
538 : : /* For integers, try mapping it to a same-sized scalar mode. */
539 : 338404 : if (GET_MODE_CLASS (innermode) == MODE_INT)
540 : : {
541 : 16109 : poly_uint64 nbits = nunits * GET_MODE_BITSIZE (innermode);
542 : 16109 : if (int_mode_for_size (nbits, 0).exists (&mode)
543 : 15649 : && have_regs_of_mode[mode])
544 : 15649 : return mode;
545 : : }
546 : :
547 : 322755 : return opt_machine_mode ();
548 : : }
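
/* Usage sketch (editor's illustration; example_* is hypothetical):
   ask for four SImode lanes. A target with V4SImode returns it;
   otherwise a same-sized scalar integer mode may be returned, and
   failing that we fall back to BLKmode here. */
static machine_mode
example_four_lane_mode (void)
{
  machine_mode mode;
  if (mode_for_vector (SImode, 4).exists (&mode))
    return mode;
  return BLKmode;
}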
549 : :
550 : : /* If a piece of code is using vector mode VECTOR_MODE and also wants
551 : : to operate on elements of mode ELEMENT_MODE, return the vector mode
552 : : it should use for those elements. If NUNITS is nonzero, ensure that
553 : : the mode has exactly NUNITS elements, otherwise pick whichever vector
554 : : size pairs the most naturally with VECTOR_MODE; this may mean choosing
555 : : a mode with a different size and/or number of elements, depending on
556 : : what the target prefers. Return an empty opt_machine_mode if there
557 : : is no supported vector mode with the required properties.
558 : :
559 : : Unlike mode_for_vector. any returned mode is guaranteed to satisfy
560 : : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
561 : :
562 : : opt_machine_mode
563 : 33499382 : related_vector_mode (machine_mode vector_mode, scalar_mode element_mode,
564 : : poly_uint64 nunits)
565 : : {
566 : 33499382 : gcc_assert (VECTOR_MODE_P (vector_mode));
567 : 33499382 : return targetm.vectorize.related_mode (vector_mode, element_mode, nunits);
568 : : }
569 : :
570 : : /* If a piece of code is using vector mode VECTOR_MODE and also wants
571 : : to operate on integer vectors with the same element size and number
572 : : of elements, return the vector mode it should use. Return an empty
573 : : opt_machine_mode if there is no supported vector mode with the
574 : : required properties.
575 : :
576 : : Unlike mode_for_vector, any returned mode is guaranteed to satisfy
577 : : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
578 : :
579 : : opt_machine_mode
580 : 18279 : related_int_vector_mode (machine_mode vector_mode)
581 : : {
582 : 18279 : gcc_assert (VECTOR_MODE_P (vector_mode));
583 : 18279 : scalar_int_mode int_mode;
584 : 36558 : if (int_mode_for_mode (GET_MODE_INNER (vector_mode)).exists (&int_mode))
585 : 36558 : return related_vector_mode (vector_mode, int_mode,
586 : 18279 : GET_MODE_NUNITS (vector_mode));
587 : 0 : return opt_machine_mode ();
588 : : }
589 : :
590 : : /* Return the alignment of MODE. This will be bounded by 1 and
591 : : BIGGEST_ALIGNMENT. */
592 : :
593 : : unsigned int
594 : 1489427819 : get_mode_alignment (machine_mode mode)
595 : : {
596 : 2884180893 : return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
597 : : }
598 : :
599 : : /* Return the natural mode of an array, given that it is SIZE bytes in
600 : : total and has elements of type ELEM_TYPE. */
601 : :
602 : : static machine_mode
603 : 59649937 : mode_for_array (tree elem_type, tree size)
604 : : {
605 : 59649937 : tree elem_size;
606 : 59649937 : poly_uint64 int_size, int_elem_size;
607 : 59649937 : unsigned HOST_WIDE_INT num_elems;
608 : 59649937 : bool limit_p;
609 : :
610 : : /* One-element arrays get the component type's mode. */
611 : 59649937 : elem_size = TYPE_SIZE (elem_type);
612 : 59649937 : if (simple_cst_equal (size, elem_size))
613 : 3369817 : return TYPE_MODE (elem_type);
614 : :
615 : 56280120 : limit_p = true;
616 : 56280120 : if (poly_int_tree_p (size, &int_size)
617 : 56021692 : && poly_int_tree_p (elem_size, &int_elem_size)
618 : 56021692 : && maybe_ne (int_elem_size, 0U)
619 : 56280120 : && constant_multiple_p (int_size, int_elem_size, &num_elems))
620 : : {
621 : 56021692 : machine_mode elem_mode = TYPE_MODE (elem_type);
622 : 56021692 : machine_mode mode;
623 : 56021692 : if (targetm.array_mode (elem_mode, num_elems).exists (&mode))
624 : 0 : return mode;
625 : 56021692 : if (targetm.array_mode_supported_p (elem_mode, num_elems))
626 : 56280120 : limit_p = false;
627 : : }
628 : 56280120 : return mode_for_size_tree (size, MODE_INT, limit_p).else_blk ();
629 : : }
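
/* Worked sketch (editor's illustration; example_* is hypothetical):
   int[4] is 128 bits, so a target with TImode gets TImode from the
   code above, and else_blk () yields BLKmode everywhere else. */
static machine_mode
example_mode_for_int4 (void)
{
  return mode_for_array (integer_type_node, bitsize_int (128));
}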
630 : :
631 : : /* Subroutine of layout_decl: Force alignment required for the data type.
632 : : But if the decl itself wants greater alignment, don't override that. */
633 : :
634 : : static inline void
635 : 1650853610 : do_type_align (tree type, tree decl)
636 : : {
637 : 1650853610 : if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
638 : : {
639 : 1586643952 : SET_DECL_ALIGN (decl, TYPE_ALIGN (type));
640 : 1586643952 : if (TREE_CODE (decl) == FIELD_DECL)
641 : 48822886 : DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
642 : : }
643 : 1650853610 : if (TYPE_WARN_IF_NOT_ALIGN (type) > DECL_WARN_IF_NOT_ALIGN (decl))
644 : 51 : SET_DECL_WARN_IF_NOT_ALIGN (decl, TYPE_WARN_IF_NOT_ALIGN (type));
645 : 1650853610 : }
646 : :
647 : : /* Set the size, mode and alignment of a ..._DECL node.
648 : : TYPE_DECL does need this for C++.
649 : : Note that LABEL_DECL and CONST_DECL nodes do not need this,
650 : : and FUNCTION_DECL nodes have them set up in a special (and simple) way.
651 : : Don't call layout_decl for them.
652 : :
653 : : KNOWN_ALIGN is the amount of alignment we can assume this
654 : : decl has with no special effort. It is relevant only for FIELD_DECLs
655 : : and depends on the previous fields.
656 : : All that matters about KNOWN_ALIGN is which powers of 2 divide it.
657 : : If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
658 : : the record will be aligned to suit. */
659 : :
660 : : void
661 : 1651342016 : layout_decl (tree decl, unsigned int known_align)
662 : : {
663 : 1651342016 : tree type = TREE_TYPE (decl);
664 : 1651342016 : enum tree_code code = TREE_CODE (decl);
665 : 1651342016 : rtx rtl = NULL_RTX;
666 : 1651342016 : location_t loc = DECL_SOURCE_LOCATION (decl);
667 : :
668 : 1651342016 : if (code == CONST_DECL)
669 : : return;
670 : :
671 : 1651342016 : gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
672 : : || code == TYPE_DECL || code == FIELD_DECL);
673 : :
674 : 1651342016 : rtl = DECL_RTL_IF_SET (decl);
675 : :
676 : 1651342016 : if (type == error_mark_node)
677 : 2920 : type = void_type_node;
678 : :
679 : : /* Usually the size and mode come from the data type without change,
680 : : however, the front-end may set the explicit width of the field, so its
681 : : size may not be the same as the size of its type. This happens with
682 : : bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
683 : : also happens with other fields. For example, the C++ front-end creates
684 : : zero-sized fields corresponding to empty base classes, and depends on
685 : : layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
686 : : size in bytes from the size in bits. If we have already set the mode,
687 : : don't set it again since we can be called twice for FIELD_DECLs. */
688 : :
689 : 1651342016 : DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
690 : 1651342016 : if (DECL_MODE (decl) == VOIDmode)
691 : 1606840626 : SET_DECL_MODE (decl, TYPE_MODE (type));
692 : :
693 : 1651342016 : if (DECL_SIZE (decl) == 0)
694 : : {
695 : 1607432618 : DECL_SIZE (decl) = TYPE_SIZE (type);
696 : 1607432618 : DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
697 : : }
698 : 43909398 : else if (DECL_SIZE_UNIT (decl) == 0)
699 : 470053 : DECL_SIZE_UNIT (decl)
700 : 940106 : = fold_convert_loc (loc, sizetype,
701 : 470053 : size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
702 : : bitsize_unit_node));
703 : :
704 : 1651342016 : if (code != FIELD_DECL)
705 : : /* For non-fields, update the alignment from the type. */
706 : 1597851228 : do_type_align (type, decl);
707 : : else
708 : : /* For fields, it's a bit more complicated... */
709 : : {
710 : 53490788 : bool old_user_align = DECL_USER_ALIGN (decl);
711 : 53490788 : bool zero_bitfield = false;
712 : 53490788 : bool packed_p = DECL_PACKED (decl);
713 : 53490788 : unsigned int mfa;
714 : :
715 : 53490788 : if (DECL_BIT_FIELD (decl))
716 : : {
717 : 489459 : DECL_BIT_FIELD_TYPE (decl) = type;
718 : :
719 : : /* A zero-length bit-field affects the alignment of the next
720 : : field. In essence such bit-fields are not influenced by
721 : : any packing due to #pragma pack or attribute packed. */
722 : 489459 : if (integer_zerop (DECL_SIZE (decl))
723 : 489459 : && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
724 : : {
725 : 2130 : zero_bitfield = true;
726 : 2130 : packed_p = false;
727 : 2130 : if (PCC_BITFIELD_TYPE_MATTERS)
728 : 2130 : do_type_align (type, decl);
729 : : else
730 : : {
731 : : #ifdef EMPTY_FIELD_BOUNDARY
732 : : if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
733 : : {
734 : : SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY);
735 : : DECL_USER_ALIGN (decl) = 0;
736 : : }
737 : : #endif
738 : : }
739 : : }
740 : :
741 : : /* See if we can use an ordinary integer mode for a bit-field.
742 : : Conditions are: a fixed size that is correct for another mode,
743 : : occupying a complete byte or bytes on proper boundary. */
744 : 489459 : if (TYPE_SIZE (type) != 0
745 : 489459 : && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
746 : 978918 : && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
747 : : {
748 : 489206 : machine_mode xmode;
749 : 489206 : if (mode_for_size_tree (DECL_SIZE (decl),
750 : 327645 : MODE_INT, 1).exists (&xmode))
751 : : {
752 : 161561 : unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
753 : 153465 : if (!(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
754 : 314756 : && (known_align == 0 || known_align >= xalign))
755 : : {
756 : 153540 : SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)));
757 : 153540 : SET_DECL_MODE (decl, xmode);
758 : 153540 : DECL_BIT_FIELD (decl) = 0;
759 : : }
760 : : }
761 : : }
762 : :
763 : : /* Turn off DECL_BIT_FIELD if we won't need it set. */
764 : 489712 : if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
765 : 253 : && known_align >= TYPE_ALIGN (type)
766 : 489509 : && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
767 : 1 : DECL_BIT_FIELD (decl) = 0;
768 : : }
769 : 53001329 : else if (packed_p && DECL_USER_ALIGN (decl))
770 : : /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
771 : : round up; we'll reduce it again below. We want packing to
772 : : supersede USER_ALIGN inherited from the type, but defer to
773 : : alignment explicitly specified on the field decl. */;
774 : : else
775 : 53000252 : do_type_align (type, decl);
776 : :
777 : : /* If the field is packed and not explicitly aligned, give it the
778 : : minimum alignment. Note that do_type_align may set
779 : : DECL_USER_ALIGN, so we need to check old_user_align instead. */
780 : 53490788 : if (packed_p
781 : 53490788 : && !old_user_align)
782 : 6778 : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT));
783 : :
784 : 53490788 : if (! packed_p && ! DECL_USER_ALIGN (decl))
785 : : {
786 : : /* Some targets (e.g. i386, VMS) limit struct field alignment
787 : : to a lower boundary than alignment of variables unless
788 : : it was overridden by attribute aligned. */
789 : : #ifdef BIGGEST_FIELD_ALIGNMENT
790 : : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),
791 : : (unsigned) BIGGEST_FIELD_ALIGNMENT));
792 : : #endif
793 : : #ifdef ADJUST_FIELD_ALIGN
794 : 52892654 : SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, TREE_TYPE (decl),
795 : : DECL_ALIGN (decl)));
796 : : #endif
797 : : }
798 : :
799 : 53490788 : if (zero_bitfield)
800 : 2130 : mfa = initial_max_fld_align * BITS_PER_UNIT;
801 : : else
802 : 53488658 : mfa = maximum_field_alignment;
803 : : /* Should this be controlled by DECL_USER_ALIGN, too? */
804 : 53490788 : if (mfa != 0)
805 : 550 : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa));
806 : : }
807 : :
808 : : /* Evaluate nonconstant size only once, either now or as soon as safe. */
809 : 1651342016 : if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
810 : 26077 : DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
811 : 1651342016 : if (DECL_SIZE_UNIT (decl) != 0
812 : 1651342016 : && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
813 : 26077 : DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
814 : :
815 : : /* If requested, warn about definitions of large data objects. */
816 : 965948997 : if ((code == PARM_DECL || (code == VAR_DECL && !DECL_NONLOCAL_FRAME (decl)))
817 : 1883334335 : && !DECL_EXTERNAL (decl))
818 : : {
819 : 1138054373 : tree size = DECL_SIZE_UNIT (decl);
820 : :
821 : 1138054373 : if (size != 0 && TREE_CODE (size) == INTEGER_CST)
822 : : {
823 : : /* -Wlarger-than= argument of HOST_WIDE_INT_MAX is treated
824 : : as if PTRDIFF_MAX had been specified, with the value
825 : : being that on the target rather than the host. */
826 : 1024566130 : unsigned HOST_WIDE_INT max_size = warn_larger_than_size;
827 : 1024566130 : if (max_size == HOST_WIDE_INT_MAX)
828 : 1024566069 : max_size = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
829 : :
830 : 1024566130 : if (compare_tree_int (size, max_size) > 0)
831 : 23 : warning (OPT_Wlarger_than_, "size of %q+D %E bytes exceeds "
832 : : "maximum object size %wu",
833 : : decl, size, max_size);
834 : : }
835 : : }
836 : :
837 : : /* If the RTL was already set, update its mode and mem attributes. */
838 : 1651342016 : if (rtl)
839 : : {
840 : 37755 : PUT_MODE (rtl, DECL_MODE (decl));
841 : 37755 : SET_DECL_RTL (decl, 0);
842 : 37755 : if (MEM_P (rtl))
843 : 37755 : set_mem_attributes (rtl, decl, 1);
844 : 37755 : SET_DECL_RTL (decl, rtl);
845 : : }
846 : : }
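
/* Usage sketch (editor's illustration; example_* is hypothetical): a
   freshly built decl picks up size, mode and alignment from its type.
   KNOWN_ALIGN is 0 because it only matters for FIELD_DECLs. */
static tree
example_build_sized_var (tree type, tree name)
{
  tree decl = build_decl (input_location, VAR_DECL, name, type);
  layout_decl (decl, 0);
  return decl;
}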
847 : :
848 : : /* Given a VAR_DECL, PARM_DECL, RESULT_DECL, or FIELD_DECL, clears the
849 : : results of a previous call to layout_decl and calls it again. */
850 : :
851 : : void
852 : 397418619 : relayout_decl (tree decl)
853 : : {
854 : 397418619 : DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
855 : 397418619 : SET_DECL_MODE (decl, VOIDmode);
856 : 397418619 : if (!DECL_USER_ALIGN (decl))
857 : 397417446 : SET_DECL_ALIGN (decl, 0);
858 : 397418619 : if (DECL_RTL_SET_P (decl))
859 : 0 : SET_DECL_RTL (decl, 0);
860 : :
861 : 397418619 : layout_decl (decl, 0);
862 : 397418619 : }
863 : :
864 : : /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
865 : : QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
866 : : is to be passed to all other layout functions for this record. It is the
867 : : responsibility of the caller to call `free' for the storage returned.
868 : : Note that garbage collection is not permitted until we finish laying
869 : : out the record. */
870 : :
871 : : record_layout_info
872 : 42231982 : start_record_layout (tree t)
873 : : {
874 : 42231982 : record_layout_info rli = XNEW (struct record_layout_info_s);
875 : :
876 : 42231982 : rli->t = t;
877 : :
878 : : /* If the type has a minimum specified alignment (via an attribute
879 : : declaration, for example) use it -- otherwise, start with a
880 : : one-byte alignment. */
881 : 42231982 : rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
882 : 42231982 : rli->unpacked_align = rli->record_align;
883 : 84170999 : rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
884 : :
885 : : #ifdef STRUCTURE_SIZE_BOUNDARY
886 : : /* Packed structures don't need to have minimum size. */
887 : : if (! TYPE_PACKED (t))
888 : : {
889 : : unsigned tmp;
890 : :
891 : : /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */
892 : : tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
893 : : if (maximum_field_alignment != 0)
894 : : tmp = MIN (tmp, maximum_field_alignment);
895 : : rli->record_align = MAX (rli->record_align, tmp);
896 : : }
897 : : #endif
898 : :
899 : 42231982 : rli->offset = size_zero_node;
900 : 42231982 : rli->bitpos = bitsize_zero_node;
901 : 42231982 : rli->prev_field = 0;
902 : 42231982 : rli->pending_statics = 0;
903 : 42231982 : rli->packed_maybe_necessary = 0;
904 : 42231982 : rli->remaining_in_alignment = 0;
905 : :
906 : 42231982 : return rli;
907 : : }
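
/* Sketch of the canonical driver loop (editor's illustration;
   example_* is hypothetical, the rest is this file's own API and is
   essentially what layout_type does for records): lay out each field
   in turn, then finalize and free the rli. */
static void
example_layout_record (tree t)
{
  record_layout_info rli = start_record_layout (t);
  for (tree field = TYPE_FIELDS (t); field; field = DECL_CHAIN (field))
    place_field (rli, field);
  finish_record_layout (rli, /*free_p=*/1);
}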
908 : :
909 : : /* Fold sizetype value X to bitsizetype, given that X represents a type
910 : : size or offset. */
911 : :
912 : : static tree
913 : 267510397 : bits_from_bytes (tree x)
914 : : {
915 : 267510397 : if (POLY_INT_CST_P (x))
916 : : /* The runtime calculation isn't allowed to overflow sizetype;
917 : : increasing the runtime values must always increase the size
918 : : or offset of the object. This means that the object imposes
919 : : a maximum value on the runtime parameters, but we don't record
920 : : what that is. */
921 : : return build_poly_int_cst
922 : : (bitsizetype,
923 : : poly_wide_int::from (poly_int_cst_value (x),
924 : : TYPE_PRECISION (bitsizetype),
925 : : TYPE_SIGN (TREE_TYPE (x))));
926 : 267510397 : x = fold_convert (bitsizetype, x);
927 : 267510397 : gcc_checking_assert (x);
928 : 267510397 : return x;
929 : : }
930 : :
931 : : /* Return the combined bit position for the byte offset OFFSET and the
932 : : bit position BITPOS.
933 : :
934 : : These functions operate on byte and bit positions present in FIELD_DECLs
935 : : and assume that these expressions result in no (intermediate) overflow.
936 : : This assumption is necessary to fold the expressions as much as possible,
937 : : so as to avoid creating artificially variable-sized types in languages
938 : : supporting variable-sized types like Ada. */
939 : :
940 : : tree
941 : 207278060 : bit_from_pos (tree offset, tree bitpos)
942 : : {
943 : 207278060 : return size_binop (PLUS_EXPR, bitpos,
944 : : size_binop (MULT_EXPR, bits_from_bytes (offset),
945 : : bitsize_unit_node));
946 : : }
947 : :
948 : : /* Return the combined truncated byte position for the byte offset OFFSET and
949 : : the bit position BITPOS. */
950 : :
951 : : tree
952 : 232719426 : byte_from_pos (tree offset, tree bitpos)
953 : : {
954 : 232719426 : tree bytepos;
955 : 232719426 : if (TREE_CODE (bitpos) == MULT_EXPR
956 : 232719426 : && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
957 : 0 : bytepos = TREE_OPERAND (bitpos, 0);
958 : : else
959 : 232719426 : bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
960 : 232719426 : return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
961 : : }
962 : :
963 : : /* Split the bit position POS into a byte offset *POFFSET and a bit
964 : : position *PBITPOS with the byte offset aligned to OFF_ALIGN bits. */
965 : :
966 : : void
967 : 52072000 : pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
968 : : tree pos)
969 : : {
970 : 52072000 : tree toff_align = bitsize_int (off_align);
971 : 52072000 : if (TREE_CODE (pos) == MULT_EXPR
972 : 52072000 : && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
973 : : {
974 : 0 : *poffset = size_binop (MULT_EXPR,
975 : : fold_convert (sizetype, TREE_OPERAND (pos, 0)),
976 : : size_int (off_align / BITS_PER_UNIT));
977 : 0 : *pbitpos = bitsize_zero_node;
978 : : }
979 : : else
980 : : {
981 : 52072000 : *poffset = size_binop (MULT_EXPR,
982 : : fold_convert (sizetype,
983 : : size_binop (FLOOR_DIV_EXPR, pos,
984 : : toff_align)),
985 : : size_int (off_align / BITS_PER_UNIT));
986 : 52072000 : *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
987 : : }
988 : 52072000 : }
989 : :
990 : : /* Given a pointer to bit and byte offsets and an offset alignment,
991 : : normalize the offsets so they are within the alignment. */
992 : :
993 : : void
994 : 163566796 : normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
995 : : {
996 : : /* If the bit position is now larger than it should be, adjust it
997 : : downwards. */
998 : 163566796 : if (compare_tree_int (*pbitpos, off_align) >= 0)
999 : : {
1000 : 52072000 : tree offset, bitpos;
1001 : 52072000 : pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
1002 : 52072000 : *poffset = size_binop (PLUS_EXPR, *poffset, offset);
1003 : 52072000 : *pbitpos = bitpos;
1004 : : }
1005 : 163566796 : }
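
/* Worked example (editor's illustration) for the position helpers:
   with OFFSET = 5 bytes and BITPOS = 3,

     bit_from_pos  (5, 3) = 5 * 8 + 3 = 43 bits
     byte_from_pos (5, 3) = 5 + 3 / 8 = 5 bytes (truncated)

   and normalizing BITPOS = 43 against OFF_ALIGN = 32 moves
   floor (43 / 32) * 4 = 4 bytes into the offset and leaves
   BITPOS = 43 mod 32 = 11. */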
1006 : :
1007 : : /* Print debugging information about the information in RLI. */
1008 : :
1009 : : DEBUG_FUNCTION void
1010 : 0 : debug_rli (record_layout_info rli)
1011 : : {
1012 : 0 : print_node_brief (stderr, "type", rli->t, 0);
1013 : 0 : print_node_brief (stderr, "\noffset", rli->offset, 0);
1014 : 0 : print_node_brief (stderr, " bitpos", rli->bitpos, 0);
1015 : :
1016 : 0 : fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
1017 : : rli->record_align, rli->unpacked_align,
1018 : : rli->offset_align);
1019 : :
1020 : : /* The ms_struct code is the only code that uses this. */
1021 : 0 : if (targetm.ms_bitfield_layout_p (rli->t))
1022 : 0 : fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
1023 : :
1024 : 0 : if (rli->packed_maybe_necessary)
1025 : 0 : fprintf (stderr, "packed may be necessary\n");
1026 : :
1027 : 0 : if (!vec_safe_is_empty (rli->pending_statics))
1028 : : {
1029 : 0 : fprintf (stderr, "pending statics:\n");
1030 : 0 : debug (rli->pending_statics);
1031 : : }
1032 : 0 : }
1033 : :
1034 : : /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
1035 : : BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
1036 : :
1037 : : void
1038 : 163566796 : normalize_rli (record_layout_info rli)
1039 : : {
1040 : 163566796 : normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
1041 : 163566796 : }
1042 : :
1043 : : /* Returns the size in bytes allocated so far. */
1044 : :
1045 : : tree
1046 : 133702794 : rli_size_unit_so_far (record_layout_info rli)
1047 : : {
1048 : 133702794 : return byte_from_pos (rli->offset, rli->bitpos);
1049 : : }
1050 : :
1051 : : /* Returns the size in bits allocated so far. */
1052 : :
1053 : : tree
1054 : 113165420 : rli_size_so_far (record_layout_info rli)
1055 : : {
1056 : 113165420 : return bit_from_pos (rli->offset, rli->bitpos);
1057 : : }
1058 : :
1059 : : /* FIELD is about to be added to RLI->T. The alignment (in bits) of
1060 : : the next available location within the record is given by KNOWN_ALIGN.
1061 : : Update the variable alignment fields in RLI, and return the alignment
1062 : : to give the FIELD. */
1063 : :
1064 : : unsigned int
1065 : 52103177 : update_alignment_for_field (record_layout_info rli, tree field,
1066 : : unsigned int known_align)
1067 : : {
1068 : : /* The alignment required for FIELD. */
1069 : 52103177 : unsigned int desired_align;
1070 : : /* The type of this field. */
1071 : 52103177 : tree type = TREE_TYPE (field);
1072 : : /* True if the field was explicitly aligned by the user. */
1073 : 52103177 : bool user_align;
1074 : 52103177 : bool is_bitfield;
1075 : :
1076 : : /* Do not attempt to align an ERROR_MARK node. */
1077 : 52103177 : if (TREE_CODE (type) == ERROR_MARK)
1078 : : return 0;
1079 : :
1080 : : /* Lay out the field so we know what alignment it needs. */
1081 : 52103174 : layout_decl (field, known_align);
1082 : 52103174 : desired_align = DECL_ALIGN (field);
1083 : 52103174 : user_align = DECL_USER_ALIGN (field);
1084 : :
1085 : 104206348 : is_bitfield = (type != error_mark_node
1086 : 52103174 : && DECL_BIT_FIELD_TYPE (field)
1087 : 52573227 : && ! integer_zerop (TYPE_SIZE (type)));
1088 : :
1089 : : /* Record must have at least as much alignment as any field.
1090 : : Otherwise, the alignment of the field within the record is
1091 : : meaningless. */
1092 : 52103174 : if (targetm.ms_bitfield_layout_p (rli->t))
1093 : : {
1094 : : /* Here, the alignment of the underlying type of a bitfield can
1095 : : affect the alignment of a record; even a zero-sized field
1096 : : can do this. The alignment should be to the alignment of
1097 : : the type, except that for zero-size bitfields this only
1098 : : applies if there was an immediately prior, nonzero-size
1099 : : bitfield. (That's the way it is, experimentally.) */
1100 : 206 : if (!is_bitfield
1101 : 206 : || ((DECL_SIZE (field) == NULL_TREE
1102 : 131 : || !integer_zerop (DECL_SIZE (field)))
1103 : 121 : ? !DECL_PACKED (field)
1104 : 10 : : (rli->prev_field
1105 : 8 : && DECL_BIT_FIELD_TYPE (rli->prev_field)
1106 : 8 : && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
1107 : : {
1108 : 202 : unsigned int type_align = TYPE_ALIGN (type);
1109 : 277 : if (!is_bitfield && DECL_PACKED (field))
1110 : : type_align = desired_align;
1111 : : else
1112 : 202 : type_align = MAX (type_align, desired_align);
1113 : 202 : if (maximum_field_alignment != 0)
1114 : 68 : type_align = MIN (type_align, maximum_field_alignment);
1115 : 202 : rli->record_align = MAX (rli->record_align, type_align);
1116 : 202 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1117 : : }
1118 : : }
1119 : 52102968 : else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
1120 : : {
1121 : : /* Named bit-fields cause the entire structure to have the
1122 : : alignment implied by their type. Some targets also apply the same
1123 : : rules to unnamed bitfields. */
1124 : 469922 : if (DECL_NAME (field) != 0
1125 : 469922 : || targetm.align_anon_bitfield ())
1126 : : {
1127 : 326120 : unsigned int type_align = TYPE_ALIGN (type);
1128 : :
1129 : : #ifdef ADJUST_FIELD_ALIGN
1130 : 326120 : if (! TYPE_USER_ALIGN (type))
1131 : 319266 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1132 : : #endif
1133 : :
1134 : : /* Targets might choose to handle unnamed and hence possibly
1135 : : zero-width bitfields. Those are not influenced by #pragmas
1136 : : or packed attributes. */
1137 : 326120 : if (integer_zerop (DECL_SIZE (field)))
1138 : : {
1139 : 0 : if (initial_max_fld_align)
1140 : 0 : type_align = MIN (type_align,
1141 : : initial_max_fld_align * BITS_PER_UNIT);
1142 : : }
1143 : 326120 : else if (maximum_field_alignment != 0)
1144 : 177 : type_align = MIN (type_align, maximum_field_alignment);
1145 : 325943 : else if (DECL_PACKED (field))
1146 : 2846 : type_align = MIN (type_align, BITS_PER_UNIT);
1147 : :
1148 : : /* The alignment of the record is increased to the maximum
1149 : : of the current alignment, the alignment indicated on the
1150 : : field (i.e., the alignment specified by an __aligned__
1151 : : attribute), and the alignment indicated by the type of
1152 : : the field. */
1153 : 326120 : rli->record_align = MAX (rli->record_align, desired_align);
1154 : 326120 : rli->record_align = MAX (rli->record_align, type_align);
1155 : :
1156 : 326120 : if (warn_packed)
1157 : 0 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1158 : 326120 : user_align |= TYPE_USER_ALIGN (type);
1159 : : }
1160 : : }
1161 : : else
1162 : : {
1163 : 51633046 : rli->record_align = MAX (rli->record_align, desired_align);
1164 : 51633046 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1165 : : }
1166 : :
1167 : 52103174 : TYPE_USER_ALIGN (rli->t) |= user_align;
1168 : :
1169 : 52103174 : return desired_align;
1170 : : }
1171 : :
1172 : : /* Issue a warning if the record alignment, RECORD_ALIGN, is less than
1173 : : the field alignment of FIELD or FIELD isn't aligned. */
1174 : :
1175 : : static void
1176 : 52095062 : handle_warn_if_not_align (tree field, unsigned int record_align)
1177 : : {
1178 : 52095062 : tree type = TREE_TYPE (field);
1179 : :
1180 : 52095062 : if (type == error_mark_node)
1181 : 52095062 : return;
1182 : :
1183 : 52095021 : unsigned int warn_if_not_align = 0;
1184 : :
1185 : 52095021 : int opt_w = 0;
1186 : :
1187 : 52095021 : if (warn_if_not_aligned)
1188 : : {
1189 : 52094881 : warn_if_not_align = DECL_WARN_IF_NOT_ALIGN (field);
1190 : 52094881 : if (!warn_if_not_align)
1191 : 52094834 : warn_if_not_align = TYPE_WARN_IF_NOT_ALIGN (type);
1192 : 47 : if (warn_if_not_align)
1193 : : opt_w = OPT_Wif_not_aligned;
1194 : : }
1195 : :
1196 : : if (!warn_if_not_align
1197 : 52094974 : && warn_packed_not_aligned
1198 : 2583079 : && lookup_attribute ("aligned", TYPE_ATTRIBUTES (type)))
1199 : : {
1200 : 115 : warn_if_not_align = TYPE_ALIGN (type);
1201 : 115 : opt_w = OPT_Wpacked_not_aligned;
1202 : : }
1203 : :
1204 : 52095021 : if (!warn_if_not_align)
1205 : 52094859 : return;
1206 : :
1207 : 162 : tree context = DECL_CONTEXT (field);
1208 : :
1209 : 162 : warn_if_not_align /= BITS_PER_UNIT;
1210 : 162 : record_align /= BITS_PER_UNIT;
1211 : 162 : if ((record_align % warn_if_not_align) != 0)
1212 : 36 : warning (opt_w, "alignment %u of %qT is less than %u",
1213 : : record_align, context, warn_if_not_align);
1214 : :
1215 : 162 : tree off = byte_position (field);
1216 : 162 : if (!multiple_of_p (TREE_TYPE (off), off, size_int (warn_if_not_align)))
1217 : : {
1218 : 25 : if (TREE_CODE (off) == INTEGER_CST)
1219 : 24 : warning (opt_w, "%q+D offset %E in %qT isn%'t aligned to %u",
1220 : : field, off, context, warn_if_not_align);
1221 : : else
1222 : 1 : warning (opt_w, "%q+D offset %E in %qT may not be aligned to %u",
1223 : : field, off, context, warn_if_not_align);
1224 : : }
1225 : : }
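
/* Example (editor's illustration) of user code that reaches the two
   warnings above:

     struct __attribute__ ((aligned (8))) inner { long long x; };
     struct __attribute__ ((packed)) outer
     {
       char c;
       struct inner i;  // offset 1 is not a multiple of 8
     };

   Packing drops the record alignment of 'outer' below the 8 bytes
   that 'inner' requests, so -Wpacked-not-aligned flags both the
   record alignment and the field offset. */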
1226 : :
1227 : : /* Called from place_field to handle unions. */
1228 : :
1229 : : static void
1230 : 1866711 : place_union_field (record_layout_info rli, tree field)
1231 : : {
1232 : 1866711 : update_alignment_for_field (rli, field, /*known_align=*/0);
1233 : :
1234 : 1866711 : DECL_FIELD_OFFSET (field) = size_zero_node;
1235 : 1866711 : DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
1236 : 1866711 : SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
1237 : 1866711 : handle_warn_if_not_align (field, rli->record_align);
1238 : :
1239 : : /* If this is an ERROR_MARK return *after* having set the
1240 : : field at the start of the union. This helps when parsing
1241 : : invalid fields. */
1242 : 1866711 : if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
1243 : : return;
1244 : :
1245 : 3168435 : if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1246 : 2217980 : && TYPE_TYPELESS_STORAGE (TREE_TYPE (field)))
1247 : 468420 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1248 : :
1249 : : /* We might see a flexible array member field (with no DECL_SIZE_UNIT); use
1250 : : zero size for such a field. */
1251 : 1866708 : tree field_size_unit = DECL_SIZE_UNIT (field)
1252 : 1866708 : ? DECL_SIZE_UNIT (field)
1253 : 1866708 : : build_int_cst (sizetype, 0);
1254 : : /* We assume the union's size will be a multiple of a byte so we don't
1255 : : bother with BITPOS. */
1256 : 1866708 : if (TREE_CODE (rli->t) == UNION_TYPE)
1257 : 1866708 : rli->offset = size_binop (MAX_EXPR, rli->offset, field_size_unit);
1258 : 0 : else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
1259 : 0 : rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
1260 : : field_size_unit, rli->offset);
1261 : : }
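
/* Sketch (editor's illustration): every union field lands at offset 0
   and the size accumulates as a MAX, e.g.

     union u { char c[3]; int i; };

   places both fields at offset 0 and leaves rli->offset at
   MAX (3, 4) = 4 bytes, which the finalization code then rounds up
   to the union's alignment. */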
1262 : :
1263 : : /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
1264 : : at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
1265 : : units of alignment than the underlying TYPE. */
1266 : : static int
1267 : 311777 : excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
1268 : : HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
1269 : : {
1270 : : /* Note that the calculation of OFFSET might overflow; we calculate it so
1271 : : that we still get the right result as long as ALIGN is a power of two. */
1272 : 311777 : unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
1273 : :
1274 : 311777 : offset = offset % align;
1275 : 311777 : return ((offset + size + align - 1) / align
1276 : 311777 : > tree_to_uhwi (TYPE_SIZE (type)) / align);
1277 : : }
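
/* Worked example (editor's illustration): a 28-bit field placed at
   byte 3, bit 6, with a 32-bit TYPE and ALIGN = 32:

     offset = (3 * 8 + 6) % 32 = 30
     (30 + 28 + 32 - 1) / 32 = 2 alignment units spanned
     tree_to_uhwi (TYPE_SIZE (type)) / align = 32 / 32 = 1

   Since 2 > 1 the field would straddle a unit, so the function
   returns nonzero and place_field advances to the next boundary. */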
1278 : :
1279 : : /* RLI contains information about the layout of a RECORD_TYPE. FIELD
1280 : : is a FIELD_DECL to be added after those fields already present in
1281 : : T. (FIELD is not actually added to the TYPE_FIELDS list here;
1282 : : callers that desire that behavior must manually perform that step.) */
1283 : :
1284 : : void
1285 : 318377382 : place_field (record_layout_info rli, tree field)
1286 : : {
1287 : : /* The alignment required for FIELD. */
1288 : 318377382 : unsigned int desired_align;
1289 : : /* The alignment FIELD would have if we just dropped it into the
1290 : : record as it presently stands. */
1291 : 318377382 : unsigned int known_align;
1292 : 318377382 : unsigned int actual_align;
1293 : : /* The type of this field. */
1294 : 318377382 : tree type = TREE_TYPE (field);
1295 : :
1296 : 318377382 : gcc_assert (TREE_CODE (field) != ERROR_MARK);
1297 : :
1298 : : /* If FIELD is static, then treat it like a separate variable, not
1299 : : really like a structure field. If it is a FUNCTION_DECL, it's a
1300 : : method. In both cases, all we do is lay out the decl, and we do
1301 : : it *after* the record is laid out. */
1302 : 318377382 : if (VAR_P (field))
1303 : : {
1304 : 9948098 : vec_safe_push (rli->pending_statics, field);
1305 : 9948098 : return;
1306 : : }
1307 : :
1308 : : /* Enumerators and enum types which are local to this class need not
1309 : : be laid out. Likewise for initialized constant fields. */
1310 : 308429284 : else if (TREE_CODE (field) != FIELD_DECL)
1311 : : return;
1312 : :
1313 : : /* Unions are laid out very differently than records, so split
1314 : : that code off to another function. */
1315 : 52095062 : else if (TREE_CODE (rli->t) != RECORD_TYPE)
1316 : : {
1317 : 1866711 : place_union_field (rli, field);
1318 : 1866711 : return;
1319 : : }
1320 : :
1321 : 50228351 : else if (TREE_CODE (type) == ERROR_MARK)
1322 : : {
1323 : : /* Place this field at the current allocation position, so we
1324 : : maintain monotonicity. */
1325 : 38 : DECL_FIELD_OFFSET (field) = rli->offset;
1326 : 38 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1327 : 38 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1328 : 38 : handle_warn_if_not_align (field, rli->record_align);
1329 : 38 : return;
1330 : : }
1331 : :
1332 : 50228313 : if (AGGREGATE_TYPE_P (type)
1333 : 50228313 : && TYPE_TYPELESS_STORAGE (type))
1334 : 1130930 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1335 : :
1336 : : /* Work out the known alignment so far. Note that A & (-A) is the
1337 : : value of the least-significant bit in A that is one. */
1338 : 50228313 : if (! integer_zerop (rli->bitpos))
1339 : 17218704 : known_align = least_bit_hwi (tree_to_uhwi (rli->bitpos));
1340 : 33009609 : else if (integer_zerop (rli->offset))
1341 : : known_align = 0;
1342 : 8019622 : else if (tree_fits_uhwi_p (rli->offset))
1343 : 8018857 : known_align = (BITS_PER_UNIT
1344 : 8018857 : * least_bit_hwi (tree_to_uhwi (rli->offset)));
1345 : : else
1346 : 765 : known_align = rli->offset_align;
1347 : :
1348 : 50228313 : desired_align = update_alignment_for_field (rli, field, known_align);
1349 : 50228313 : if (known_align == 0)
1350 : 49865026 : known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1351 : :
1352 : 50228313 : if (warn_packed && DECL_PACKED (field))
1353 : : {
1354 : 3 : if (known_align >= TYPE_ALIGN (type))
1355 : : {
1356 : 3 : if (TYPE_ALIGN (type) > desired_align)
1357 : : {
1358 : 3 : if (STRICT_ALIGNMENT)
1359 : : warning (OPT_Wattributes, "packed attribute causes "
1360 : : "inefficient alignment for %q+D", field);
1361 : : /* Don't warn if DECL_PACKED was set by the type. */
1362 : 3 : else if (!TYPE_PACKED (rli->t))
1363 : 0 : warning (OPT_Wattributes, "packed attribute is "
1364 : : "unnecessary for %q+D", field);
1365 : : }
1366 : : }
1367 : : else
1368 : 0 : rli->packed_maybe_necessary = 1;
1369 : : }
1370 : :
1371 : : /* Does this field automatically have the alignment it needs by virtue
1372 : : of the fields that precede it and the record's own alignment? */
1373 : 50228313 : if (known_align < desired_align
1374 : 50228313 : && (! targetm.ms_bitfield_layout_p (rli->t)
1375 : 17 : || rli->prev_field == NULL))
1376 : : {
1377 : : /* No, we need to skip space before this field.
1378 : : Bump the cumulative size to multiple of field alignment. */
1379 : :
1380 : 1369538 : if (!targetm.ms_bitfield_layout_p (rli->t)
1381 : 1369535 : && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION
1382 : 2724224 : && !TYPE_ARTIFICIAL (rli->t))
1383 : 1354655 : warning (OPT_Wpadded, "padding struct to align %q+D", field);
1384 : :
1385 : : /* If the alignment is still within offset_align, just align
1386 : : the bit position. */
1387 : 1369538 : if (desired_align < rli->offset_align)
1388 : 1335331 : rli->bitpos = round_up (rli->bitpos, desired_align);
1389 : : else
1390 : : {
1391 : : /* First adjust OFFSET by the partial bits, then align. */
1392 : 34207 : rli->offset
1393 : 34207 : = size_binop (PLUS_EXPR, rli->offset,
1394 : : fold_convert (sizetype,
1395 : : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1396 : : bitsize_unit_node)));
1397 : 34207 : rli->bitpos = bitsize_zero_node;
1398 : :
1399 : 34207 : rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
1400 : : }
1401 : :
1402 : 1369538 : if (! TREE_CONSTANT (rli->offset))
1403 : 416 : rli->offset_align = desired_align;
1404 : : }
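: : /* Illustrative numbers for the second branch above: with
: : offset == 2 bytes, bitpos == 5 bits, offset_align == 8 and a
: : desired alignment of 32 bits, offset first advances to
: : 2 + CEIL (5, 8) == 3 bytes, bitpos resets to zero, and offset
: : is then rounded up to 32 / 8 == 4 bytes. */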
1405 : :
1406 : : /* Handle compatibility with PCC. Note that if the record has any
1407 : : variable-sized fields, we need not worry about compatibility. */
1408 : 50228313 : if (PCC_BITFIELD_TYPE_MATTERS
1409 : 50228313 : && ! targetm.ms_bitfield_layout_p (rli->t)
1410 : 50228112 : && TREE_CODE (field) == FIELD_DECL
1411 : 50228112 : && type != error_mark_node
1412 : 50228112 : && DECL_BIT_FIELD (field)
1413 : 315860 : && (! DECL_PACKED (field)
1414 : : /* Enter for these packed fields only to issue a warning. */
1415 : 2718 : || TYPE_ALIGN (type) <= BITS_PER_UNIT)
1416 : 313334 : && maximum_field_alignment == 0
1417 : 313100 : && ! integer_zerop (DECL_SIZE (field))
1418 : 311791 : && tree_fits_uhwi_p (DECL_SIZE (field))
1419 : 311791 : && tree_fits_uhwi_p (rli->offset)
1420 : 50540090 : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1421 : : {
1422 : 311777 : unsigned int type_align = TYPE_ALIGN (type);
1423 : 311777 : tree dsize = DECL_SIZE (field);
1424 : 311777 : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1425 : 311777 : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1426 : 311777 : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1427 : :
1428 : : #ifdef ADJUST_FIELD_ALIGN
1429 : 311777 : if (! TYPE_USER_ALIGN (type))
1430 : 306788 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1431 : : #endif
1432 : :
1433 : : /* A bit field may not span more units of alignment of its type
1434 : : than its type itself. Advance to next boundary if necessary. */
1435 : 311777 : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1436 : : {
1437 : 18793 : if (DECL_PACKED (field))
1438 : : {
1439 : 22 : if (warn_packed_bitfield_compat == 1)
1440 : 14 : inform
1441 : 14 : (input_location,
1442 : : "offset of packed bit-field %qD has changed in GCC 4.4",
1443 : : field);
1444 : : }
1445 : : else
1446 : 18771 : rli->bitpos = round_up (rli->bitpos, type_align);
1447 : : }
1448 : :
1449 : 311777 : if (! DECL_PACKED (field))
1450 : 311591 : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1451 : :
1452 : 311777 : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1453 : : TYPE_WARN_IF_NOT_ALIGN (type));
1454 : : }
1455 : :
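: : /* A concrete illustration of the excess_unit_span check, assuming
: : 8-bit char and 32-bit int: in
: : struct s { char c; int f : 30; };
: : f would start at bit 8 and end at bit 38, spanning two 32-bit
: : units of its own type, so it is advanced to bit 32 instead
: : (unless packed, where only the GCC 4.4 note may be issued). */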
1456 : : #ifdef BITFIELD_NBYTES_LIMITED
1457 : : if (BITFIELD_NBYTES_LIMITED
1458 : : && ! targetm.ms_bitfield_layout_p (rli->t)
1459 : : && TREE_CODE (field) == FIELD_DECL
1460 : : && type != error_mark_node
1461 : : && DECL_BIT_FIELD_TYPE (field)
1462 : : && ! DECL_PACKED (field)
1463 : : && ! integer_zerop (DECL_SIZE (field))
1464 : : && tree_fits_uhwi_p (DECL_SIZE (field))
1465 : : && tree_fits_uhwi_p (rli->offset)
1466 : : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1467 : : {
1468 : : unsigned int type_align = TYPE_ALIGN (type);
1469 : : tree dsize = DECL_SIZE (field);
1470 : : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1471 : : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1472 : : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1473 : :
1474 : : #ifdef ADJUST_FIELD_ALIGN
1475 : : if (! TYPE_USER_ALIGN (type))
1476 : : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1477 : : #endif
1478 : :
1479 : : if (maximum_field_alignment != 0)
1480 : : type_align = MIN (type_align, maximum_field_alignment);
1481 : : /* ??? This test is opposite the test in the containing if
1482 : : statement, so this code is unreachable currently. */
1483 : : else if (DECL_PACKED (field))
1484 : : type_align = MIN (type_align, BITS_PER_UNIT);
1485 : :
1486 : : /* A bit field may not span the unit of alignment of its type.
1487 : : Advance to next boundary if necessary. */
1488 : : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1489 : : rli->bitpos = round_up (rli->bitpos, type_align);
1490 : :
1491 : : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1492 : : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1493 : : TYPE_WARN_IF_NOT_ALIGN (type));
1494 : : }
1495 : : #endif
1496 : :
1497 : : /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1498 : : A subtlety:
1499 : : When a bit field is inserted into a packed record, the whole
1500 : : size of the underlying type is used by one or more same-size
1501 : : adjacent bitfields. (That is, if it's long:3, 32 bits is
1502 : : used in the record, and any additional adjacent long bitfields are
1503 : : packed into the same chunk of 32 bits. However, if the size
1504 : : changes, a new field of that size is allocated.) In an unpacked
1505 : : record, this is the same as using alignment, but not equivalent
1506 : : when packing.
1507 : :
1508 : : Note: for compatibility, we use the type size, not the type alignment,
1509 : : to determine alignment, since that matches the documentation. */
1510 : :
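: : /* For example, assuming 32-bit long and 8-bit char, the MS rules
: : lay out
: : struct s { long a : 3; long b : 4; char c : 1; };
: : with a and b sharing one 32-bit long chunk (7 of its bits used)
: : and c starting a fresh 8-bit char chunk, because the underlying
: : type size changed between b and c. */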
1511 : 50228313 : if (targetm.ms_bitfield_layout_p (rli->t))
1512 : : {
1513 : 201 : tree prev_saved = rli->prev_field;
1514 : 283 : tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1515 : :
1516 : : /* rli->prev_field, when set, is always a bitfield. */
1517 : 201 : if (rli->prev_field)
1518 : : {
1519 : 82 : bool realign_p = known_align < desired_align;
1520 : :
1521 : : /* If both are bitfields, nonzero, and the same size, this is
1522 : : the middle of a run. Zero declared size fields are special
1523 : : and handled as "end of run". (Note: it's nonzero declared
1524 : : size, but equal type sizes!) (Since we know that both
1525 : : the current and previous fields are bitfields by the
1526 : : time we check it, DECL_SIZE must be present for both.) */
1527 : 82 : if (DECL_BIT_FIELD_TYPE (field)
1528 : 64 : && !integer_zerop (DECL_SIZE (field))
1529 : 56 : && !integer_zerop (DECL_SIZE (rli->prev_field))
1530 : 54 : && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
1531 : 54 : && tree_fits_uhwi_p (TYPE_SIZE (type))
1532 : 136 : && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1533 : : {
1534 : : /* We're in the middle of a run of equal type size fields; make
1535 : : sure we realign if we run out of bits. (Not decl size,
1536 : : type size!) */
1537 : 52 : HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
1538 : :
1539 : 52 : if (rli->remaining_in_alignment < bitsize)
1540 : : {
1541 : 1 : HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
1542 : :
1543 : : /* Out of bits; bump up to the next 'word'. */
1544 : 1 : rli->bitpos
1545 : 1 : = size_binop (PLUS_EXPR, rli->bitpos,
1546 : : bitsize_int (rli->remaining_in_alignment));
1547 : 1 : rli->prev_field = field;
1548 : 1 : if (typesize < bitsize)
1549 : 0 : rli->remaining_in_alignment = 0;
1550 : : else
1551 : 1 : rli->remaining_in_alignment = typesize - bitsize;
1552 : : }
1553 : : else
1554 : : {
1555 : 51 : rli->remaining_in_alignment -= bitsize;
1556 : 51 : realign_p = false;
1557 : : }
1558 : : }
1559 : : else
1560 : : {
1561 : : /* End of a run: if leaving a run of bitfields of the same type
1562 : : size, we have to "use up" the rest of the bits of the type
1563 : : size.
1564 : :
1565 : : Compute the new position as the sum of the size for the prior
1566 : : type and where we first started working on that type.
1567 : : Note: since the beginning of the field was aligned, the end
1568 : : will be too. No rounding needed. */
1569 : :
1570 : 30 : if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1571 : : {
1572 : 20 : rli->bitpos
1573 : 20 : = size_binop (PLUS_EXPR, rli->bitpos,
1574 : : bitsize_int (rli->remaining_in_alignment));
1575 : : }
1576 : : else
1577 : : /* We "use up" size zero fields; the code below should behave
1578 : : as if the prior field was not a bitfield. */
1579 : : prev_saved = NULL;
1580 : :
1581 : : /* Cause a new bitfield to be captured, either this time (if
1582 : : currently a bitfield) or next time we see one. */
1583 : 30 : if (!DECL_BIT_FIELD_TYPE (field)
1584 : 30 : || integer_zerop (DECL_SIZE (field)))
1585 : 26 : rli->prev_field = NULL;
1586 : : }
1587 : :
1588 : : /* Does this field automatically have alignment it needs by virtue
1589 : : of the fields that precede it and the record's own alignment? */
1590 : 82 : if (realign_p)
1591 : : {
1592 : : /* If the alignment is still within offset_align, just align
1593 : : the bit position. */
1594 : 13 : if (desired_align < rli->offset_align)
1595 : 11 : rli->bitpos = round_up (rli->bitpos, desired_align);
1596 : : else
1597 : : {
1598 : : /* First adjust OFFSET by the partial bits, then align. */
1599 : 2 : tree d = size_binop (CEIL_DIV_EXPR, rli->bitpos,
1600 : : bitsize_unit_node);
1601 : 2 : rli->offset = size_binop (PLUS_EXPR, rli->offset,
1602 : : fold_convert (sizetype, d));
1603 : 2 : rli->bitpos = bitsize_zero_node;
1604 : :
1605 : 2 : rli->offset = round_up (rli->offset,
1606 : : desired_align / BITS_PER_UNIT);
1607 : : }
1608 : :
1609 : 13 : if (! TREE_CONSTANT (rli->offset))
1610 : 0 : rli->offset_align = desired_align;
1611 : : }
1612 : :
1613 : 82 : normalize_rli (rli);
1614 : : }
1615 : :
1616 : : /* If we're starting a new run of same type size bitfields
1617 : : (or a run of non-bitfields), set up the "first of the run"
1618 : : fields.
1619 : :
1620 : : That is, if the current field is not a bitfield, or if there was
1621 : : a prior bitfield and the type sizes differ, or if there wasn't a
1622 : : prior bitfield and the size of the current field is nonzero.
1623 : :
1624 : : Note: we must be sure to test ONLY the type size if there was
1625 : : a prior bitfield and ONLY for the current field being zero if
1626 : : there wasn't. */
1627 : :
1628 : 201 : if (!DECL_BIT_FIELD_TYPE (field)
1629 : 261 : || (prev_saved != NULL
1630 : 129 : ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1631 : 69 : : !integer_zerop (DECL_SIZE (field))))
1632 : : {
1633 : : /* Never smaller than a byte for compatibility. */
1634 : 143 : unsigned int type_align = BITS_PER_UNIT;
1635 : :
1636 : : /* (When not a bitfield), we could be seeing a flex array (with
1637 : : no DECL_SIZE). Since we won't be using remaining_in_alignment
1638 : : until we see a bitfield (and come by here again) we just skip
1639 : : calculating it. */
1640 : 143 : if (DECL_SIZE (field) != NULL
1641 : 143 : && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field)))
1642 : 285 : && tree_fits_uhwi_p (DECL_SIZE (field)))
1643 : : {
1644 : 142 : unsigned HOST_WIDE_INT bitsize
1645 : 142 : = tree_to_uhwi (DECL_SIZE (field));
1646 : 142 : unsigned HOST_WIDE_INT typesize
1647 : 142 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
1648 : :
1649 : 142 : if (typesize < bitsize)
1650 : 0 : rli->remaining_in_alignment = 0;
1651 : : else
1652 : 142 : rli->remaining_in_alignment = typesize - bitsize;
1653 : : }
1654 : :
1655 : : /* Now align (conventionally) for the new type. */
1656 : 143 : if (! DECL_PACKED (field))
1657 : 137 : type_align = TYPE_ALIGN (TREE_TYPE (field));
1658 : :
1659 : 143 : if (maximum_field_alignment != 0)
1660 : 56 : type_align = MIN (type_align, maximum_field_alignment);
1661 : :
1662 : 143 : rli->bitpos = round_up (rli->bitpos, type_align);
1663 : :
1664 : : /* If we really aligned, don't allow subsequent bitfields
1665 : : to undo that. */
1666 : 143 : rli->prev_field = NULL;
1667 : : }
1668 : : }
1669 : :
1670 : : /* Offset so far becomes the position of this field after normalizing. */
1671 : 50228313 : normalize_rli (rli);
1672 : 50228313 : DECL_FIELD_OFFSET (field) = rli->offset;
1673 : 50228313 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1674 : 50228313 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1675 : 50228313 : handle_warn_if_not_align (field, rli->record_align);
1676 : :
1677 : : /* Evaluate nonconstant offsets only once, either now or as soon as safe. */
1678 : 50228313 : if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST)
1679 : 1169 : DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field));
1680 : :
1681 : : /* If this field ended up more aligned than we thought it would be (we
1682 : : approximate this by seeing if its position changed), lay out the field
1683 : : again; perhaps we can use an integral mode for it now. */
1684 : 50228313 : if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1685 : 16655803 : actual_align = least_bit_hwi (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
1686 : 33572510 : else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1687 : 49865004 : actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1688 : 8582523 : else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1689 : 8581371 : actual_align = (BITS_PER_UNIT
1690 : 8581371 : * least_bit_hwi (tree_to_uhwi (DECL_FIELD_OFFSET (field))));
1691 : : else
1692 : 1152 : actual_align = DECL_OFFSET_ALIGN (field);
1693 : : /* ACTUAL_ALIGN is still the actual alignment *within the record*.
1694 : : Store / extract bit field operations will check the alignment of
1695 : : the record against the mode of bit fields. */
1696 : :
1697 : 50228313 : if (known_align != actual_align)
1698 : 1387614 : layout_decl (field, actual_align);
1699 : :
1700 : 50228313 : if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1701 : 95786 : rli->prev_field = field;
1702 : :
1703 : : /* Now add size of this field to the size of the record. If the size is
1704 : : not constant, treat the field as being a multiple of bytes and just
1705 : : adjust the offset, resetting the bit position. Otherwise, apportion the
1706 : : size amongst the bit position and offset. First handle the case of an
1707 : : unspecified size, which can happen when we have an invalid nested struct
1708 : : definition, such as struct j { struct j { int i; } }. The error message
1709 : : is printed in finish_struct. */
1710 : 50228313 : if (DECL_SIZE (field) == 0)
1711 : : /* Do nothing. */;
1712 : 50139281 : else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1713 : 50139281 : || TREE_OVERFLOW (DECL_SIZE (field)))
1714 : : {
1715 : 1054 : rli->offset
1716 : 1054 : = size_binop (PLUS_EXPR, rli->offset,
1717 : : fold_convert (sizetype,
1718 : : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1719 : : bitsize_unit_node)));
1720 : 1054 : rli->offset
1721 : 1054 : = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1722 : 1054 : rli->bitpos = bitsize_zero_node;
1723 : 1054 : rli->offset_align = MIN (rli->offset_align, desired_align);
1724 : :
1725 : 1054 : if (!multiple_of_p (bitsizetype, DECL_SIZE (field),
1726 : 2108 : bitsize_int (rli->offset_align)))
1727 : : {
1728 : 303 : tree type = strip_array_types (TREE_TYPE (field));
1729 : : /* The above adjusts offset_align just based on the start of the
1730 : : field. The field might not have a size that is a multiple of
1731 : : that offset_align though. If the field is an array of fixed
1732 : : sized elements, assume there can be any multiple of those
1733 : : sizes. If it is a variable length aggregate or array of
1734 : : variable length aggregates, assume worst that the end is
1735 : : just BITS_PER_UNIT aligned. */
1736 : 303 : if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
1737 : : {
1738 : 303 : if (TREE_INT_CST_LOW (TYPE_SIZE (type)))
1739 : : {
1740 : 303 : unsigned HOST_WIDE_INT sz
1741 : 303 : = least_bit_hwi (TREE_INT_CST_LOW (TYPE_SIZE (type)));
1742 : 303 : rli->offset_align = MIN (rli->offset_align, sz);
1743 : : }
1744 : : }
1745 : : else
1746 : 0 : rli->offset_align = MIN (rli->offset_align, BITS_PER_UNIT);
1747 : : }
1748 : : }
1749 : 50138227 : else if (targetm.ms_bitfield_layout_p (rli->t))
1750 : : {
1751 : 201 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1752 : :
1753 : : /* If FIELD is the last field and doesn't end at the full length
1754 : : of the type, then pad the struct out to the full length of the
1755 : : last type. */
1756 : 201 : if (DECL_BIT_FIELD_TYPE (field)
1757 : 201 : && !integer_zerop (DECL_SIZE (field)))
1758 : : {
1759 : : /* We have to scan, because non-field DECLS are also here. */
1760 : : tree probe = field;
1761 : 182 : while ((probe = DECL_CHAIN (probe)))
1762 : 135 : if (TREE_CODE (probe) == FIELD_DECL)
1763 : : break;
1764 : 119 : if (!probe)
1765 : 47 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1766 : : bitsize_int (rli->remaining_in_alignment));
1767 : : }
1768 : :
1769 : 201 : normalize_rli (rli);
1770 : : }
1771 : : else
1772 : : {
1773 : 50138026 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1774 : 50138026 : normalize_rli (rli);
1775 : : }
1776 : : }
1777 : :
1778 : : /* Assuming that all the fields have been laid out, this function uses
1779 : : RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1780 : : indicated by RLI. */
1781 : :
1782 : : static void
1783 : 42231982 : finalize_record_size (record_layout_info rli)
1784 : : {
1785 : 42231982 : tree unpadded_size, unpadded_size_unit;
1786 : :
1787 : : /* Now we want just byte and bit offsets, so set the offset alignment
1788 : : to be a byte and then normalize. */
1789 : 42231982 : rli->offset_align = BITS_PER_UNIT;
1790 : 42231982 : normalize_rli (rli);
1791 : :
1792 : : /* Determine the desired alignment. */
1793 : : #ifdef ROUND_TYPE_ALIGN
1794 : : SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1795 : : rli->record_align));
1796 : : #else
1797 : 42231982 : SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align));
1798 : : #endif
1799 : :
1800 : : /* Compute the size so far. Be sure to allow for extra bits in the
1801 : : size in bytes. We have guaranteed above that it will be no more
1802 : : than a single byte. */
1803 : 42231982 : unpadded_size = rli_size_so_far (rli);
1804 : 42231982 : unpadded_size_unit = rli_size_unit_so_far (rli);
1805 : 42231982 : if (! integer_zerop (rli->bitpos))
1806 : 2184 : unpadded_size_unit
1807 : 2184 : = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1808 : :
1809 : : /* Round the size up to be a multiple of the required alignment. */
1810 : 42231982 : TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1811 : 42231982 : TYPE_SIZE_UNIT (rli->t)
1812 : 42231982 : = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1813 : :
1814 : 42231982 : if (TREE_CONSTANT (unpadded_size)
1815 : 42231287 : && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
1816 : 1054575 : && input_location != BUILTINS_LOCATION
1817 : 43286318 : && !TYPE_ARTIFICIAL (rli->t))
1818 : : {
1819 : 1046659 : tree pad_size
1820 : 1046659 : = size_binop (MINUS_EXPR, TYPE_SIZE_UNIT (rli->t), unpadded_size_unit);
1821 : 1046659 : warning (OPT_Wpadded,
1822 : : "padding struct size to alignment boundary with %E bytes", pad_size);
1823 : : }
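: : /* E.g., assuming 32-bit int, struct { int i; char c; } has an
: : unpadded size of 5 bytes but 4-byte alignment, so TYPE_SIZE is
: : rounded to 8 bytes and -Wpadded reports 3 bytes of padding. */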
1824 : :
1825 : 19 : if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1826 : 19 : && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1827 : 42231984 : && TREE_CONSTANT (unpadded_size))
1828 : : {
1829 : 2 : tree unpacked_size;
1830 : :
1831 : : #ifdef ROUND_TYPE_ALIGN
1832 : : rli->unpacked_align
1833 : : = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1834 : : #else
1835 : 2 : rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1836 : : #endif
1837 : :
1838 : 2 : unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1839 : 2 : if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1840 : : {
1841 : 2 : if (TYPE_NAME (rli->t))
1842 : : {
1843 : 2 : tree name;
1844 : :
1845 : 2 : if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1846 : 2 : name = TYPE_NAME (rli->t);
1847 : : else
1848 : 0 : name = DECL_NAME (TYPE_NAME (rli->t));
1849 : :
1850 : 2 : if (STRICT_ALIGNMENT)
1851 : : warning (OPT_Wpacked, "packed attribute causes inefficient "
1852 : : "alignment for %qE", name);
1853 : : else
1854 : 2 : warning (OPT_Wpacked,
1855 : : "packed attribute is unnecessary for %qE", name);
1856 : : }
1857 : : else
1858 : : {
1859 : 0 : if (STRICT_ALIGNMENT)
1860 : : warning (OPT_Wpacked,
1861 : : "packed attribute causes inefficient alignment");
1862 : : else
1863 : 0 : warning (OPT_Wpacked, "packed attribute is unnecessary");
1864 : : }
1865 : : }
1866 : : }
1867 : 42231982 : }
1868 : :
1869 : : /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1870 : :
1871 : : void
1872 : 77698701 : compute_record_mode (tree type)
1873 : : {
1874 : 77698701 : tree field;
1875 : 77698701 : machine_mode mode = VOIDmode;
1876 : :
1877 : : /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1878 : : However, if possible, we use a mode that fits in a register
1879 : : instead, in order to allow for better optimization down the
1880 : : line. */
1881 : 77698701 : SET_TYPE_MODE (type, BLKmode);
1882 : :
1883 : 77698701 : poly_uint64 type_size;
1884 : 77698701 : if (!poly_int_tree_p (TYPE_SIZE (type), &type_size))
1885 : 8094667 : return;
1886 : :
1887 : : /* A record which has any BLKmode members must itself be
1888 : : BLKmode; it can't go in a register, unless the member is
1889 : : BLKmode only because it isn't aligned. */
1890 : 305505156 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1891 : : {
1892 : 235901122 : if (TREE_CODE (field) != FIELD_DECL)
1893 : 165976579 : continue;
1894 : :
1895 : 69924543 : poly_uint64 field_size;
1896 : 69924543 : if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1897 : 69924238 : || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1898 : 40112763 : && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1899 : 80171049 : && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1900 : 40058286 : && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1901 : 61843663 : || !tree_fits_poly_uint64_p (bit_position (field))
1902 : 61843663 : || DECL_SIZE (field) == 0
1903 : 131768206 : || !poly_int_tree_p (DECL_SIZE (field), &field_size))
1904 : 8093934 : return;
1905 : :
1906 : : /* If this field is the whole struct, remember its mode so
1907 : : that, say, we can put a double in a class into a DF
1908 : : register instead of forcing it to live in the stack. */
1909 : 61843663 : if (known_eq (field_size, type_size)
1910 : : /* Partial int types (e.g. __int20) may have TYPE_SIZE equal to
1911 : : wider types (e.g. int32), despite precision being less. Ensure
1912 : : that the TYPE_MODE of the struct does not get set to the partial
1913 : : int mode if there is a wider type also in the struct. */
1914 : 61843663 : && known_gt (GET_MODE_PRECISION (DECL_MODE (field)),
1915 : : GET_MODE_PRECISION (mode)))
1916 : 4251118 : mode = DECL_MODE (field);
1917 : :
1918 : : /* With some targets, it is sub-optimal to access an aligned
1919 : : BLKmode structure as a scalar. */
1920 : 61843663 : if (targetm.member_type_forces_blk (field, mode))
1921 : : return;
1922 : : }
1923 : :
1924 : : /* If we only have one real field, use its mode if that mode's size
1925 : : matches the type's size. This generally only applies to RECORD_TYPE.
1926 : : For UNION_TYPE, if the widest field is MODE_INT then use that mode.
1927 : : If the widest field is MODE_PARTIAL_INT, and the union will be passed
1928 : : by reference, then use that mode. */
1929 : 69604034 : if ((TREE_CODE (type) == RECORD_TYPE
1930 : 457880 : || (TREE_CODE (type) == UNION_TYPE
1931 : 457880 : && (GET_MODE_CLASS (mode) == MODE_INT
1932 : 63111 : || (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
1933 : 0 : && (targetm.calls.pass_by_reference
1934 : 0 : (pack_cumulative_args (0),
1935 : 4128995 : function_arg_info (type, mode, /*named=*/false)))))))
1936 : 69540923 : && mode != VOIDmode
1937 : 73733163 : && known_eq (GET_MODE_BITSIZE (mode), type_size))
1938 : : ;
1939 : : else
1940 : 65475039 : mode = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1).else_blk ();
1941 : :
1942 : : /* If structure's known alignment is less than what the scalar
1943 : : mode would need, and it matters, then stick with BLKmode. */
1944 : 69604034 : if (mode != BLKmode
1945 : : && STRICT_ALIGNMENT
1946 : : && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1947 : : || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
1948 : : {
1949 : : /* If this is the only reason this type is BLKmode, then
1950 : : don't force containing types to be BLKmode. */
1951 : : TYPE_NO_FORCE_BLK (type) = 1;
1952 : : mode = BLKmode;
1953 : : }
1954 : :
1955 : 69604034 : SET_TYPE_MODE (type, mode);
1956 : : }
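: : /* Some illustrative outcomes of the above, assuming an LP64
: : target: struct { double d; } takes DFmode from its single field;
: : struct { int a; int b; } gets DImode via mode_for_size_tree;
: : struct { char buf[3]; } has no 3-byte integer mode and stays
: : BLKmode. */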
1957 : :
1958 : : /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1959 : : out. */
1960 : :
1961 : : static void
1962 : 1274622514 : finalize_type_size (tree type)
1963 : : {
1964 : : /* Normally, use the alignment corresponding to the mode chosen.
1965 : : However, where strict alignment is not required, avoid
1966 : : over-aligning structures, since most compilers do not do this
1967 : : alignment. */
1968 : 1274622514 : bool tua_cleared_p = false;
1969 : 1274622514 : if (TYPE_MODE (type) != BLKmode
1970 : 1213008515 : && TYPE_MODE (type) != VOIDmode
1971 : 2487346245 : && (STRICT_ALIGNMENT || !AGGREGATE_TYPE_P (type)))
1972 : : {
1973 : 1159807456 : unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1974 : :
1975 : : /* Don't override a larger alignment requirement coming from a user
1976 : : alignment of one of the fields. */
1977 : 1159807456 : if (mode_align >= TYPE_ALIGN (type))
1978 : : {
1979 : 1159807456 : SET_TYPE_ALIGN (type, mode_align);
1980 : : /* Remember that we're about to reset this flag. */
1981 : 1159807456 : tua_cleared_p = TYPE_USER_ALIGN (type);
1982 : 1159807456 : TYPE_USER_ALIGN (type) = false;
1983 : : }
1984 : : }
1985 : :
1986 : : /* Do machine-dependent extra alignment. */
1987 : : #ifdef ROUND_TYPE_ALIGN
1988 : : SET_TYPE_ALIGN (type,
1989 : : ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT));
1990 : : #endif
1991 : :
1992 : : /* If we failed to find a simple way to calculate the unit size
1993 : : of the type, find it by division. */
1994 : 1274622514 : if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1995 : : /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1996 : : result will fit in sizetype. We will get more efficient code using
1997 : : sizetype, so we force a conversion. */
1998 : 0 : TYPE_SIZE_UNIT (type)
1999 : 0 : = fold_convert (sizetype,
2000 : : size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
2001 : : bitsize_unit_node));
2002 : :
2003 : 1274622514 : if (TYPE_SIZE (type) != 0)
2004 : : {
2005 : 1267373468 : TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
2006 : 1267373468 : TYPE_SIZE_UNIT (type)
2007 : 2534746936 : = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type));
2008 : : }
2009 : :
2010 : : /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
2011 : 1274622514 : if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2012 : 275230 : TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
2013 : 1274622514 : if (TYPE_SIZE_UNIT (type) != 0
2014 : 1274622514 : && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
2015 : 275230 : TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
2016 : :
2017 : : /* Handle empty records as per the x86-64 psABI. */
2018 : 1274622514 : TYPE_EMPTY_P (type) = targetm.calls.empty_record_p (type);
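: : /* E.g. an empty C++ class, struct e { };, still has size 1, but
: : marking it TYPE_EMPTY_P lets argument passing skip registers
: : and stack space for it on targets whose ABI ignores empty
: : records, such as x86-64. */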
2019 : :
2020 : : /* Also layout any other variants of the type. */
2021 : 1274622514 : if (TYPE_NEXT_VARIANT (type)
2022 : 1274622514 : || type != TYPE_MAIN_VARIANT (type))
2023 : : {
2024 : 38558627 : tree variant;
2025 : : /* Record layout info of this variant. */
2026 : 38558627 : tree size = TYPE_SIZE (type);
2027 : 38558627 : tree size_unit = TYPE_SIZE_UNIT (type);
2028 : 38558627 : unsigned int align = TYPE_ALIGN (type);
2029 : 38558627 : unsigned int precision = TYPE_PRECISION (type);
2030 : 38558627 : unsigned int user_align = TYPE_USER_ALIGN (type);
2031 : 38558627 : machine_mode mode = TYPE_MODE (type);
2032 : 38558627 : bool empty_p = TYPE_EMPTY_P (type);
2033 : 38558627 : bool typeless = AGGREGATE_TYPE_P (type) && TYPE_TYPELESS_STORAGE (type);
2034 : :
2035 : : /* Copy it into all variants. */
2036 : 38558627 : for (variant = TYPE_MAIN_VARIANT (type);
2037 : 128644935 : variant != NULL_TREE;
2038 : 90086308 : variant = TYPE_NEXT_VARIANT (variant))
2039 : : {
2040 : 90086308 : TYPE_SIZE (variant) = size;
2041 : 90086308 : TYPE_SIZE_UNIT (variant) = size_unit;
2042 : 90086308 : unsigned valign = align;
2043 : 90086308 : if (TYPE_USER_ALIGN (variant))
2044 : : {
2045 : 583570 : valign = MAX (valign, TYPE_ALIGN (variant));
2046 : : /* If we reset TYPE_USER_ALIGN on the main variant, we might
2047 : : need to reset it on the variants too. TYPE_MODE will be set
2048 : : to MODE in this variant, so we can use that. */
2049 : 583570 : if (tua_cleared_p && GET_MODE_ALIGNMENT (mode) >= valign)
2050 : 0 : TYPE_USER_ALIGN (variant) = false;
2051 : : }
2052 : : else
2053 : 89502738 : TYPE_USER_ALIGN (variant) = user_align;
2054 : 90086308 : SET_TYPE_ALIGN (variant, valign);
2055 : 90086308 : TYPE_PRECISION (variant) = precision;
2056 : 90086308 : SET_TYPE_MODE (variant, mode);
2057 : 90086308 : TYPE_EMPTY_P (variant) = empty_p;
2058 : 90086308 : if (AGGREGATE_TYPE_P (variant))
2059 : 90086288 : TYPE_TYPELESS_STORAGE (variant) = typeless;
2060 : : }
2061 : : }
2062 : 1274622514 : }
2063 : :
2064 : : /* Return a new underlying object for a bitfield started with FIELD. */
2065 : :
2066 : : static tree
2067 : 101299 : start_bitfield_representative (tree field)
2068 : : {
2069 : 101299 : tree repr = make_node (FIELD_DECL);
2070 : 101299 : DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
2071 : : /* Force the representative to begin at a BITS_PER_UNIT aligned
2072 : : boundary - C++ may use tail-padding of a base object to
2073 : : continue packing bits so the bitfield region does not start
2074 : : at bit zero (see g++.dg/abi/bitfield5.C for example).
2075 : : Unallocated bits may happen for other reasons as well,
2076 : : for example in Ada, which allows explicit bit-granular structure layout. */
2077 : 202598 : DECL_FIELD_BIT_OFFSET (repr)
2078 : 101299 : = size_binop (BIT_AND_EXPR,
2079 : : DECL_FIELD_BIT_OFFSET (field),
2080 : : bitsize_int (~(BITS_PER_UNIT - 1)));
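: : /* With BITS_PER_UNIT == 8 the mask is ~7, so a field starting at
: : bit offset 11 yields a representative starting at bit 8 of the
: : same byte offset. */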
2081 : 101299 : SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
2082 : 101299 : DECL_SIZE (repr) = DECL_SIZE (field);
2083 : 101299 : DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
2084 : 101299 : DECL_PACKED (repr) = DECL_PACKED (field);
2085 : 101299 : DECL_CONTEXT (repr) = DECL_CONTEXT (field);
2086 : : /* There are no indirect accesses to this field. If we introduce
2087 : : some then they have to use the record alias set. This makes
2088 : : sure to properly conflict with [indirect] accesses to addressable
2089 : : fields of the bitfield group. */
2090 : 101299 : DECL_NONADDRESSABLE_P (repr) = 1;
2091 : 101299 : return repr;
2092 : : }
2093 : :
2094 : : /* Finish up a bitfield group that was started by creating the underlying
2095 : : object REPR with the last field in the bitfield group FIELD. */
2096 : :
2097 : : static void
2098 : 101299 : finish_bitfield_representative (tree repr, tree field)
2099 : : {
2100 : 101299 : unsigned HOST_WIDE_INT bitsize, maxbitsize;
2101 : 101299 : tree nextf, size;
2102 : :
2103 : 101299 : size = size_diffop (DECL_FIELD_OFFSET (field),
2104 : : DECL_FIELD_OFFSET (repr));
2105 : 202624 : while (TREE_CODE (size) == COMPOUND_EXPR)
2106 : 26 : size = TREE_OPERAND (size, 1);
2107 : 101299 : gcc_assert (tree_fits_uhwi_p (size));
2108 : 101299 : bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
2109 : 101299 : + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2110 : 101299 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
2111 : 101299 : + tree_to_uhwi (DECL_SIZE (field)));
2112 : :
2113 : : /* Round up bitsize to multiples of BITS_PER_UNIT. */
2114 : 101299 : bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
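: : /* Worked example: for a group int a : 7; int b : 10; whose
: : representative starts at bit 0 and whose last field b sits at
: : bit offset 7, bitsize == 7 + 10 == 17, and the
: : (x + 7) & ~7 round-up above yields 24 bits. */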
2115 : :
2116 : : /* Now nothing tells us how to pad out bitsize ... */
2117 : 101299 : if (TREE_CODE (DECL_CONTEXT (field)) == RECORD_TYPE)
2118 : : {
2119 : 97863 : nextf = DECL_CHAIN (field);
2120 : 219974 : while (nextf && TREE_CODE (nextf) != FIELD_DECL)
2121 : 122111 : nextf = DECL_CHAIN (nextf);
2122 : : }
2123 : : else
2124 : : nextf = NULL_TREE;
2125 : 97863 : if (nextf)
2126 : : {
2127 : 62757 : tree maxsize;
2128 : : /* If there was an error, the field may be not laid out
2129 : : correctly. Don't bother to do anything. */
2130 : 62757 : if (TREE_TYPE (nextf) == error_mark_node)
2131 : : {
2132 : 1 : TREE_TYPE (repr) = error_mark_node;
2133 : 1 : return;
2134 : : }
2135 : 62756 : maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
2136 : : DECL_FIELD_OFFSET (repr));
2137 : 62756 : if (tree_fits_uhwi_p (maxsize))
2138 : : {
2139 : 62740 : maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
2140 : 62740 : + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
2141 : 62740 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2142 : : /* If the group ends within a bitfield, nextf does not need to be
2143 : : aligned to BITS_PER_UNIT. Thus round up. */
2144 : 62740 : maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
2145 : : }
2146 : : else
2147 : : maxbitsize = bitsize;
2148 : : }
2149 : : else
2150 : : {
2151 : : /* Note that if the C++ FE sets up tail-padding to be re-used it
2152 : : creates an as-base variant of the type with TYPE_SIZE adjusted
2153 : : accordingly. So it is safe to include tail-padding here. */
2154 : 38542 : tree aggsize = lang_hooks.types.unit_size_without_reusable_padding
2155 : 38542 : (DECL_CONTEXT (field));
2156 : 38542 : tree maxsize = size_diffop (aggsize, DECL_FIELD_OFFSET (repr));
2157 : : /* We cannot generally rely on maxsize to fold to an integer constant,
2158 : : so use bitsize as a fallback in this case. */
2159 : 38542 : if (tree_fits_uhwi_p (maxsize))
2160 : 38532 : maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
2161 : 38532 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2162 : : else
2163 : : maxbitsize = bitsize;
2164 : : }
2165 : :
2166 : : /* This holds as long as we don't artificially break up the
2167 : : representative in the middle of a large bitfield with different,
2168 : : possibly overlapping representatives, and as long as all
2169 : : representatives start at a byte offset. */
2170 : 101298 : gcc_assert (maxbitsize % BITS_PER_UNIT == 0);
2171 : :
2172 : : /* Find the smallest nice mode to use. */
2173 : 101298 : opt_scalar_int_mode mode_iter;
2174 : 339177 : FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2175 : 678038 : if (GET_MODE_BITSIZE (mode_iter.require ()) >= bitsize)
2176 : : break;
2177 : :
2178 : 101298 : scalar_int_mode mode;
2179 : 101298 : if (!mode_iter.exists (&mode)
2180 : 101140 : || GET_MODE_BITSIZE (mode) > maxbitsize
2181 : 120998 : || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)
2182 : : {
2183 : 41620 : if (TREE_CODE (TREE_TYPE (field)) == BITINT_TYPE)
2184 : : {
2185 : 75 : struct bitint_info info;
2186 : 75 : unsigned prec = TYPE_PRECISION (TREE_TYPE (field));
2187 : 75 : bool ok = targetm.c.bitint_type_info (prec, &info);
2188 : 75 : gcc_assert (ok);
2189 : 75 : scalar_int_mode limb_mode
2190 : 75 : = as_a <scalar_int_mode> (info.abi_limb_mode);
2191 : 75 : unsigned lprec = GET_MODE_PRECISION (limb_mode);
2192 : 75 : if (prec > lprec)
2193 : : {
2194 : : /* For middle/large/huge _BitInt prefer bitsize being a multiple
2195 : : of limb precision. */
2196 : 73 : unsigned HOST_WIDE_INT bsz = CEIL (bitsize, lprec) * lprec;
2197 : 73 : if (bsz <= maxbitsize)
2198 : 75 : bitsize = bsz;
2199 : : }
2200 : : }
2201 : : /* We really want a BLKmode representative only as a last resort,
2202 : : considering the member b in
2203 : : struct { int a : 7; int b : 17; int c; } __attribute__((packed));
2204 : : Otherwise we simply want to split the representative up
2205 : : allowing for overlaps within the bitfield region as required for
2206 : : struct { int a : 7; int b : 7;
2207 : : int c : 10; int d; } __attribute__((packed));
2208 : : [0, 15] HImode for a and b, [8, 23] HImode for c. */
2209 : 41620 : DECL_SIZE (repr) = bitsize_int (bitsize);
2210 : 41620 : DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
2211 : 41620 : SET_DECL_MODE (repr, BLKmode);
2212 : 41620 : TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
2213 : 41620 : bitsize / BITS_PER_UNIT);
2214 : : }
2215 : : else
2216 : : {
2217 : 59678 : unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
2218 : 59678 : DECL_SIZE (repr) = bitsize_int (modesize);
2219 : 59678 : DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
2220 : 59678 : SET_DECL_MODE (repr, mode);
2221 : 59678 : TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
2222 : : }
2223 : :
2224 : : /* Remember whether the bitfield group is at the end of the
2225 : : structure or not. */
2226 : 101298 : DECL_CHAIN (repr) = nextf;
2227 : : }
2228 : :
2229 : : /* Compute and set FIELD_DECLs for the underlying objects we should
2230 : : use for bitfield access for the structure T. */
2231 : :
2232 : : void
2233 : 42231982 : finish_bitfield_layout (tree t)
2234 : : {
2235 : 42231982 : tree field, prev;
2236 : 42231982 : tree repr = NULL_TREE;
2237 : :
2238 : 42231982 : if (TREE_CODE (t) == QUAL_UNION_TYPE)
2239 : : return;
2240 : :
2241 : 42231982 : for (prev = NULL_TREE, field = TYPE_FIELDS (t);
2242 : 364529821 : field; field = DECL_CHAIN (field))
2243 : : {
2244 : 322297839 : if (TREE_CODE (field) != FIELD_DECL)
2245 : 266282320 : continue;
2246 : :
2247 : : /* In the C++ memory model, consecutive bit fields in a structure are
2248 : : considered one memory location and updating a memory location
2249 : : may not store into adjacent memory locations. */
2250 : 56015519 : if (!repr
2251 : 56015519 : && DECL_BIT_FIELD_TYPE (field))
2252 : : {
2253 : : /* Start new representative. */
2254 : 101298 : repr = start_bitfield_representative (field);
2255 : : }
2256 : 55914221 : else if (repr
2257 : 55914221 : && ! DECL_BIT_FIELD_TYPE (field))
2258 : : {
2259 : : /* Finish off new representative. */
2260 : 62132 : finish_bitfield_representative (repr, prev);
2261 : 62132 : repr = NULL_TREE;
2262 : : }
2263 : 55852089 : else if (DECL_BIT_FIELD_TYPE (field))
2264 : : {
2265 : 367987 : gcc_assert (repr != NULL_TREE);
2266 : :
2267 : : /* Zero-size bitfields finish off a representative and
2268 : : do not have a representative themselves. This is
2269 : : required by the C++ memory model. */
2270 : 367987 : if (integer_zerop (DECL_SIZE (field)))
2271 : : {
2272 : 624 : finish_bitfield_representative (repr, prev);
2273 : 624 : repr = NULL_TREE;
2274 : : }
2275 : :
2276 : : /* We assume that the DECL_FIELD_OFFSETs of the representative and
2277 : : of each bitfield member are either constants or equal.
2278 : : This is because we need to be able to compute the bit-offset
2279 : : of each field relative to the representative in get_bit_range
2280 : : during RTL expansion.
2281 : : If these constraints are not met, simply force a new
2282 : : representative to be generated. That will at most
2283 : : generate worse code but still maintain correctness with
2284 : : respect to the C++ memory model. */
2285 : 367374 : else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))
2286 : 367352 : && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
2287 : 11 : || operand_equal_p (DECL_FIELD_OFFSET (repr),
2288 : 11 : DECL_FIELD_OFFSET (field), 0)))
2289 : : {
2290 : 1 : finish_bitfield_representative (repr, prev);
2291 : 1 : repr = start_bitfield_representative (field);
2292 : : }
2293 : : }
2294 : : else
2295 : 55484102 : continue;
2296 : :
2297 : 164055 : if (repr)
2298 : 468661 : DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;
2299 : :
2300 : 531417 : if (TREE_CODE (t) == RECORD_TYPE)
2301 : : prev = field;
2302 : 3436 : else if (repr)
2303 : : {
2304 : 3436 : finish_bitfield_representative (repr, field);
2305 : 3436 : repr = NULL_TREE;
2306 : : }
2307 : : }
2308 : :
2309 : 42231982 : if (repr)
2310 : 35106 : finish_bitfield_representative (repr, prev);
2311 : : }
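: : /* To illustrate the grouping, in
: : struct s { int a : 3; int b : 5; char c; int d : 2; };
: : a and b share one representative (finished when c is reached),
: : c gets none, and d starts and ends a second one. A zero-width
: : field, as in int a : 3; int : 0; int b : 2;, likewise forces
: : a and b into separate representatives. */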
2312 : :
2313 : : /* Do all of the work required to layout the type indicated by RLI,
2314 : : once the fields have been laid out. This function will call `free'
2315 : : for RLI, unless FREE_P is false. Passing false for FREE_P is
2316 : : bad practice; this option only exists to support the
2317 : : G++ 3.2 ABI. */
2318 : :
2319 : : void
2320 : 42231982 : finish_record_layout (record_layout_info rli, int free_p)
2321 : : {
2322 : 42231982 : tree variant;
2323 : :
2324 : : /* Compute the final size. */
2325 : 42231982 : finalize_record_size (rli);
2326 : :
2327 : : /* Compute the TYPE_MODE for the record. */
2328 : 42231982 : compute_record_mode (rli->t);
2329 : :
2330 : : /* Perform any last tweaks to the TYPE_SIZE, etc. */
2331 : 42231982 : finalize_type_size (rli->t);
2332 : :
2333 : : /* Compute bitfield representatives. */
2334 : 42231982 : finish_bitfield_layout (rli->t);
2335 : :
2336 : : /* Propagate TYPE_PACKED and TYPE_REVERSE_STORAGE_ORDER to variants.
2337 : : With C++ templates, it is too early to do this when the attribute
2338 : : is being parsed. */
2339 : 91879579 : for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
2340 : 49647597 : variant = TYPE_NEXT_VARIANT (variant))
2341 : : {
2342 : 49647597 : TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
2343 : 99295194 : TYPE_REVERSE_STORAGE_ORDER (variant)
2344 : 49647597 : = TYPE_REVERSE_STORAGE_ORDER (rli->t);
2345 : : }
2346 : :
2347 : : /* Lay out any static members. This is done now because their type
2348 : : may use the record's type. */
2349 : 52180080 : while (!vec_safe_is_empty (rli->pending_statics))
2350 : 9948098 : layout_decl (rli->pending_statics->pop (), 0);
2351 : :
2352 : : /* Clean up. */
2353 : 42231982 : if (free_p)
2354 : : {
2355 : 42231982 : vec_free (rli->pending_statics);
2356 : 42231982 : free (rli);
2357 : : }
2358 : 42231982 : }
2359 : :
2360 : :
2361 : : /* Finish processing a builtin RECORD_TYPE type TYPE. Its name is
2362 : : NAME, its fields are chained in reverse on FIELDS.
2363 : :
2364 : : If ALIGN_TYPE is non-null, it is given the same alignment as
2365 : : ALIGN_TYPE. */
2366 : :
2367 : : void
2368 : 1237691 : finish_builtin_struct (tree type, const char *name, tree fields,
2369 : : tree align_type)
2370 : : {
2371 : 1237691 : tree tail, next;
2372 : :
2373 : 3741957 : for (tail = NULL_TREE; fields; tail = fields, fields = next)
2374 : : {
2375 : 2504266 : DECL_FIELD_CONTEXT (fields) = type;
2376 : 2504266 : next = DECL_CHAIN (fields);
2377 : 2504266 : DECL_CHAIN (fields) = tail;
2378 : : }
2379 : 1237691 : TYPE_FIELDS (type) = tail;
2380 : :
2381 : 1237691 : if (align_type)
2382 : : {
2383 : 1126079 : SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type));
2384 : 1126079 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
2385 : 1126079 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2386 : : TYPE_WARN_IF_NOT_ALIGN (align_type));
2387 : : }
2388 : :
2389 : 1237691 : layout_type (type);
2390 : : #if 0 /* not yet, should get fixed properly later */
2391 : : TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
2392 : : #else
2393 : 1237691 : TYPE_NAME (type) = build_decl (BUILTINS_LOCATION,
2394 : : TYPE_DECL, get_identifier (name), type);
2395 : : #endif
2396 : 1237691 : TYPE_STUB_DECL (type) = TYPE_NAME (type);
2397 : 1237691 : layout_decl (TYPE_NAME (type), 0);
2398 : 1237691 : }
2399 : :
2400 : : /* Compute TYPE_MODE for TYPE (which is ARRAY_TYPE). */
2401 : :
2402 : 67196599 : void compute_array_mode (tree type)
2403 : : {
2404 : 67196599 : gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
2405 : :
2406 : 67196599 : SET_TYPE_MODE (type, BLKmode);
2407 : 67196599 : if (TYPE_SIZE (type) != 0
2408 : 60232337 : && ! targetm.member_type_forces_blk (type, VOIDmode)
2409 : : /* BLKmode elements force BLKmode aggregate;
2410 : : else extract/store fields may lose. */
2411 : 127428936 : && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
2412 : 582400 : || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
2413 : : {
2414 : 59649937 : SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),
2415 : : TYPE_SIZE (type)));
2416 : 59649937 : if (TYPE_MODE (type) != BLKmode
2417 : : && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
2418 : : && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
2419 : : {
2420 : : TYPE_NO_FORCE_BLK (type) = 1;
2421 : : SET_TYPE_MODE (type, BLKmode);
2422 : : }
2423 : : }
2424 : 67196599 : }
2425 : :
2426 : : /* Calculate the mode, size, and alignment for TYPE.
2427 : : For an array type, calculate the element separation as well.
2428 : : Record TYPE on the chain of permanent or temporary types
2429 : : so that dbxout will find out about it.
2430 : :
2431 : : TYPE_SIZE of a type is nonzero if the type has been laid out already.
2432 : : layout_type does nothing on such a type.
2433 : :
2434 : : If the type is incomplete, its TYPE_SIZE remains zero. */
2435 : :
2436 : : void
2437 : 2129503559 : layout_type (tree type)
2438 : : {
2439 : 2129503559 : gcc_assert (type);
2440 : :
2441 : 2129503559 : if (type == error_mark_node)
2442 : : return;
2443 : :
2444 : : /* We don't want finalize_type_size to copy an alignment attribute to
2445 : : variants that don't have it. */
2446 : 2129503515 : type = TYPE_MAIN_VARIANT (type);
2447 : :
2448 : : /* Do nothing if type has been laid out before. */
2449 : 2129503515 : if (TYPE_SIZE (type))
2450 : : return;
2451 : :
2452 : 1237557949 : switch (TREE_CODE (type))
2453 : : {
2454 : 0 : case LANG_TYPE:
2455 : : /* This kind of type is the responsibility
2456 : : of the language-specific code. */
2457 : 0 : gcc_unreachable ();
2458 : :
2459 : 12908346 : case BOOLEAN_TYPE:
2460 : 12908346 : case INTEGER_TYPE:
2461 : 12908346 : case ENUMERAL_TYPE:
2462 : 12908346 : {
2463 : 12908346 : scalar_int_mode mode
2464 : 12908346 : = smallest_int_mode_for_size (TYPE_PRECISION (type)).require ();
2465 : 12908346 : SET_TYPE_MODE (type, mode);
2466 : 25816692 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2467 : : /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */
2468 : 25816692 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2469 : 12908346 : break;
2470 : : }
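: : /* E.g. an enumeration with TYPE_PRECISION 3 gets QImode in the
: : case above, so its TYPE_SIZE is 8 bits even though its
: : precision stays 3. */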
2471 : :
2472 : 1490005 : case BITINT_TYPE:
2473 : 1490005 : {
2474 : 1490005 : struct bitint_info info;
2475 : 1490005 : int cnt;
2476 : 1490005 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
2477 : 1490005 : gcc_assert (ok);
2478 : 1490005 : scalar_int_mode limb_mode
2479 : 1490005 : = as_a <scalar_int_mode> (info.abi_limb_mode);
2480 : 1490005 : if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
2481 : : {
2482 : 1365 : SET_TYPE_MODE (type, limb_mode);
2483 : 1365 : gcc_assert (info.abi_limb_mode == info.limb_mode);
2484 : : cnt = 1;
2485 : : }
2486 : : else
2487 : : {
2488 : 1488640 : SET_TYPE_MODE (type, BLKmode);
2489 : 1488640 : cnt = CEIL (TYPE_PRECISION (type), GET_MODE_PRECISION (limb_mode));
2490 : 1488640 : gcc_assert (info.abi_limb_mode == info.limb_mode
2491 : : || !info.big_endian == !WORDS_BIG_ENDIAN);
2492 : : }
2493 : 2980010 : TYPE_SIZE (type) = bitsize_int (cnt * GET_MODE_BITSIZE (limb_mode));
2494 : 2980010 : TYPE_SIZE_UNIT (type) = size_int (cnt * GET_MODE_SIZE (limb_mode));
2495 : 1490005 : SET_TYPE_ALIGN (type, GET_MODE_ALIGNMENT (limb_mode));
2496 : 1490005 : if (cnt > 1)
2497 : : {
2498 : : /* Use same mode as compute_record_mode would use for a structure
2499 : : containing cnt limb_mode elements. */
2500 : 1488640 : machine_mode mode = mode_for_size_tree (TYPE_SIZE (type),
2501 : 1488640 : MODE_INT, 1).else_blk ();
2502 : 1488640 : if (mode == BLKmode)
2503 : : break;
2504 : 37383 : finalize_type_size (type);
2505 : 37383 : SET_TYPE_MODE (type, mode);
2506 : 37383 : if (STRICT_ALIGNMENT
2507 : : && !(TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
2508 : : || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
2509 : : {
2510 : : /* If this is the only reason this type is BLKmode, then
2511 : : don't force containing types to be BLKmode. */
2512 : : TYPE_NO_FORCE_BLK (type) = 1;
2513 : : SET_TYPE_MODE (type, BLKmode);
2514 : : }
2515 : 37383 : if (TYPE_NEXT_VARIANT (type) || type != TYPE_MAIN_VARIANT (type))
2516 : 0 : for (tree variant = TYPE_MAIN_VARIANT (type);
2517 : 0 : variant != NULL_TREE;
2518 : 0 : variant = TYPE_NEXT_VARIANT (variant))
2519 : : {
2520 : 0 : SET_TYPE_MODE (variant, mode);
2521 : 0 : if (STRICT_ALIGNMENT
2522 : : && !(TYPE_ALIGN (variant) >= BIGGEST_ALIGNMENT
2523 : : || (TYPE_ALIGN (variant)
2524 : : >= GET_MODE_ALIGNMENT (mode))))
2525 : : {
2526 : : TYPE_NO_FORCE_BLK (variant) = 1;
2527 : : SET_TYPE_MODE (variant, BLKmode);
2528 : : }
2529 : : }
2530 : 37383 : return;
2531 : : }
2532 : : break;
2533 : : }
2534 : :
2535 : 4158405 : case REAL_TYPE:
2536 : 4158405 : {
2537 : : /* Allow the caller to choose the type mode, which is how decimal
2538 : : floats are distinguished from binary ones. */
2539 : 4158405 : if (TYPE_MODE (type) == VOIDmode)
2540 : 2069881 : SET_TYPE_MODE
2541 : : (type, float_mode_for_size (TYPE_PRECISION (type)).require ());
2542 : 4158405 : scalar_float_mode mode = as_a <scalar_float_mode> (TYPE_MODE (type));
2543 : 8316810 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2544 : 8316810 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2545 : 4158405 : break;
2546 : : }
2547 : :
2548 : 10252224 : case FIXED_POINT_TYPE:
2549 : 10252224 : {
2550 : : /* TYPE_MODE (type) has been set already. */
2551 : 10252224 : scalar_mode mode = SCALAR_TYPE_MODE (type);
2552 : 20504448 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2553 : 20504448 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2554 : 10252224 : break;
2555 : : }
2556 : :
2557 : 5203436 : case COMPLEX_TYPE:
2558 : 5203436 : TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
2559 : 5203436 : if (TYPE_MODE (TREE_TYPE (type)) == BLKmode)
2560 : : {
2561 : 4556 : gcc_checking_assert (TREE_CODE (TREE_TYPE (type)) == BITINT_TYPE);
2562 : 4556 : SET_TYPE_MODE (type, BLKmode);
2563 : 4556 : TYPE_SIZE (type)
2564 : 4556 : = int_const_binop (MULT_EXPR, TYPE_SIZE (TREE_TYPE (type)),
2565 : 4556 : bitsize_int (2));
2566 : 4556 : TYPE_SIZE_UNIT (type)
2567 : 4556 : = int_const_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (type)),
2568 : 4556 : bitsize_int (2));
2569 : 4556 : break;
2570 : : }
2571 : 5198880 : SET_TYPE_MODE (type,
2572 : : GET_MODE_COMPLEX_MODE (TYPE_MODE (TREE_TYPE (type))));
2573 : :
2574 : 10397760 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
2575 : 10397760 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
2576 : 5198880 : break;
2577 : :
2578 : 70321170 : case VECTOR_TYPE:
2579 : 70321170 : {
2580 : 70321170 : poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type);
2581 : 70321170 : tree innertype = TREE_TYPE (type);
2582 : :
2583 : : /* Find an appropriate mode for the vector type. */
2584 : 70321170 : if (TYPE_MODE (type) == VOIDmode)
2585 : 35051164 : SET_TYPE_MODE (type,
2586 : : mode_for_vector (SCALAR_TYPE_MODE (innertype),
2587 : : nunits).else_blk ());
2588 : :
2589 : 70321170 : TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
2590 : 70321170 : TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
2591 : : /* Several boolean vector elements may fit in a single unit. */
2592 : 70321170 : if (VECTOR_BOOLEAN_TYPE_P (type)
2593 : 72240735 : && type->type_common.mode != BLKmode)
2594 : : {
2595 : 1919562 : TYPE_SIZE_UNIT (type)
2596 : 3839124 : = size_int (GET_MODE_SIZE (type->type_common.mode));
2597 : 1919562 : TYPE_SIZE (type)
2598 : 5758686 : = bitsize_int (GET_MODE_BITSIZE (type->type_common.mode));
2599 : : }
2600 : : else
2601 : : {
2602 : 68401608 : TYPE_SIZE_UNIT (type)
2603 : 136803216 : = size_int (GET_MODE_SIZE (SCALAR_TYPE_MODE (innertype))
2604 : : * nunits);
2605 : 68401608 : TYPE_SIZE (type)
2606 : 205204824 : = bitsize_int (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (innertype))
2607 : : * nunits);
2608 : : }
2609 : :
2610 : : /* For vector types, we do not default to the mode's alignment.
2611 : : Instead, query a target hook, defaulting to natural alignment.
2612 : : This prevents ABI changes depending on whether or not native
2613 : : vector modes are supported. */
2614 : 70321170 : SET_TYPE_ALIGN (type, targetm.vector_alignment (type));
2615 : :
2616 : : /* However, if the underlying mode requires a bigger alignment than
2617 : : what the target hook provides, we cannot use the mode. For now,
2618 : : simply reject that case. */
2619 : 70321170 : gcc_assert (TYPE_ALIGN (type)
2620 : : >= GET_MODE_ALIGNMENT (TYPE_MODE (type)));
2621 : 70321170 : break;
2622 : : }
2623 : :
2624 : 284784 : case VOID_TYPE:
2625 : : /* This is an incomplete type and so doesn't have a size. */
2626 : 284784 : SET_TYPE_ALIGN (type, 1);
2627 : 284784 : TYPE_USER_ALIGN (type) = 0;
2628 : 284784 : SET_TYPE_MODE (type, VOIDmode);
2629 : 284784 : break;
2630 : :
2631 : 946338 : case OFFSET_TYPE:
2632 : 950457 : TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
2633 : 950457 : TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS);
2634 : : /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be
2635 : : integral, which may be an __intN. */
2636 : 950457 : SET_TYPE_MODE (type, int_mode_for_size (POINTER_SIZE, 0).require ());
2637 : 950457 : TYPE_PRECISION (type) = POINTER_SIZE;
2638 : 946338 : break;
2639 : :
2640 : 864973071 : case FUNCTION_TYPE:
2641 : 864973071 : case METHOD_TYPE:
2642 : : /* It's hard to see what the mode and size of a function ought to
2643 : : be, but we do know the alignment is FUNCTION_BOUNDARY, so
2644 : : make it consistent with that. */
2645 : 864973071 : SET_TYPE_MODE (type,
2646 : : int_mode_for_size (FUNCTION_BOUNDARY, 0).else_blk ());
2647 : 864973071 : TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
2648 : 864973071 : TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
2649 : 864973071 : break;
2650 : :
2651 : 194656154 : case POINTER_TYPE:
2652 : 194656154 : case REFERENCE_TYPE:
2653 : 194656154 : {
2654 : 194656154 : scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
2655 : 389312308 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2656 : 389312308 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2657 : 194656154 : TYPE_UNSIGNED (type) = 1;
2658 : 194656154 : TYPE_PRECISION (type) = GET_MODE_PRECISION (mode);
2659 : : }
2660 : 194656154 : break;
2661 : :
2662 : 67196599 : case ARRAY_TYPE:
2663 : 67196599 : {
2664 : 67196599 : tree index = TYPE_DOMAIN (type);
2665 : 67196599 : tree element = TREE_TYPE (type);
2666 : :
2667 : : /* We need to know both bounds in order to compute the size. */
2668 : 61528419 : if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
2669 : 127490849 : && TYPE_SIZE (element))
2670 : : {
2671 : 60232337 : tree ub = TYPE_MAX_VALUE (index);
2672 : 60232337 : tree lb = TYPE_MIN_VALUE (index);
2673 : 60232337 : tree element_size = TYPE_SIZE (element);
2674 : 60232337 : tree length;
2675 : :
2676 : : /* Make sure that an array of zero-sized elements is zero-sized
2677 : : regardless of its extent. */
2678 : 60232337 : if (integer_zerop (element_size))
2679 : 4701 : length = size_zero_node;
2680 : :
2681 : : /* The computation should happen in the original signedness so
2682 : : that (possible) negative values are handled appropriately
2683 : : when determining overflow. */
2684 : : else
2685 : : {
2686 : : /* ??? When it is obvious that the range is signed
2687 : : represent it using ssizetype. */
2688 : 60227636 : if (TREE_CODE (lb) == INTEGER_CST
2689 : 60227016 : && TREE_CODE (ub) == INTEGER_CST
2690 : 59955421 : && TYPE_UNSIGNED (TREE_TYPE (lb))
2691 : 119415787 : && tree_int_cst_lt (ub, lb))
2692 : : {
2693 : 474 : lb = wide_int_to_tree (ssizetype,
2694 : 474 : offset_int::from (wi::to_wide (lb),
2695 : : SIGNED));
2696 : 474 : ub = wide_int_to_tree (ssizetype,
2697 : 948 : offset_int::from (wi::to_wide (ub),
2698 : : SIGNED));
2699 : : }
2700 : 60227636 : length
2701 : 60227636 : = fold_convert (sizetype,
2702 : : size_binop (PLUS_EXPR,
2703 : : build_int_cst (TREE_TYPE (lb), 1),
2704 : : size_binop (MINUS_EXPR, ub, lb)));
2705 : : }
2706 : :
2707 : : /* ??? We have no way to distinguish a null-sized array from an
2708 : : array spanning the whole sizetype range, so we arbitrarily
2709 : : decide that [0, -1] is the only valid representation. */
2710 : 60232337 : if (integer_zerop (length)
2711 : 42242 : && TREE_OVERFLOW (length)
2712 : 60257901 : && integer_zerop (lb))
2713 : 25564 : length = size_zero_node;
2714 : :
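: : /* E.g. int a[5] has domain [0, 4], so length == 1 + (4 - 0) == 5
: : and, assuming 32-bit int, TYPE_SIZE below becomes 160 bits and
: : TYPE_SIZE_UNIT 20 bytes. */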
2715 : 60232337 : TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
2716 : : bits_from_bytes (length));
2717 : :
2718 : : /* If we know the byte size of the element, calculate the total
2719 : : byte size directly rather than deriving it by division from the
2720 : : size in bits. This optimization substantially helps Fortran
2721 : : assumed-size arrays (where the array size is determined at runtime). */
2722 : 60232337 : if (TYPE_SIZE_UNIT (element))
2723 : 60232337 : TYPE_SIZE_UNIT (type)
2724 : 120464674 : = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
2725 : : }
2726 : :
2727 : : /* Now round the alignment and size,
2728 : : using machine-dependent criteria if any. */
2729 : :
2730 : 67196599 : unsigned align = TYPE_ALIGN (element);
2731 : 67196599 : if (TYPE_USER_ALIGN (type))
2732 : 679385 : align = MAX (align, TYPE_ALIGN (type));
2733 : : else
2734 : 66517214 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
2735 : 67196599 : if (!TYPE_WARN_IF_NOT_ALIGN (type))
2736 : 67196599 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2737 : : TYPE_WARN_IF_NOT_ALIGN (element));
2738 : : #ifdef ROUND_TYPE_ALIGN
2739 : : align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT);
2740 : : #else
2741 : 67196599 : align = MAX (align, BITS_PER_UNIT);
2742 : : #endif
2743 : 67196599 : SET_TYPE_ALIGN (type, align);
2744 : 67196599 : compute_array_mode (type);
2745 : 67196599 : if (AGGREGATE_TYPE_P (element))
2746 : 2349113 : TYPE_TYPELESS_STORAGE (type) = TYPE_TYPELESS_STORAGE (element);
2747 : : /* When the element size is constant, check that it is at least as
2748 : : large as, and a multiple of, the element alignment. */
2749 : 67196599 : if (TYPE_SIZE_UNIT (element)
2750 : 67125943 : && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
2751 : : /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
2752 : : TYPE_ALIGN_UNIT. */
2753 : 67093899 : && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
2754 : 134290483 : && !integer_zerop (TYPE_SIZE_UNIT (element)))
2755 : : {
2756 : 67083995 : if (compare_tree_int (TYPE_SIZE_UNIT (element),
2757 : 67083995 : TYPE_ALIGN_UNIT (element)) < 0)
2758 : 11 : error ("alignment of array elements is greater than "
2759 : : "element size");
2760 : 67083984 : else if (TYPE_ALIGN_UNIT (element) > 1
2761 : 90979330 : && (wi::zext (wi::to_wide (TYPE_SIZE_UNIT (element)),
2762 : 11947673 : ffs_hwi (TYPE_ALIGN_UNIT (element)) - 1)
2763 : 90979330 : != 0))
2764 : 6 : error ("size of array element is not a multiple of its "
2765 : : "alignment");
2766 : : }
2767 : : break;
2768 : : }
2769 : :
2770 : 5167417 : case RECORD_TYPE:
2771 : 5167417 : case UNION_TYPE:
2772 : 5167417 : case QUAL_UNION_TYPE:
2773 : 5167417 : {
2774 : 5167417 : tree field;
2775 : 5167417 : record_layout_info rli;
2776 : :
2777 : : /* Initialize the layout information. */
2778 : 5167417 : rli = start_record_layout (type);
2779 : :
2780 : : /* If this is a QUAL_UNION_TYPE, we want to process the fields
2781 : : in reverse order when building the COND_EXPR that denotes
2782 : : its size; we reverse them back afterwards. */
2783 : 5167417 : if (TREE_CODE (type) == QUAL_UNION_TYPE)
2784 : 0 : TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
2785 : :
2786 : : /* Place all the fields. */
2787 : 25998718 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2788 : 20831301 : place_field (rli, field);
2789 : :
2790 : 5167417 : if (TREE_CODE (type) == QUAL_UNION_TYPE)
2791 : 0 : TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
2792 : :
2793 : : /* Finish laying out the record. */
2794 : 5167417 : finish_record_layout (rli, /*free_p=*/true);
2795 : : }
2796 : 5167417 : break;
2797 : :
2798 : 0 : default:
2799 : 0 : gcc_unreachable ();
2800 : : }
2801 : :
2802 : : /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
2803 : : records and unions, finish_record_layout already called this
2804 : : function. */
2805 : 1237520566 : if (!RECORD_OR_UNION_TYPE_P (type))
2806 : 1232353149 : finalize_type_size (type);
2807 : :
2808 : : /* We should never see alias sets on incomplete aggregates, and we
2809 : : should not call layout_type on already-complete aggregates. */
2810 : 1237520566 : if (AGGREGATE_TYPE_P (type))
2811 : 72364016 : gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
2812 : : }
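: :
: : /* Editor's sketch (hypothetical, not part of this file): a worked example
: : of the ARRAY_TYPE arithmetic above. For the domain [0, 9] and a 32-bit,
: : 4-byte element, length = 1 + (9 - 0) = 10, so TYPE_SIZE is 320 bits and
: : TYPE_SIZE_UNIT is 40 bytes. Assumes a target where integer_type_node
: : is 32 bits wide. */
: :
: : static void ATTRIBUTE_UNUSED
: : layout_array_example (void)
: : {
: : tree domain = build_index_type (size_int (9)); /* indices 0 .. 9 */
: : tree arr = build_array_type (integer_type_node, domain);
: : /* build_array_type runs layout_type, i.e. the ARRAY_TYPE case above. */
: : gcc_assert (tree_to_uhwi (TYPE_SIZE (arr)) == 320);
: : gcc_assert (tree_to_uhwi (TYPE_SIZE_UNIT (arr)) == 40);
: : }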
2813 : :
2814 : : /* Return the least alignment required for type TYPE. */
2815 : :
2816 : : unsigned int
2817 : 38392743 : min_align_of_type (tree type)
2818 : : {
2819 : 38392743 : unsigned int align = TYPE_ALIGN (type);
2820 : 38392743 : if (!TYPE_USER_ALIGN (type))
2821 : : {
2822 : 69895920 : align = MIN (align, BIGGEST_ALIGNMENT);
2823 : : #ifdef BIGGEST_FIELD_ALIGNMENT
2824 : : align = MIN (align, BIGGEST_FIELD_ALIGNMENT);
2825 : : #endif
2826 : 35927050 : unsigned int field_align = align;
2827 : : #ifdef ADJUST_FIELD_ALIGN
2828 : 35927050 : field_align = ADJUST_FIELD_ALIGN (NULL_TREE, type, field_align);
2829 : : #endif
2830 : 35927050 : align = MIN (align, field_align);
2831 : : }
2832 : 38392743 : return align / BITS_PER_UNIT;
2833 : : }
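: :
: : /* Editor's sketch (hypothetical helper, not part of this file): the
: : result is in bytes, not bits, and is never larger than TYPE_ALIGN
: : scaled to bytes. On targets that define ADJUST_FIELD_ALIGN (e.g. ia32,
: : where a double inside a struct is only 4-byte aligned) it can be
: : strictly smaller than TYPE_ALIGN_UNIT. */
: :
: : static unsigned int ATTRIBUTE_UNUSED
: : min_align_example (tree type)
: : {
: : unsigned int bytes = min_align_of_type (type);
: : gcc_checking_assert (bytes * BITS_PER_UNIT <= TYPE_ALIGN (type));
: : return bytes;
: : }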
2834 : :
2835 : : /* Create and return a type for signed integers of PRECISION bits. */
2836 : :
2837 : : tree
2838 : 2685627 : make_signed_type (int precision)
2839 : : {
2840 : 2685627 : tree type = make_node (INTEGER_TYPE);
2841 : :
2842 : 2685627 : TYPE_PRECISION (type) = precision;
2843 : :
2844 : 2685627 : fixup_signed_type (type);
2845 : 2685627 : return type;
2846 : : }
2847 : :
2848 : : /* Create and return a type for unsigned integers of PRECISION bits. */
2849 : :
2850 : : tree
2851 : 8692451 : make_unsigned_type (int precision)
2852 : : {
2853 : 8692451 : tree type = make_node (INTEGER_TYPE);
2854 : :
2855 : 8692451 : TYPE_PRECISION (type) = precision;
2856 : :
2857 : 8692451 : fixup_unsigned_type (type);
2858 : 8692451 : return type;
2859 : : }
2860 : :
2861 : : /* Create and return a fractional fixed-point type of PRECISION bits,
2862 : : with signedness given by UNSIGNEDP and saturation by SATP. */
2863 : :
2864 : : tree
2865 : 5695680 : make_fract_type (int precision, int unsignedp, int satp)
2866 : : {
2867 : 5695680 : tree type = make_node (FIXED_POINT_TYPE);
2868 : :
2869 : 5695680 : TYPE_PRECISION (type) = precision;
2870 : :
2871 : 5695680 : if (satp)
2872 : 2847840 : TYPE_SATURATING (type) = 1;
2873 : :
2874 : : /* Lay out the type: set its alignment, size, etc. */
2875 : 5695680 : TYPE_UNSIGNED (type) = unsignedp;
2876 : 5695680 : enum mode_class mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
2877 : 5695680 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2878 : 5695680 : layout_type (type);
2879 : :
2880 : 5695680 : return type;
2881 : : }
2882 : :
2883 : : /* Create and return an accumulator fixed-point type of PRECISION bits,
2884 : : with signedness given by UNSIGNEDP and saturation by SATP. */
2885 : :
2886 : : tree
2887 : 4556544 : make_accum_type (int precision, int unsignedp, int satp)
2888 : : {
2889 : 4556544 : tree type = make_node (FIXED_POINT_TYPE);
2890 : :
2891 : 4556544 : TYPE_PRECISION (type) = precision;
2892 : :
2893 : 4556544 : if (satp)
2894 : 2278272 : TYPE_SATURATING (type) = 1;
2895 : :
2896 : : /* Lay out the type: set its alignment, size, etc. */
2897 : 4556544 : TYPE_UNSIGNED (type) = unsignedp;
2898 : 4556544 : enum mode_class mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
2899 : 4556544 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2900 : 4556544 : layout_type (type);
2901 : :
2902 : 4556544 : return type;
2903 : : }
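: :
: : /* Editor's sketch (hypothetical, not part of this file; assumes a target
: : with fixed-point support, such as arm or avr): a 16-bit saturating
: : unsigned fractional type, corresponding to _Sat unsigned _Fract where
: : that type is 16 bits wide. mode_for_size then selects the 16-bit
: : MODE_UFRACT mode (UHQmode). */
: :
: : static tree ATTRIBUTE_UNUSED
: : make_sat_ufract_example (void)
: : {
: : return make_fract_type (16, /*unsignedp=*/1, /*satp=*/1);
: : }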
2904 : :
2905 : : /* Initialize sizetypes so layout_type can use them. */
2906 : :
2907 : : void
2908 : 284784 : initialize_sizetypes (void)
2909 : : {
2910 : 284784 : int precision, bprecision;
2911 : :
2912 : : /* Get sizetype's precision from the SIZETYPE target macro. */
2913 : 291963 : if (strcmp (SIZETYPE, "unsigned int") == 0)
2914 : : precision = INT_TYPE_SIZE;
2915 : 277605 : else if (strcmp (SIZETYPE, "long unsigned int") == 0)
2916 : 277605 : precision = LONG_TYPE_SIZE;
2917 : 0 : else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
2918 : : precision = LONG_LONG_TYPE_SIZE;
2919 : 0 : else if (strcmp (SIZETYPE, "short unsigned int") == 0)
2920 : : precision = SHORT_TYPE_SIZE;
2921 : : else
2922 : : {
2923 : : int i;
2924 : :
2925 : : precision = -1;
2926 : 0 : for (i = 0; i < NUM_INT_N_ENTS; i++)
2927 : 0 : if (int_n_enabled_p[i])
2928 : : {
2929 : 0 : char name[50], altname[50];
2930 : 0 : sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
2931 : 0 : sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
2932 : :
2933 : 0 : if (strcmp (name, SIZETYPE) == 0
2934 : 0 : || strcmp (altname, SIZETYPE) == 0)
2935 : : {
2936 : 0 : precision = int_n_data[i].bitsize;
2937 : : }
2938 : : }
2939 : 0 : if (precision == -1)
2940 : 0 : gcc_unreachable ();
2941 : : }
2942 : :
2943 : 284784 : bprecision
2944 : 569568 : = MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE);
2945 : 284784 : bprecision
2946 : 284784 : = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision).require ());
2947 : 284784 : if (bprecision > HOST_BITS_PER_DOUBLE_INT)
2948 : : bprecision = HOST_BITS_PER_DOUBLE_INT;
2949 : :
2950 : : /* Create stubs for sizetype and bitsizetype so we can create constants. */
2951 : 284784 : sizetype = make_node (INTEGER_TYPE);
2952 : 284784 : TYPE_NAME (sizetype) = get_identifier ("sizetype");
2953 : 284784 : TYPE_PRECISION (sizetype) = precision;
2954 : 284784 : TYPE_UNSIGNED (sizetype) = 1;
2955 : 284784 : bitsizetype = make_node (INTEGER_TYPE);
2956 : 284784 : TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
2957 : 284784 : TYPE_PRECISION (bitsizetype) = bprecision;
2958 : 284784 : TYPE_UNSIGNED (bitsizetype) = 1;
2959 : :
2960 : : /* Now layout both types manually. */
2961 : 284784 : scalar_int_mode mode = smallest_int_mode_for_size (precision).require ();
2962 : 284784 : SET_TYPE_MODE (sizetype, mode);
2963 : 284784 : SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
2964 : 284784 : TYPE_SIZE (sizetype) = bitsize_int (precision);
2965 : 569568 : TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (mode));
2966 : 284784 : set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);
2967 : :
2968 : 284784 : mode = smallest_int_mode_for_size (bprecision).require ();
2969 : 284784 : SET_TYPE_MODE (bitsizetype, mode);
2970 : 284784 : SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
2971 : 284784 : TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
2972 : 569568 : TYPE_SIZE_UNIT (bitsizetype) = size_int (GET_MODE_SIZE (mode));
2973 : 284784 : set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);
2974 : :
2975 : : /* Create the signed variants of *sizetype. */
2976 : 284784 : ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
2977 : 284784 : TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
2978 : 284784 : sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
2979 : 284784 : TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
2980 : 284784 : }
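: :
: : /* Editor's note (worked example, assuming an LP64 target such as
: : x86_64-linux-gnu): SIZETYPE is "long unsigned int", so PRECISION is 64.
: : BPRECISION starts as MIN (64 + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE)
: : = MIN (68, 128) = 68, is rounded up to the precision of the smallest
: : integer mode that holds it (TImode, 128 bits), and is finally capped at
: : HOST_BITS_PER_DOUBLE_INT. bitsizetype therefore ends up 128 bits wide,
: : enough to hold any byte count scaled to bits without overflow. */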
2981 : :
2982 : : /* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE
2983 : : or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2984 : : for TYPE, based on PRECISION and the signedness given by SGN.
2985 : : PRECISION need not correspond to a width supported
2986 : : natively by the hardware; for example, on a machine with 8-bit,
2987 : : 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2988 : : 61. */
2989 : :
2990 : : void
2991 : 16108915 : set_min_and_max_values_for_integral_type (tree type,
2992 : : int precision,
2993 : : signop sgn)
2994 : : {
2995 : : /* For bitfields with zero width we end up creating integer types
2996 : : with zero precision. Don't assign any minimum/maximum values
2997 : : to those types; they have no valid values. */
2998 : 16108915 : if (precision < 1)
2999 : : return;
3000 : :
3001 : 16108619 : gcc_assert (precision <= WIDE_INT_MAX_PRECISION);
3002 : :
3003 : 16108619 : TYPE_MIN_VALUE (type)
3004 : 32217238 : = wide_int_to_tree (type, wi::min_value (precision, sgn));
3005 : 16108619 : TYPE_MAX_VALUE (type)
3006 : 32249741 : = wide_int_to_tree (type, wi::max_value (precision, sgn));
3007 : : }
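: :
: : /* Editor's sketch (hypothetical, not part of this file): for PRECISION 7
: : the signed bounds are [-64, 63] and the unsigned bounds [0, 127],
: : however wide the underlying mode is. */
: :
: : static void ATTRIBUTE_UNUSED
: : min_max_example (void)
: : {
: : tree t = make_signed_type (7);
: : /* make_signed_type already called this function via fixup_signed_type. */
: : gcc_assert (compare_tree_int (TYPE_MAX_VALUE (t), 63) == 0);
: : gcc_assert (tree_int_cst_equal (TYPE_MIN_VALUE (t), build_int_cst (t, -64)));
: : }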
3008 : :
3009 : : /* Set the extreme values of TYPE based on its precision in bits,
3010 : : then lay it out. Used when make_signed_type won't do
3011 : : because the tree code is not INTEGER_TYPE. */
3012 : :
3013 : : void
3014 : 4637789 : fixup_signed_type (tree type)
3015 : : {
3016 : 4637789 : int precision = TYPE_PRECISION (type);
3017 : :
3018 : 4637789 : set_min_and_max_values_for_integral_type (type, precision, SIGNED);
3019 : :
3020 : : /* Lay out the type: set its alignment, size, etc. */
3021 : 4637789 : layout_type (type);
3022 : 4637789 : }
3023 : :
3024 : : /* Set the extreme values of TYPE based on its precision in bits,
3025 : : then lay it out. This is used both in `make_unsigned_type'
3026 : : and for enumeral types. */
3027 : :
3028 : : void
3029 : 9563073 : fixup_unsigned_type (tree type)
3030 : : {
3031 : 9563073 : int precision = TYPE_PRECISION (type);
3032 : :
3033 : 9563073 : TYPE_UNSIGNED (type) = 1;
3034 : :
3035 : 9563073 : set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);
3036 : :
3037 : : /* Lay out the type: set its alignment, size, etc. */
3038 : 9563073 : layout_type (type);
3039 : 9563073 : }
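: :
: : /* Editor's sketch (hypothetical, not part of this file): how a front end
: : might finish an enumeral type whose enumerators all fit in 3 bits; the
: : bounds become [0, 7] and layout_type widens the type to QImode. */
: :
: : static void ATTRIBUTE_UNUSED
: : fixup_enum_example (tree enum_type)
: : {
: : TYPE_PRECISION (enum_type) = 3;
: : fixup_unsigned_type (enum_type);
: : }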
3040 : :
3041 : : /* Construct an iterator for a bitfield that spans BITSIZE bits,
3042 : : starting at BITPOS.
3043 : :
3044 : : BITREGION_START is the bit position of the first bit in this
3045 : : sequence of bit fields. BITREGION_END is the last bit in this
3046 : : sequence. If these two fields are non-zero, we should restrict the
3047 : : memory access to that range. Otherwise, we are allowed to touch
3048 : : any adjacent non bit-fields.
3049 : :
3050 : : ALIGN is the alignment of the underlying object in bits.
3051 : : VOLATILEP says whether the bitfield is volatile. */
3052 : :
3053 : 1334532 : bit_field_mode_iterator
3054 : : ::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3055 : : poly_int64 bitregion_start,
3056 : : poly_int64 bitregion_end,
3057 : 1334532 : unsigned int align, bool volatilep)
3058 : 1334532 : : m_mode (NARROWEST_INT_MODE), m_bitsize (bitsize),
3059 : 1334532 : m_bitpos (bitpos), m_bitregion_start (bitregion_start),
3060 : 1334532 : m_bitregion_end (bitregion_end), m_align (align),
3061 : 1334532 : m_volatilep (volatilep), m_count (0)
3062 : : {
3063 : 1334532 : if (known_eq (m_bitregion_end, 0))
3064 : : {
3065 : : /* We can assume that any aligned chunk of ALIGN bits that overlaps
3066 : : the bitfield is mapped and won't trap, provided that ALIGN isn't
3067 : : too large. The cap is the biggest required alignment for data,
3068 : : or at least the word size. Force at least one such chunk. */
3069 : 312158 : unsigned HOST_WIDE_INT units
3070 : 1122580 : = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
3071 : 312158 : if (bitsize <= 0)
3072 : : bitsize = 1;
3073 : 312158 : HOST_WIDE_INT end = bitpos + bitsize + units - 1;
3074 : 312158 : m_bitregion_end = end - end % units - 1;
3075 : : }
3076 : 1334532 : }
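: :
: : /* Editor's note (worked example): with BITSIZE = 10, BITPOS = 37, no
: : explicit bitregion and ALIGN = 32 (and BIGGEST_ALIGNMENT or
: : BITS_PER_WORD at least 32), UNITS is 32 and END is 37 + 10 + 31 = 78,
: : so m_bitregion_end becomes 78 - 78 % 32 - 1 = 63: the iterator may
: : touch the aligned 32-bit chunks covering bits [0, 63], including the
: : chunk [32, 63] that contains the field bits 37 .. 46. */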
3077 : :
3078 : : /* Calls to this function return successively larger modes that can be used
3079 : : to represent the bitfield. Return true if another bitfield mode is
3080 : : available, storing it in *OUT_MODE if so. */
3081 : :
3082 : : bool
3083 : 1335566 : bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
3084 : : {
3085 : 1335566 : scalar_int_mode mode;
3086 : 2307760 : for (; m_mode.exists (&mode); m_mode = GET_MODE_WIDER_MODE (mode))
3087 : : {
3088 : 2307760 : unsigned int unit = GET_MODE_BITSIZE (mode);
3089 : :
3090 : : /* Skip modes that don't have full precision. */
3091 : 2307760 : if (unit != GET_MODE_PRECISION (mode))
3092 : 972194 : continue;
3093 : :
3094 : : /* Stop if the mode is too wide to handle efficiently. */
3095 : 4615520 : if (unit > MAX_FIXED_MODE_SIZE)
3096 : : break;
3097 : :
3098 : : /* Don't deliver more than one multiword mode; the smallest one
3099 : : should be used. */
3100 : 2255339 : if (m_count > 0 && unit > BITS_PER_WORD)
3101 : : break;
3102 : :
3103 : : /* Skip modes that are too small. */
3104 : 2255230 : unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
3105 : 2255230 : unsigned HOST_WIDE_INT subend = substart + m_bitsize;
3106 : 2255230 : if (subend > unit)
3107 : 972194 : continue;
3108 : :
3109 : : /* Stop if the mode goes outside the bitregion. */
3110 : 1283036 : HOST_WIDE_INT start = m_bitpos - substart;
3111 : 1283036 : if (maybe_ne (m_bitregion_start, 0)
3112 : 1283036 : && maybe_lt (start, m_bitregion_start))
3113 : : break;
3114 : 1283006 : HOST_WIDE_INT end = start + unit;
3115 : 1283006 : if (maybe_gt (end, m_bitregion_end + 1))
3116 : : break;
3117 : :
3118 : : /* Stop if the mode requires too much alignment. */
3119 : 1268434 : if (GET_MODE_ALIGNMENT (mode) > m_align
3120 : 1268434 : && targetm.slow_unaligned_access (mode, m_align))
3121 : : break;
3122 : :
3123 : 1268434 : *out_mode = mode;
3124 : 1268434 : m_mode = GET_MODE_WIDER_MODE (mode);
3125 : 1268434 : m_count++;
3126 : 1268434 : return true;
3127 : : }
3128 : : return false;
3129 : : }
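: :
: : /* Editor's sketch (hypothetical, not part of this file): walking the
: : candidate modes for a 6-bit field at bit position 17 in a
: : 32-bit-aligned object. QImode comes first (substart = 17 % 8 = 1,
: : subend = 7 <= 8, i.e. bits 17 .. 22 lie inside the byte [16, 23]),
: : then HImode, SImode, and so on until the bitregion, multiword, or
: : alignment checks stop the walk. */
: :
: : static void ATTRIBUTE_UNUSED
: : next_mode_example (void)
: : {
: : bit_field_mode_iterator iter (6, 17, 0, 0, 32, false);
: : scalar_int_mode mode;
: : while (iter.next_mode (&mode))
: : fprintf (stderr, "candidate: %s\n", GET_MODE_NAME (mode));
: : }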
3130 : :
3131 : : /* Return true if smaller modes are generally preferred for this kind
3132 : : of bitfield. */
3133 : :
3134 : : bool
3135 : 1245331 : bit_field_mode_iterator::prefer_smaller_modes ()
3136 : : {
3137 : 1245331 : return (m_volatilep
3138 : 1245331 : ? targetm.narrow_volatile_bitfield ()
3139 : 1245331 : : !SLOW_BYTE_ACCESS);
3140 : : }
3141 : :
3142 : : /* Find the best machine mode to use when referencing a bit field of length
3143 : : BITSIZE bits starting at BITPOS.
3144 : :
3145 : : BITREGION_START is the bit position of the first bit in this
3146 : : sequence of bit fields. BITREGION_END is the last bit in this
3147 : : sequence. If these two fields are non-zero, we should restrict the
3148 : : memory access to that range. Otherwise, we are allowed to touch
3149 : : any adjacent non bit-fields.
3150 : :
3151 : : The chosen mode must have no more than LARGEST_MODE_BITSIZE bits.
3152 : : INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller
3153 : : doesn't want to apply a specific limit.
3154 : :
3155 : : If no mode meets all these conditions, we return VOIDmode.
3156 : :
3157 : : The underlying object is known to be aligned to a boundary of ALIGN bits.
3158 : :
3159 : : If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we choose the
3160 : : smallest mode meeting these conditions.
3161 : :
3162 : : If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we choose the
3163 : : largest mode (but no wider than a word, i.e. BITS_PER_WORD bits)
3164 : : that meets all the conditions.
3165 : :
3166 : : If VOLATILEP is true, the narrow_volatile_bitfield target hook is used
3167 : : to decide which of the above modes should be used. */
3168 : :
3169 : : bool
3170 : 1122959 : get_best_mode (int bitsize, int bitpos,
3171 : : poly_uint64 bitregion_start, poly_uint64 bitregion_end,
3172 : : unsigned int align,
3173 : : unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
3174 : : scalar_int_mode *best_mode)
3175 : : {
3176 : 1122959 : bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
3177 : 1122959 : bitregion_end, align, volatilep);
3178 : 1122959 : scalar_int_mode mode;
3179 : 1122959 : bool found = false;
3180 : 1122959 : while (iter.next_mode (&mode)
3181 : : /* ??? For historical reasons, reject modes that would normally
3182 : : receive greater alignment, even if unaligned accesses are
3183 : : acceptable. This has both advantages and disadvantages.
3184 : : Removing this check means that something like:
3185 : :
3186 : : struct s { unsigned int x; unsigned int y; };
3187 : : int f (struct s *s) { return s->x == 0 && s->y == 0; }
3188 : :
3189 : : can be implemented using a single load and compare on
3190 : : 64-bit machines that have no alignment restrictions.
3191 : : For example, on powerpc64-linux-gnu, we would generate:
3192 : :
3193 : : ld 3,0(3)
3194 : : cntlzd 3,3
3195 : : srdi 3,3,6
3196 : : blr
3197 : :
3198 : : rather than:
3199 : :
3200 : : lwz 9,0(3)
3201 : : cmpwi 7,9,0
3202 : : bne 7,.L3
3203 : : lwz 3,4(3)
3204 : : cntlzw 3,3
3205 : : srwi 3,3,5
3206 : : extsw 3,3
3207 : : blr
3208 : : .p2align 4,,15
3209 : : .L3:
3210 : : li 3,0
3211 : : blr
3212 : :
3213 : : However, accessing more than one field can make life harder
3214 : : for the gimple optimizers. For example, gcc.dg/vect/bb-slp-5.c
3215 : : has a series of unsigned short copies followed by a series of
3216 : : unsigned short comparisons. With this check, both the copies
3217 : : and comparisons remain 16-bit accesses and FRE is able
3218 : : to eliminate the latter. Without the check, the comparisons
3219 : : can be done using 2 64-bit operations, which FRE isn't able
3220 : : to handle in the same way.
3221 : :
3222 : : Either way, it would probably be worth disabling this check
3223 : : during expand. One particular example where removing the
3224 : : check would help is the get_best_mode call in store_bit_field.
3225 : : If we are given a memory bitregion of 128 bits that is aligned
3226 : : to a 64-bit boundary, and the bitfield we want to modify is
3227 : : in the second half of the bitregion, this check causes
3228 : : store_bitfield to turn the memory into a 64-bit reference
3229 : : to the _first_ half of the region. We later use
3230 : : adjust_bitfield_address to get a reference to the correct half,
3231 : : but doing so looks to adjust_bitfield_address as though we are
3232 : : moving past the end of the original object, so it drops the
3233 : : associated MEM_EXPR and MEM_OFFSET. Removing the check
3234 : : causes store_bit_field to keep a 128-bit memory reference,
3235 : : so that the final bitfield reference still has a MEM_EXPR
3236 : : and MEM_OFFSET. */
3237 : 1063095 : && GET_MODE_ALIGNMENT (mode) <= align
3238 : 2165351 : && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize)
3239 : : {
3240 : 1040234 : *best_mode = mode;
3241 : 1040234 : found = true;
3242 : 1040234 : if (iter.prefer_smaller_modes ())
3243 : : break;
3244 : : }
3245 : :
3246 : 1122959 : return found;
3247 : : }
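: :
: : /* Editor's sketch (hypothetical, not part of this file): requesting the
: : best mode for a 6-bit, non-volatile field at bit 17 of a 32-bit-aligned
: : object, with no bitregion restriction and no width cap. On a target
: : where SLOW_BYTE_ACCESS is false this stores QImode, the smallest
: : qualifying mode, in BEST. */
: :
: : static void ATTRIBUTE_UNUSED
: : get_best_mode_example (void)
: : {
: : scalar_int_mode best;
: : if (get_best_mode (6, 17, 0, 0, 32, INT_MAX, false, &best))
: : gcc_checking_assert (GET_MODE_BITSIZE (best) >= 6);
: : }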
3248 : :
3249 : : /* Get the minimal and maximal values for MODE (signed or unsigned depending
3250 : : on SIGN). The returned constants are made usable in TARGET_MODE. */
3251 : :
3252 : : void
3253 : 61435596 : get_mode_bounds (scalar_int_mode mode, int sign,
3254 : : scalar_int_mode target_mode,
3255 : : rtx *mmin, rtx *mmax)
3256 : : {
3257 : 61435596 : unsigned size = GET_MODE_PRECISION (mode);
3258 : 61435596 : unsigned HOST_WIDE_INT min_val, max_val;
3259 : :
3260 : 61435596 : gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
3261 : :
3262 : : /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */
3263 : 61435596 : if (mode == BImode)
3264 : : {
3265 : : if (STORE_FLAG_VALUE < 0)
3266 : : {
3267 : : min_val = STORE_FLAG_VALUE;
3268 : : max_val = 0;
3269 : : }
3270 : : else
3271 : : {
3272 : : min_val = 0;
3273 : : max_val = STORE_FLAG_VALUE;
3274 : : }
3275 : : }
3276 : 61435596 : else if (sign)
3277 : : {
3278 : 55442276 : min_val = -(HOST_WIDE_INT_1U << (size - 1));
3279 : 55442276 : max_val = (HOST_WIDE_INT_1U << (size - 1)) - 1;
3280 : : }
3281 : : else
3282 : : {
3283 : 5993320 : min_val = 0;
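: : /* Shift in two steps so that SIZE == HOST_BITS_PER_WIDE_INT does not
: : shift by the full width of the value, which would be undefined. */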
3284 : 5993320 : max_val = (HOST_WIDE_INT_1U << (size - 1) << 1) - 1;
3285 : : }
3286 : :
3287 : 61435596 : *mmin = gen_int_mode (min_val, target_mode);
3288 : 61435596 : *mmax = gen_int_mode (max_val, target_mode);
3289 : 61435596 : }
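: :
: : /* Editor's note (worked example): for QImode (precision 8) the signed
: : bounds are [-128, 127] and the unsigned bounds [0, 255]; gen_int_mode
: : then canonicalizes each value as a CONST_INT valid for TARGET_MODE. */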
3290 : :
3291 : : #include "gt-stor-layout.h"
|