Branch data Line data Source code
1 : : /* C-compiler utilities for types and variables storage layout
2 : : Copyright (C) 1987-2024 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "target.h"
25 : : #include "function.h"
26 : : #include "rtl.h"
27 : : #include "tree.h"
28 : : #include "memmodel.h"
29 : : #include "tm_p.h"
30 : : #include "stringpool.h"
31 : : #include "regs.h"
32 : : #include "emit-rtl.h"
33 : : #include "cgraph.h"
34 : : #include "diagnostic-core.h"
35 : : #include "fold-const.h"
36 : : #include "stor-layout.h"
37 : : #include "varasm.h"
38 : : #include "print-tree.h"
39 : : #include "langhooks.h"
40 : : #include "tree-inline.h"
41 : : #include "dumpfile.h"
42 : : #include "gimplify.h"
43 : : #include "attribs.h"
44 : : #include "debug.h"
45 : : #include "calls.h"
46 : :
47 : : /* Data type for the expressions representing sizes of data types.
48 : : It is the first integer type laid out. */
49 : : tree sizetype_tab[(int) stk_type_kind_last];
50 : :
51 : : /* If nonzero, this is an upper limit on alignment of structure fields.
52 : : The value is measured in bits. */
53 : : unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
54 : :
55 : : static tree self_referential_size (tree);
56 : : static void finalize_record_size (record_layout_info);
57 : : static void finalize_type_size (tree);
58 : : static void place_union_field (record_layout_info, tree);
59 : : static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
60 : : HOST_WIDE_INT, tree);
61 : : extern void debug_rli (record_layout_info);
62 : :
63 : : /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
64 : : to serve as the actual size-expression for a type or decl. */
65 : :
66 : : tree
67 : 560964 : variable_size (tree size)
68 : : {
69 : : /* Obviously. */
70 : 560964 : if (TREE_CONSTANT (size))
71 : : return size;
72 : :
73 : : /* If the size is self-referential, we can't make a SAVE_EXPR (see
74 : : save_expr for the rationale). But we can do something else. */
75 : 560902 : if (CONTAINS_PLACEHOLDER_P (size))
76 : 0 : return self_referential_size (size);
77 : :
78 : : /* If we are in the global binding level, we can't make a SAVE_EXPR
79 : : since it may end up being shared across functions, so it is up
80 : : to the front-end to deal with this case. */
81 : 560902 : if (lang_hooks.decls.global_bindings_p ())
82 : : return size;
83 : :
84 : 295165 : return save_expr (size);
85 : : }
86 : :
87 : : /* An array of functions used for self-referential size computation. */
88 : : static GTY(()) vec<tree, va_gc> *size_functions;
89 : :
90 : : /* Return true if T is a self-referential component reference. */
91 : :
92 : : static bool
93 : 0 : self_referential_component_ref_p (tree t)
94 : : {
95 : 0 : if (TREE_CODE (t) != COMPONENT_REF)
96 : : return false;
97 : :
98 : 0 : while (REFERENCE_CLASS_P (t))
99 : 0 : t = TREE_OPERAND (t, 0);
100 : :
101 : 0 : return (TREE_CODE (t) == PLACEHOLDER_EXPR);
102 : : }
103 : :
104 : : /* Similar to copy_tree_r but do not copy component references involving
105 : : PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr
106 : : and substituted in substitute_in_expr. */
107 : :
108 : : static tree
109 : 0 : copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
110 : : {
111 : 0 : enum tree_code code = TREE_CODE (*tp);
112 : :
113 : : /* Stop at types, decls, constants like copy_tree_r. */
114 : 0 : if (TREE_CODE_CLASS (code) == tcc_type
115 : : || TREE_CODE_CLASS (code) == tcc_declaration
116 : 0 : || TREE_CODE_CLASS (code) == tcc_constant)
117 : : {
118 : 0 : *walk_subtrees = 0;
119 : 0 : return NULL_TREE;
120 : : }
121 : :
122 : : /* This is the pattern built in ada/make_aligning_type. */
123 : 0 : else if (code == ADDR_EXPR
124 : 0 : && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
125 : : {
126 : 0 : *walk_subtrees = 0;
127 : 0 : return NULL_TREE;
128 : : }
129 : :
130 : : /* Default case: the component reference. */
131 : 0 : else if (self_referential_component_ref_p (*tp))
132 : : {
133 : 0 : *walk_subtrees = 0;
134 : 0 : return NULL_TREE;
135 : : }
136 : :
137 : : /* We're not supposed to have them in self-referential size trees
138 : : because we wouldn't properly control when they are evaluated.
139 : : However, not creating superfluous SAVE_EXPRs requires accurate
140 : : tracking of readonly-ness all the way down to here, which we
141 : : cannot always guarantee in practice. So punt in this case. */
142 : 0 : else if (code == SAVE_EXPR)
143 : 0 : return error_mark_node;
144 : :
145 : 0 : else if (code == STATEMENT_LIST)
146 : 0 : gcc_unreachable ();
147 : :
148 : 0 : return copy_tree_r (tp, walk_subtrees, data);
149 : : }
150 : :
151 : : /* Given a SIZE expression that is self-referential, return an equivalent
152 : : expression to serve as the actual size expression for a type. */
153 : :
154 : : static tree
155 : 0 : self_referential_size (tree size)
156 : : {
157 : 0 : static unsigned HOST_WIDE_INT fnno = 0;
158 : 0 : vec<tree> self_refs = vNULL;
159 : 0 : tree param_type_list = NULL, param_decl_list = NULL;
160 : 0 : tree t, ref, return_type, fntype, fnname, fndecl;
161 : 0 : unsigned int i;
162 : 0 : char buf[128];
163 : 0 : vec<tree, va_gc> *args = NULL;
164 : :
165 : : /* Do not factor out simple operations. */
166 : 0 : t = skip_simple_constant_arithmetic (size);
167 : 0 : if (TREE_CODE (t) == CALL_EXPR || self_referential_component_ref_p (t))
168 : : return size;
169 : :
170 : : /* Collect the list of self-references in the expression. */
171 : 0 : find_placeholder_in_expr (size, &self_refs);
172 : 0 : gcc_assert (self_refs.length () > 0);
173 : :
174 : : /* Obtain a private copy of the expression. */
175 : 0 : t = size;
176 : 0 : if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
177 : : return size;
178 : 0 : size = t;
179 : :
180 : : /* Build the parameter and argument lists in parallel; also
181 : : substitute the former for the latter in the expression. */
182 : 0 : vec_alloc (args, self_refs.length ());
183 : 0 : FOR_EACH_VEC_ELT (self_refs, i, ref)
184 : : {
185 : 0 : tree subst, param_name, param_type, param_decl;
186 : :
187 : 0 : if (DECL_P (ref))
188 : : {
189 : : /* We shouldn't have true variables here. */
190 : 0 : gcc_assert (TREE_READONLY (ref));
191 : : subst = ref;
192 : : }
193 : : /* This is the pattern built in ada/make_aligning_type. */
194 : 0 : else if (TREE_CODE (ref) == ADDR_EXPR)
195 : : subst = ref;
196 : : /* Default case: the component reference. */
197 : : else
198 : 0 : subst = TREE_OPERAND (ref, 1);
199 : :
200 : 0 : sprintf (buf, "p%d", i);
201 : 0 : param_name = get_identifier (buf);
202 : 0 : param_type = TREE_TYPE (ref);
203 : 0 : param_decl
204 : 0 : = build_decl (input_location, PARM_DECL, param_name, param_type);
205 : 0 : DECL_ARG_TYPE (param_decl) = param_type;
206 : 0 : DECL_ARTIFICIAL (param_decl) = 1;
207 : 0 : TREE_READONLY (param_decl) = 1;
208 : :
209 : 0 : size = substitute_in_expr (size, subst, param_decl);
210 : :
211 : 0 : param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
212 : 0 : param_decl_list = chainon (param_decl, param_decl_list);
213 : 0 : args->quick_push (ref);
214 : : }
215 : :
216 : 0 : self_refs.release ();
217 : :
218 : : /* Append 'void' to indicate that the number of parameters is fixed. */
219 : 0 : param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
220 : :
221 : : /* The 3 lists have been created in reverse order. */
222 : 0 : param_type_list = nreverse (param_type_list);
223 : 0 : param_decl_list = nreverse (param_decl_list);
224 : :
225 : : /* Build the function type. */
226 : 0 : return_type = TREE_TYPE (size);
227 : 0 : fntype = build_function_type (return_type, param_type_list);
228 : :
229 : : /* Build the function declaration. */
230 : 0 : sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
231 : 0 : fnname = get_file_function_name (buf);
232 : 0 : fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
233 : 0 : for (t = param_decl_list; t; t = DECL_CHAIN (t))
234 : 0 : DECL_CONTEXT (t) = fndecl;
235 : 0 : DECL_ARGUMENTS (fndecl) = param_decl_list;
236 : 0 : DECL_RESULT (fndecl)
237 : 0 : = build_decl (input_location, RESULT_DECL, 0, return_type);
238 : 0 : DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
239 : :
240 : : /* The function has been created by the compiler and we don't
241 : : want to emit debug info for it. */
242 : 0 : DECL_ARTIFICIAL (fndecl) = 1;
243 : 0 : DECL_IGNORED_P (fndecl) = 1;
244 : :
245 : : /* It is supposed to be "const" and never throw. */
246 : 0 : TREE_READONLY (fndecl) = 1;
247 : 0 : TREE_NOTHROW (fndecl) = 1;
248 : :
249 : : /* We want it to be inlined when this is deemed profitable, as
250 : : well as discarded if every call has been integrated. */
251 : 0 : DECL_DECLARED_INLINE_P (fndecl) = 1;
252 : :
253 : : /* It is made up of a unique return statement. */
254 : 0 : DECL_INITIAL (fndecl) = make_node (BLOCK);
255 : 0 : BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
256 : 0 : t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
257 : 0 : DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
258 : 0 : TREE_STATIC (fndecl) = 1;
259 : :
260 : : /* Put it onto the list of size functions. */
261 : 0 : vec_safe_push (size_functions, fndecl);
262 : :
263 : : /* Replace the original expression with a call to the size function. */
264 : 0 : return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
265 : : }
266 : :
267 : : /* Take, queue and compile all the size functions. It is essential that
268 : : the size functions be gimplified at the very end of the compilation
269 : : in order to guarantee transparent handling of self-referential sizes.
270 : : Otherwise the GENERIC inliner would not be able to inline them back
271 : : at each of their call sites, thus creating artificial non-constant
272 : : size expressions which would trigger nasty problems later on. */
273 : :
274 : : void
275 : 253767 : finalize_size_functions (void)
276 : : {
277 : 253767 : unsigned int i;
278 : 253767 : tree fndecl;
279 : :
280 : 253767 : for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
281 : : {
282 : 0 : allocate_struct_function (fndecl, false);
283 : 0 : set_cfun (NULL);
284 : 0 : dump_function (TDI_original, fndecl);
285 : :
286 : : /* As these functions are used to describe the layout of variable-length
287 : : structures, debug info generation needs their implementation. */
288 : 0 : debug_hooks->size_function (fndecl);
289 : 0 : gimplify_function_tree (fndecl);
290 : 0 : cgraph_node::finalize_function (fndecl, false);
291 : : }
292 : :
293 : 253767 : vec_free (size_functions);
294 : 253767 : }
295 : :
296 : : /* Return a machine mode of class MCLASS with SIZE bits of precision,
297 : : if one exists. The mode may have padding bits as well the SIZE
298 : : value bits. If LIMIT is nonzero, disregard modes wider than
299 : : MAX_FIXED_MODE_SIZE. */
300 : :
301 : : opt_machine_mode
302 : 1112703170 : mode_for_size (poly_uint64 size, enum mode_class mclass, int limit)
303 : : {
304 : 1112703170 : machine_mode mode;
305 : 1112703170 : int i;
306 : :
307 : 1233318258 : if (limit && maybe_gt (size, (unsigned int) MAX_FIXED_MODE_SIZE))
308 : 28236530 : return opt_machine_mode ();
309 : :
310 : : /* Get the first mode which has this size, in the specified class. */
311 : 1503654545 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
312 : 1477122088 : if (known_eq (GET_MODE_PRECISION (mode), size))
313 : 1057934183 : return mode;
314 : :
315 : 26532457 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
316 : 53031024 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
317 : 26515512 : if (known_eq (int_n_data[i].bitsize, size)
318 : 26515512 : && int_n_enabled_p[i])
319 : 0 : return int_n_data[i].m;
320 : :
321 : 26532457 : return opt_machine_mode ();
322 : : }
323 : :
324 : : /* Similar, except passed a tree node. */
325 : :
326 : : opt_machine_mode
327 : 115047089 : mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
328 : : {
329 : 115047089 : unsigned HOST_WIDE_INT uhwi;
330 : 115047089 : unsigned int ui;
331 : :
332 : 115047089 : if (!tree_fits_uhwi_p (size))
333 : 238109 : return opt_machine_mode ();
334 : 114808980 : uhwi = tree_to_uhwi (size);
335 : 114808980 : ui = uhwi;
336 : 114808980 : if (uhwi != ui)
337 : 548 : return opt_machine_mode ();
338 : 114808432 : return mode_for_size (ui, mclass, limit);
339 : : }
340 : :
341 : : /* Return the narrowest mode of class MCLASS that contains at least
342 : : SIZE bits. Abort if no such mode exists. */
343 : :
344 : : machine_mode
345 : 12955364 : smallest_mode_for_size (poly_uint64 size, enum mode_class mclass)
346 : : {
347 : 12955364 : machine_mode mode = VOIDmode;
348 : 12955364 : int i;
349 : :
350 : : /* Get the first mode which has at least this size, in the
351 : : specified class. */
352 : 42615131 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
353 : 42615131 : if (known_ge (GET_MODE_PRECISION (mode), size))
354 : : break;
355 : :
356 : 12955364 : gcc_assert (mode != VOIDmode);
357 : :
358 : 12955364 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
359 : 25910728 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
360 : 12955364 : if (known_ge (int_n_data[i].bitsize, size)
361 : 12955234 : && known_lt (int_n_data[i].bitsize, GET_MODE_PRECISION (mode))
362 : 12955364 : && int_n_enabled_p[i])
363 : 0 : mode = int_n_data[i].m;
364 : :
365 : 12955364 : return mode;
366 : : }
367 : :
368 : : /* Return an integer mode of exactly the same size as MODE, if one exists. */
369 : :
370 : : opt_scalar_int_mode
371 : 1693621 : int_mode_for_mode (machine_mode mode)
372 : : {
373 : 1693621 : switch (GET_MODE_CLASS (mode))
374 : : {
375 : 1253505 : case MODE_INT:
376 : 1253505 : case MODE_PARTIAL_INT:
377 : 1253505 : return as_a <scalar_int_mode> (mode);
378 : :
379 : 273941 : case MODE_COMPLEX_INT:
380 : 273941 : case MODE_COMPLEX_FLOAT:
381 : 273941 : case MODE_FLOAT:
382 : 273941 : case MODE_DECIMAL_FLOAT:
383 : 273941 : case MODE_FRACT:
384 : 273941 : case MODE_ACCUM:
385 : 273941 : case MODE_UFRACT:
386 : 273941 : case MODE_UACCUM:
387 : 273941 : case MODE_VECTOR_BOOL:
388 : 273941 : case MODE_VECTOR_INT:
389 : 273941 : case MODE_VECTOR_FLOAT:
390 : 273941 : case MODE_VECTOR_FRACT:
391 : 273941 : case MODE_VECTOR_ACCUM:
392 : 273941 : case MODE_VECTOR_UFRACT:
393 : 273941 : case MODE_VECTOR_UACCUM:
394 : 547882 : return int_mode_for_size (GET_MODE_BITSIZE (mode), 0);
395 : :
396 : 0 : case MODE_OPAQUE:
397 : 0 : return opt_scalar_int_mode ();
398 : :
399 : 166175 : case MODE_RANDOM:
400 : 166175 : if (mode == BLKmode)
401 : 166175 : return opt_scalar_int_mode ();
402 : :
403 : : /* fall through */
404 : :
405 : 0 : case MODE_CC:
406 : 0 : default:
407 : 0 : gcc_unreachable ();
408 : : }
409 : : }
410 : :
411 : : /* Find a mode that can be used for efficient bitwise operations on MODE,
412 : : if one exists. */
413 : :
414 : : opt_machine_mode
415 : 20262 : bitwise_mode_for_mode (machine_mode mode)
416 : : {
417 : : /* Quick exit if we already have a suitable mode. */
418 : 20262 : scalar_int_mode int_mode;
419 : 20262 : if (is_a <scalar_int_mode> (mode, &int_mode)
420 : 57362 : && GET_MODE_BITSIZE (int_mode) <= MAX_FIXED_MODE_SIZE)
421 : 18550 : return int_mode;
422 : :
423 : : /* Reuse the sanity checks from int_mode_for_mode. */
424 : 1712 : gcc_checking_assert ((int_mode_for_mode (mode), true));
425 : :
426 : 3424 : poly_int64 bitsize = GET_MODE_BITSIZE (mode);
427 : :
428 : : /* Try to replace complex modes with complex modes. In general we
429 : : expect both components to be processed independently, so we only
430 : : care whether there is a register for the inner mode. */
431 : 1712 : if (COMPLEX_MODE_P (mode))
432 : : {
433 : 400 : machine_mode trial = mode;
434 : 400 : if ((GET_MODE_CLASS (trial) == MODE_COMPLEX_INT
435 : 460 : || mode_for_size (bitsize, MODE_COMPLEX_INT, false).exists (&trial))
436 : 1072 : && have_regs_of_mode[GET_MODE_INNER (trial)])
437 : 276 : return trial;
438 : : }
439 : :
440 : : /* Try to replace vector modes with vector modes. Also try using vector
441 : : modes if an integer mode would be too big. */
442 : 1436 : if (VECTOR_MODE_P (mode)
443 : 2512 : || maybe_gt (bitsize, MAX_FIXED_MODE_SIZE))
444 : : {
445 : 317 : machine_mode trial = mode;
446 : 317 : if ((GET_MODE_CLASS (trial) == MODE_VECTOR_INT
447 : 211 : || mode_for_size (bitsize, MODE_VECTOR_INT, 0).exists (&trial))
448 : 253 : && have_regs_of_mode[trial]
449 : 497 : && targetm.vector_mode_supported_p (trial))
450 : 180 : return trial;
451 : : }
452 : :
453 : : /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE. */
454 : 1256 : return mode_for_size (bitsize, MODE_INT, true);
455 : : }
456 : :
457 : : /* Find a type that can be used for efficient bitwise operations on MODE.
458 : : Return null if no such mode exists. */
459 : :
460 : : tree
461 : 20262 : bitwise_type_for_mode (machine_mode mode)
462 : : {
463 : 40387 : if (!bitwise_mode_for_mode (mode).exists (&mode))
464 : : return NULL_TREE;
465 : :
466 : 20125 : unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
467 : 20125 : tree inner_type = build_nonstandard_integer_type (inner_size, true);
468 : :
469 : 20125 : if (VECTOR_MODE_P (mode))
470 : 180 : return build_vector_type_for_mode (inner_type, mode);
471 : :
472 : 19945 : if (COMPLEX_MODE_P (mode))
473 : 276 : return build_complex_type (inner_type);
474 : :
475 : 39338 : gcc_checking_assert (GET_MODE_INNER (mode) == mode);
476 : : return inner_type;
477 : : }
478 : :
479 : : /* Find a mode that can be used for efficient bitwise operations on SIZE
480 : : bits, if one exists. */
481 : :
482 : : opt_machine_mode
483 : 18691 : bitwise_mode_for_size (poly_uint64 size)
484 : : {
485 : 37382 : if (known_le (size, (unsigned int) MAX_FIXED_MODE_SIZE))
486 : 18508 : return mode_for_size (size, MODE_INT, true);
487 : :
488 : : machine_mode mode, ret = VOIDmode;
489 : 3230 : FOR_EACH_MODE_FROM (mode, MIN_MODE_VECTOR_INT)
490 : 6452 : if (known_eq (GET_MODE_BITSIZE (mode), size)
491 : 199 : && (ret == VOIDmode || GET_MODE_INNER (mode) == QImode)
492 : 199 : && have_regs_of_mode[mode]
493 : 3405 : && targetm.vector_mode_supported_p (mode))
494 : : {
495 : 358 : if (GET_MODE_INNER (mode) == QImode)
496 : 179 : return mode;
497 : 0 : else if (ret == VOIDmode)
498 : 3047 : ret = mode;
499 : : }
500 : 4 : if (ret != VOIDmode)
501 : 0 : return ret;
502 : 4 : return opt_machine_mode ();
503 : : }
504 : :
505 : : /* Find a mode that is suitable for representing a vector with NUNITS
506 : : elements of mode INNERMODE, if one exists. The returned mode can be
507 : : either an integer mode or a vector mode. */
508 : :
509 : : opt_machine_mode
510 : 89577951 : mode_for_vector (scalar_mode innermode, poly_uint64 nunits)
511 : : {
512 : 89577951 : machine_mode mode;
513 : :
514 : : /* First, look for a supported vector type. */
515 : 89577951 : if (SCALAR_FLOAT_MODE_P (innermode))
516 : : mode = MIN_MODE_VECTOR_FLOAT;
517 : 85327091 : else if (SCALAR_FRACT_MODE_P (innermode))
518 : : mode = MIN_MODE_VECTOR_FRACT;
519 : 85327091 : else if (SCALAR_UFRACT_MODE_P (innermode))
520 : : mode = MIN_MODE_VECTOR_UFRACT;
521 : 85327091 : else if (SCALAR_ACCUM_MODE_P (innermode))
522 : : mode = MIN_MODE_VECTOR_ACCUM;
523 : 85327091 : else if (SCALAR_UACCUM_MODE_P (innermode))
524 : : mode = MIN_MODE_VECTOR_UACCUM;
525 : : else
526 : 89577951 : mode = MIN_MODE_VECTOR_INT;
527 : :
528 : : /* Only check the broader vector_mode_supported_any_target_p here.
529 : : We'll filter through target-specific availability and
530 : : vector_mode_supported_p later in vector_type_mode. */
531 : 1097964921 : FOR_EACH_MODE_FROM (mode, mode)
532 : 2195318688 : if (known_eq (GET_MODE_NUNITS (mode), nunits)
533 : 532801160 : && GET_MODE_INNER (mode) == innermode
534 : 1186931718 : && targetm.vector_mode_supported_any_target_p (mode))
535 : 89272374 : return mode;
536 : :
537 : : /* For integers, try mapping it to a same-sized scalar mode. */
538 : 305577 : if (GET_MODE_CLASS (innermode) == MODE_INT)
539 : : {
540 : 15706 : poly_uint64 nbits = nunits * GET_MODE_BITSIZE (innermode);
541 : 15706 : if (int_mode_for_size (nbits, 0).exists (&mode)
542 : 15430 : && have_regs_of_mode[mode])
543 : 15430 : return mode;
544 : : }
545 : :
546 : 290147 : return opt_machine_mode ();
547 : : }
548 : :
549 : : /* If a piece of code is using vector mode VECTOR_MODE and also wants
550 : : to operate on elements of mode ELEMENT_MODE, return the vector mode
551 : : it should use for those elements. If NUNITS is nonzero, ensure that
552 : : the mode has exactly NUNITS elements, otherwise pick whichever vector
553 : : size pairs the most naturally with VECTOR_MODE; this may mean choosing
554 : : a mode with a different size and/or number of elements, depending on
555 : : what the target prefers. Return an empty opt_machine_mode if there
556 : : is no supported vector mode with the required properties.
557 : :
558 : : Unlike mode_for_vector. any returned mode is guaranteed to satisfy
559 : : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
560 : :
561 : : opt_machine_mode
562 : 43503580 : related_vector_mode (machine_mode vector_mode, scalar_mode element_mode,
563 : : poly_uint64 nunits)
564 : : {
565 : 43503580 : gcc_assert (VECTOR_MODE_P (vector_mode));
566 : 43503580 : return targetm.vectorize.related_mode (vector_mode, element_mode, nunits);
567 : : }
568 : :
569 : : /* If a piece of code is using vector mode VECTOR_MODE and also wants
570 : : to operate on integer vectors with the same element size and number
571 : : of elements, return the vector mode it should use. Return an empty
572 : : opt_machine_mode if there is no supported vector mode with the
573 : : required properties.
574 : :
575 : : Unlike mode_for_vector. any returned mode is guaranteed to satisfy
576 : : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
577 : :
578 : : opt_machine_mode
579 : 15198 : related_int_vector_mode (machine_mode vector_mode)
580 : : {
581 : 15198 : gcc_assert (VECTOR_MODE_P (vector_mode));
582 : 15198 : scalar_int_mode int_mode;
583 : 30396 : if (int_mode_for_mode (GET_MODE_INNER (vector_mode)).exists (&int_mode))
584 : 30396 : return related_vector_mode (vector_mode, int_mode,
585 : 15198 : GET_MODE_NUNITS (vector_mode));
586 : 0 : return opt_machine_mode ();
587 : : }
588 : :
589 : : /* Return the alignment of MODE. This will be bounded by 1 and
590 : : BIGGEST_ALIGNMENT. */
591 : :
592 : : unsigned int
593 : 1623601735 : get_mode_alignment (machine_mode mode)
594 : : {
595 : 3132115655 : return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
596 : : }
597 : :
598 : : /* Return the natural mode of an array, given that it is SIZE bytes in
599 : : total and has elements of type ELEM_TYPE. */
600 : :
601 : : static machine_mode
602 : 50691446 : mode_for_array (tree elem_type, tree size)
603 : : {
604 : 50691446 : tree elem_size;
605 : 50691446 : poly_uint64 int_size, int_elem_size;
606 : 50691446 : unsigned HOST_WIDE_INT num_elems;
607 : 50691446 : bool limit_p;
608 : :
609 : : /* One-element arrays get the component type's mode. */
610 : 50691446 : elem_size = TYPE_SIZE (elem_type);
611 : 50691446 : if (simple_cst_equal (size, elem_size))
612 : 2831654 : return TYPE_MODE (elem_type);
613 : :
614 : 47859792 : limit_p = true;
615 : 47859792 : if (poly_int_tree_p (size, &int_size)
616 : 47621683 : && poly_int_tree_p (elem_size, &int_elem_size)
617 : 47621683 : && maybe_ne (int_elem_size, 0U)
618 : 47859792 : && constant_multiple_p (int_size, int_elem_size, &num_elems))
619 : : {
620 : 47621683 : machine_mode elem_mode = TYPE_MODE (elem_type);
621 : 47621683 : machine_mode mode;
622 : 47621683 : if (targetm.array_mode (elem_mode, num_elems).exists (&mode))
623 : 0 : return mode;
624 : 47621683 : if (targetm.array_mode_supported_p (elem_mode, num_elems))
625 : 47859792 : limit_p = false;
626 : : }
627 : 47859792 : return mode_for_size_tree (size, MODE_INT, limit_p).else_blk ();
628 : : }
629 : :
630 : : /* Subroutine of layout_decl: Force alignment required for the data type.
631 : : But if the decl itself wants greater alignment, don't override that. */
632 : :
633 : : static inline void
634 : 1563907705 : do_type_align (tree type, tree decl)
635 : : {
636 : 1563907705 : if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
637 : : {
638 : 1513510712 : SET_DECL_ALIGN (decl, TYPE_ALIGN (type));
639 : 1513510712 : if (TREE_CODE (decl) == FIELD_DECL)
640 : 47739619 : DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
641 : : }
642 : 1563907705 : if (TYPE_WARN_IF_NOT_ALIGN (type) > DECL_WARN_IF_NOT_ALIGN (decl))
643 : 63 : SET_DECL_WARN_IF_NOT_ALIGN (decl, TYPE_WARN_IF_NOT_ALIGN (type));
644 : 1563907705 : }
645 : :
646 : : /* Set the size, mode and alignment of a ..._DECL node.
647 : : TYPE_DECL does need this for C++.
648 : : Note that LABEL_DECL and CONST_DECL nodes do not need this,
649 : : and FUNCTION_DECL nodes have them set up in a special (and simple) way.
650 : : Don't call layout_decl for them.
651 : :
652 : : KNOWN_ALIGN is the amount of alignment we can assume this
653 : : decl has with no special effort. It is relevant only for FIELD_DECLs
654 : : and depends on the previous fields.
655 : : All that matters about KNOWN_ALIGN is which powers of 2 divide it.
656 : : If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
657 : : the record will be aligned to suit. */
658 : :
659 : : void
660 : 1564333557 : layout_decl (tree decl, unsigned int known_align)
661 : : {
662 : 1564333557 : tree type = TREE_TYPE (decl);
663 : 1564333557 : enum tree_code code = TREE_CODE (decl);
664 : 1564333557 : rtx rtl = NULL_RTX;
665 : 1564333557 : location_t loc = DECL_SOURCE_LOCATION (decl);
666 : :
667 : 1564333557 : if (code == CONST_DECL)
668 : : return;
669 : :
670 : 1564333557 : gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
671 : : || code == TYPE_DECL || code == FIELD_DECL);
672 : :
673 : 1564333557 : rtl = DECL_RTL_IF_SET (decl);
674 : :
675 : 1564333557 : if (type == error_mark_node)
676 : 3055 : type = void_type_node;
677 : :
678 : : /* Usually the size and mode come from the data type without change,
679 : : however, the front-end may set the explicit width of the field, so its
680 : : size may not be the same as the size of its type. This happens with
681 : : bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
682 : : also happens with other fields. For example, the C++ front-end creates
683 : : zero-sized fields corresponding to empty base classes, and depends on
684 : : layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
685 : : size in bytes from the size in bits. If we have already set the mode,
686 : : don't set it again since we can be called twice for FIELD_DECLs. */
687 : :
688 : 1564333557 : DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
689 : 1564333557 : if (DECL_MODE (decl) == VOIDmode)
690 : 1525440953 : SET_DECL_MODE (decl, TYPE_MODE (type));
691 : :
692 : 1564333557 : if (DECL_SIZE (decl) == 0)
693 : : {
694 : 1526095671 : DECL_SIZE (decl) = TYPE_SIZE (type);
695 : 1526095671 : DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
696 : : }
697 : 38237886 : else if (DECL_SIZE_UNIT (decl) == 0)
698 : 417339 : DECL_SIZE_UNIT (decl)
699 : 834678 : = fold_convert_loc (loc, sizetype,
700 : 417339 : size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
701 : : bitsize_unit_node));
702 : :
703 : 1564333557 : if (code != FIELD_DECL)
704 : : /* For non-fields, update the alignment from the type. */
705 : 1512133049 : do_type_align (type, decl);
706 : : else
707 : : /* For fields, it's a bit more complicated... */
708 : : {
709 : 52200508 : bool old_user_align = DECL_USER_ALIGN (decl);
710 : 52200508 : bool zero_bitfield = false;
711 : 52200508 : bool packed_p = DECL_PACKED (decl);
712 : 52200508 : unsigned int mfa;
713 : :
714 : 52200508 : if (DECL_BIT_FIELD (decl))
715 : : {
716 : 426861 : DECL_BIT_FIELD_TYPE (decl) = type;
717 : :
718 : : /* A zero-length bit-field affects the alignment of the next
719 : : field. In essence such bit-fields are not influenced by
720 : : any packing due to #pragma pack or attribute packed. */
721 : 426861 : if (integer_zerop (DECL_SIZE (decl))
722 : 426861 : && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
723 : : {
724 : 2086 : zero_bitfield = true;
725 : 2086 : packed_p = false;
726 : 2086 : if (PCC_BITFIELD_TYPE_MATTERS)
727 : 2086 : do_type_align (type, decl);
728 : : else
729 : : {
730 : : #ifdef EMPTY_FIELD_BOUNDARY
731 : : if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
732 : : {
733 : : SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY);
734 : : DECL_USER_ALIGN (decl) = 0;
735 : : }
736 : : #endif
737 : : }
738 : : }
739 : :
740 : : /* See if we can use an ordinary integer mode for a bit-field.
741 : : Conditions are: a fixed size that is correct for another mode,
742 : : occupying a complete byte or bytes on proper boundary. */
743 : 426861 : if (TYPE_SIZE (type) != 0
744 : 426861 : && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
745 : 853722 : && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
746 : : {
747 : 426770 : machine_mode xmode;
748 : 426770 : if (mode_for_size_tree (DECL_SIZE (decl),
749 : 264870 : MODE_INT, 1).exists (&xmode))
750 : : {
751 : 161900 : unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
752 : 158603 : if (!(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
753 : 320230 : && (known_align == 0 || known_align >= xalign))
754 : : {
755 : 158786 : SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)));
756 : 158786 : SET_DECL_MODE (decl, xmode);
757 : 158786 : DECL_BIT_FIELD (decl) = 0;
758 : : }
759 : : }
760 : : }
761 : :
762 : : /* Turn off DECL_BIT_FIELD if we won't need it set. */
763 : 426952 : if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
764 : 91 : && known_align >= TYPE_ALIGN (type)
765 : 426867 : && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
766 : 1 : DECL_BIT_FIELD (decl) = 0;
767 : : }
768 : 51773647 : else if (packed_p && DECL_USER_ALIGN (decl))
769 : : /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
770 : : round up; we'll reduce it again below. We want packing to
771 : : supersede USER_ALIGN inherited from the type, but defer to
772 : : alignment explicitly specified on the field decl. */;
773 : : else
774 : 51772570 : do_type_align (type, decl);
775 : :
776 : : /* If the field is packed and not explicitly aligned, give it the
777 : : minimum alignment. Note that do_type_align may set
778 : : DECL_USER_ALIGN, so we need to check old_user_align instead. */
779 : 52200508 : if (packed_p
780 : 52200508 : && !old_user_align)
781 : 6600 : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT));
782 : :
783 : 52200508 : if (! packed_p && ! DECL_USER_ALIGN (decl))
784 : : {
785 : : /* Some targets (i.e. i386, VMS) limit struct field alignment
786 : : to a lower boundary than alignment of variables unless
787 : : it was overridden by attribute aligned. */
788 : : #ifdef BIGGEST_FIELD_ALIGNMENT
789 : : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),
790 : : (unsigned) BIGGEST_FIELD_ALIGNMENT));
791 : : #endif
792 : : #ifdef ADJUST_FIELD_ALIGN
793 : 51580023 : SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, TREE_TYPE (decl),
794 : : DECL_ALIGN (decl)));
795 : : #endif
796 : : }
797 : :
798 : 52200508 : if (zero_bitfield)
799 : 2086 : mfa = initial_max_fld_align * BITS_PER_UNIT;
800 : : else
801 : 52198422 : mfa = maximum_field_alignment;
802 : : /* Should this be controlled by DECL_USER_ALIGN, too? */
803 : 52200508 : if (mfa != 0)
804 : 567 : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa));
805 : : }
806 : :
807 : : /* Evaluate nonconstant size only once, either now or as soon as safe. */
808 : 1564333557 : if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
809 : 25331 : DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
810 : 1564333557 : if (DECL_SIZE_UNIT (decl) != 0
811 : 1564333557 : && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
812 : 25331 : DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
813 : :
814 : : /* If requested, warn about definitions of large data objects. */
815 : 891460434 : if ((code == PARM_DECL || (code == VAR_DECL && !DECL_NONLOCAL_FRAME (decl)))
816 : 1770061635 : && !DECL_EXTERNAL (decl))
817 : : {
818 : 1074774405 : tree size = DECL_SIZE_UNIT (decl);
819 : :
820 : 1074774405 : if (size != 0 && TREE_CODE (size) == INTEGER_CST)
821 : : {
822 : : /* -Wlarger-than= argument of HOST_WIDE_INT_MAX is treated
823 : : as if PTRDIFF_MAX had been specified, with the value
824 : : being that on the target rather than the host. */
825 : 972363941 : unsigned HOST_WIDE_INT max_size = warn_larger_than_size;
826 : 972363941 : if (max_size == HOST_WIDE_INT_MAX)
827 : 972363866 : max_size = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
828 : :
829 : 972363941 : if (compare_tree_int (size, max_size) > 0)
830 : 28 : warning (OPT_Wlarger_than_, "size of %q+D %E bytes exceeds "
831 : : "maximum object size %wu",
832 : : decl, size, max_size);
833 : : }
834 : : }
835 : :
836 : : /* If the RTL was already set, update its mode and mem attributes. */
837 : 1564333557 : if (rtl)
838 : : {
839 : 38431 : PUT_MODE (rtl, DECL_MODE (decl));
840 : 38431 : SET_DECL_RTL (decl, 0);
841 : 38431 : if (MEM_P (rtl))
842 : 38431 : set_mem_attributes (rtl, decl, 1);
843 : 38431 : SET_DECL_RTL (decl, rtl);
844 : : }
845 : : }
846 : :
847 : : /* Given a VAR_DECL, PARM_DECL, RESULT_DECL, or FIELD_DECL, clears the
848 : : results of a previous call to layout_decl and calls it again. */
849 : :
850 : : void
851 : 371609703 : relayout_decl (tree decl)
852 : : {
853 : 371609703 : DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
854 : 371609703 : SET_DECL_MODE (decl, VOIDmode);
855 : 371609703 : if (!DECL_USER_ALIGN (decl))
856 : 371608734 : SET_DECL_ALIGN (decl, 0);
857 : 371609703 : if (DECL_RTL_SET_P (decl))
858 : 0 : SET_DECL_RTL (decl, 0);
859 : :
860 : 371609703 : layout_decl (decl, 0);
861 : 371609703 : }
862 : :
863 : : /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
864 : : QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
865 : : is to be passed to all other layout functions for this record. It is the
866 : : responsibility of the caller to call `free' for the storage returned.
867 : : Note that garbage collection is not permitted until we finish laying
868 : : out the record. */
869 : :
870 : : record_layout_info
871 : 41856897 : start_record_layout (tree t)
872 : : {
873 : 41856897 : record_layout_info rli = XNEW (struct record_layout_info_s);
874 : :
875 : 41856897 : rli->t = t;
876 : :
877 : : /* If the type has a minimum specified alignment (via an attribute
878 : : declaration, for example) use it -- otherwise, start with a
879 : : one-byte alignment. */
880 : 41856897 : rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
881 : 41856897 : rli->unpacked_align = rli->record_align;
882 : 83431423 : rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
883 : :
884 : : #ifdef STRUCTURE_SIZE_BOUNDARY
885 : : /* Packed structures don't need to have minimum size. */
886 : : if (! TYPE_PACKED (t))
887 : : {
888 : : unsigned tmp;
889 : :
890 : : /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */
891 : : tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
892 : : if (maximum_field_alignment != 0)
893 : : tmp = MIN (tmp, maximum_field_alignment);
894 : : rli->record_align = MAX (rli->record_align, tmp);
895 : : }
896 : : #endif
897 : :
898 : 41856897 : rli->offset = size_zero_node;
899 : 41856897 : rli->bitpos = bitsize_zero_node;
900 : 41856897 : rli->prev_field = 0;
901 : 41856897 : rli->pending_statics = 0;
902 : 41856897 : rli->packed_maybe_necessary = 0;
903 : 41856897 : rli->remaining_in_alignment = 0;
904 : :
905 : 41856897 : return rli;
906 : : }
907 : :
908 : : /* Fold sizetype value X to bitsizetype, given that X represents a type
909 : : size or offset. */
910 : :
911 : : static tree
912 : 341672891 : bits_from_bytes (tree x)
913 : : {
914 : 341672891 : if (POLY_INT_CST_P (x))
915 : : /* The runtime calculation isn't allowed to overflow sizetype;
916 : : increasing the runtime values must always increase the size
917 : : or offset of the object. This means that the object imposes
918 : : a maximum value on the runtime parameters, but we don't record
919 : : what that is. */
920 : : return build_poly_int_cst
921 : : (bitsizetype,
922 : : poly_wide_int::from (poly_int_cst_value (x),
923 : : TYPE_PRECISION (bitsizetype),
924 : : TYPE_SIGN (TREE_TYPE (x))));
925 : 341672891 : x = fold_convert (bitsizetype, x);
926 : 341672891 : gcc_checking_assert (x);
927 : 341672891 : return x;
928 : : }
929 : :
930 : : /* Return the combined bit position for the byte offset OFFSET and the
931 : : bit position BITPOS.
932 : :
933 : : These functions operate on byte and bit positions present in FIELD_DECLs
934 : : and assume that these expressions result in no (intermediate) overflow.
935 : : This assumption is necessary to fold the expressions as much as possible,
936 : : so as to avoid creating artificially variable-sized types in languages
937 : : supporting variable-sized types like Ada. */
938 : :
939 : : tree
940 : 198818738 : bit_from_pos (tree offset, tree bitpos)
941 : : {
942 : 198818738 : return size_binop (PLUS_EXPR, bitpos,
943 : : size_binop (MULT_EXPR, bits_from_bytes (offset),
944 : : bitsize_unit_node));
945 : : }
946 : :
947 : : /* Return the combined truncated byte position for the byte offset OFFSET and
948 : : the bit position BITPOS. */
949 : :
950 : : tree
951 : 226166982 : byte_from_pos (tree offset, tree bitpos)
952 : : {
953 : 226166982 : tree bytepos;
954 : 226166982 : if (TREE_CODE (bitpos) == MULT_EXPR
955 : 226166982 : && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
956 : 0 : bytepos = TREE_OPERAND (bitpos, 0);
957 : : else
958 : 226166982 : bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
959 : 226166982 : return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
960 : : }
961 : :
962 : : /* Split the bit position POS into a byte offset *POFFSET and a bit
963 : : position *PBITPOS with the byte offset aligned to OFF_ALIGN bits. */
964 : :
965 : : void
966 : 51335600 : pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
967 : : tree pos)
968 : : {
969 : 51335600 : tree toff_align = bitsize_int (off_align);
970 : 51335600 : if (TREE_CODE (pos) == MULT_EXPR
971 : 51335600 : && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
972 : : {
973 : 0 : *poffset = size_binop (MULT_EXPR,
974 : : fold_convert (sizetype, TREE_OPERAND (pos, 0)),
975 : : size_int (off_align / BITS_PER_UNIT));
976 : 0 : *pbitpos = bitsize_zero_node;
977 : : }
978 : : else
979 : : {
980 : 51335600 : *poffset = size_binop (MULT_EXPR,
981 : : fold_convert (sizetype,
982 : : size_binop (FLOOR_DIV_EXPR, pos,
983 : : toff_align)),
984 : : size_int (off_align / BITS_PER_UNIT));
985 : 51335600 : *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
986 : : }
987 : 51335600 : }
988 : :
989 : : /* Given a pointer to bit and byte offsets and an offset alignment,
990 : : normalize the offsets so they are within the alignment. */
991 : :
992 : : void
993 : 160498504 : normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
994 : : {
995 : : /* If the bit position is now larger than it should be, adjust it
996 : : downwards. */
997 : 160498504 : if (compare_tree_int (*pbitpos, off_align) >= 0)
998 : : {
999 : 51335600 : tree offset, bitpos;
1000 : 51335600 : pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
1001 : 51335600 : *poffset = size_binop (PLUS_EXPR, *poffset, offset);
1002 : 51335600 : *pbitpos = bitpos;
1003 : : }
1004 : 160498504 : }
1005 : :
1006 : : /* Print debugging information about the information in RLI. */
1007 : :
1008 : : DEBUG_FUNCTION void
1009 : 0 : debug_rli (record_layout_info rli)
1010 : : {
1011 : 0 : print_node_brief (stderr, "type", rli->t, 0);
1012 : 0 : print_node_brief (stderr, "\noffset", rli->offset, 0);
1013 : 0 : print_node_brief (stderr, " bitpos", rli->bitpos, 0);
1014 : :
1015 : 0 : fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
1016 : : rli->record_align, rli->unpacked_align,
1017 : : rli->offset_align);
1018 : :
1019 : : /* The ms_struct code is the only that uses this. */
1020 : 0 : if (targetm.ms_bitfield_layout_p (rli->t))
1021 : 0 : fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
1022 : :
1023 : 0 : if (rli->packed_maybe_necessary)
1024 : 0 : fprintf (stderr, "packed may be necessary\n");
1025 : :
1026 : 0 : if (!vec_safe_is_empty (rli->pending_statics))
1027 : : {
1028 : 0 : fprintf (stderr, "pending statics:\n");
1029 : 0 : debug (rli->pending_statics);
1030 : : }
1031 : 0 : }
1032 : :
1033 : : /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
1034 : : BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
1035 : :
1036 : : void
1037 : 160498504 : normalize_rli (record_layout_info rli)
1038 : : {
1039 : 160498504 : normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
1040 : 160498504 : }
1041 : :
1042 : : /* Returns the size in bytes allocated so far. */
1043 : :
1044 : : tree
1045 : 132823085 : rli_size_unit_so_far (record_layout_info rli)
1046 : : {
1047 : 132823085 : return byte_from_pos (rli->offset, rli->bitpos);
1048 : : }
1049 : :
1050 : : /* Returns the size in bits allocated so far. */
1051 : :
1052 : : tree
1053 : 112318895 : rli_size_so_far (record_layout_info rli)
1054 : : {
1055 : 112318895 : return bit_from_pos (rli->offset, rli->bitpos);
1056 : : }
1057 : :
1058 : : /* FIELD is about to be added to RLI->T. The alignment (in bits) of
1059 : : the next available location within the record is given by KNOWN_ALIGN.
1060 : : Update the variable alignment fields in RLI, and return the alignment
1061 : : to give the FIELD. */
1062 : :
1063 : : unsigned int
1064 : 50856822 : update_alignment_for_field (record_layout_info rli, tree field,
1065 : : unsigned int known_align)
1066 : : {
1067 : : /* The alignment required for FIELD. */
1068 : 50856822 : unsigned int desired_align;
1069 : : /* The type of this field. */
1070 : 50856822 : tree type = TREE_TYPE (field);
1071 : : /* True if the field was explicitly aligned by the user. */
1072 : 50856822 : bool user_align;
1073 : 50856822 : bool is_bitfield;
1074 : :
1075 : : /* Do not attempt to align an ERROR_MARK node */
1076 : 50856822 : if (TREE_CODE (type) == ERROR_MARK)
1077 : : return 0;
1078 : :
1079 : : /* Lay out the field so we know what alignment it needs. */
1080 : 50856817 : layout_decl (field, known_align);
1081 : 50856817 : desired_align = DECL_ALIGN (field);
1082 : 50856817 : user_align = DECL_USER_ALIGN (field);
1083 : :
1084 : 101713634 : is_bitfield = (type != error_mark_node
1085 : 50856817 : && DECL_BIT_FIELD_TYPE (field)
1086 : 51274156 : && ! integer_zerop (TYPE_SIZE (type)));
1087 : :
1088 : : /* Record must have at least as much alignment as any field.
1089 : : Otherwise, the alignment of the field within the record is
1090 : : meaningless. */
1091 : 50856817 : if (targetm.ms_bitfield_layout_p (rli->t))
1092 : : {
1093 : : /* Here, the alignment of the underlying type of a bitfield can
1094 : : affect the alignment of a record; even a zero-sized field
1095 : : can do this. The alignment should be to the alignment of
1096 : : the type, except that for zero-size bitfields this only
1097 : : applies if there was an immediately prior, nonzero-size
1098 : : bitfield. (That's the way it is, experimentally.) */
1099 : 206 : if (!is_bitfield
1100 : 206 : || ((DECL_SIZE (field) == NULL_TREE
1101 : 131 : || !integer_zerop (DECL_SIZE (field)))
1102 : 121 : ? !DECL_PACKED (field)
1103 : 10 : : (rli->prev_field
1104 : 8 : && DECL_BIT_FIELD_TYPE (rli->prev_field)
1105 : 8 : && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
1106 : : {
1107 : 202 : unsigned int type_align = TYPE_ALIGN (type);
1108 : 277 : if (!is_bitfield && DECL_PACKED (field))
1109 : : type_align = desired_align;
1110 : : else
1111 : 195 : type_align = MAX (type_align, desired_align);
1112 : 202 : if (maximum_field_alignment != 0)
1113 : 68 : type_align = MIN (type_align, maximum_field_alignment);
1114 : 202 : rli->record_align = MAX (rli->record_align, type_align);
1115 : 202 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1116 : : }
1117 : : }
1118 : 50856611 : else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
1119 : : {
1120 : : /* Named bit-fields cause the entire structure to have the
1121 : : alignment implied by their type. Some targets also apply the same
1122 : : rules to unnamed bitfields. */
1123 : 417208 : if (DECL_NAME (field) != 0
1124 : 417208 : || targetm.align_anon_bitfield ())
1125 : : {
1126 : 272375 : unsigned int type_align = TYPE_ALIGN (type);
1127 : :
1128 : : #ifdef ADJUST_FIELD_ALIGN
1129 : 272375 : if (! TYPE_USER_ALIGN (type))
1130 : 265521 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1131 : : #endif
1132 : :
1133 : : /* Targets might chose to handle unnamed and hence possibly
1134 : : zero-width bitfield. Those are not influenced by #pragmas
1135 : : or packed attributes. */
1136 : 272375 : if (integer_zerop (DECL_SIZE (field)))
1137 : : {
1138 : 0 : if (initial_max_fld_align)
1139 : 0 : type_align = MIN (type_align,
1140 : : initial_max_fld_align * BITS_PER_UNIT);
1141 : : }
1142 : 272375 : else if (maximum_field_alignment != 0)
1143 : 177 : type_align = MIN (type_align, maximum_field_alignment);
1144 : 272198 : else if (DECL_PACKED (field))
1145 : 2858 : type_align = MIN (type_align, BITS_PER_UNIT);
1146 : :
1147 : : /* The alignment of the record is increased to the maximum
1148 : : of the current alignment, the alignment indicated on the
1149 : : field (i.e., the alignment specified by an __aligned__
1150 : : attribute), and the alignment indicated by the type of
1151 : : the field. */
1152 : 272375 : rli->record_align = MAX (rli->record_align, desired_align);
1153 : 272375 : rli->record_align = MAX (rli->record_align, type_align);
1154 : :
1155 : 272375 : if (warn_packed)
1156 : 0 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1157 : 272375 : user_align |= TYPE_USER_ALIGN (type);
1158 : : }
1159 : : }
1160 : : else
1161 : : {
1162 : 50439403 : rli->record_align = MAX (rli->record_align, desired_align);
1163 : 50439403 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1164 : : }
1165 : :
1166 : 50856817 : TYPE_USER_ALIGN (rli->t) |= user_align;
1167 : :
1168 : 50856817 : return desired_align;
1169 : : }
1170 : :
1171 : : /* Issue a warning if the record alignment, RECORD_ALIGN, is less than
1172 : : the field alignment of FIELD or FIELD isn't aligned. */
1173 : :
1174 : : static void
1175 : 50848594 : handle_warn_if_not_align (tree field, unsigned int record_align)
1176 : : {
1177 : 50848594 : tree type = TREE_TYPE (field);
1178 : :
1179 : 50848594 : if (type == error_mark_node)
1180 : 50848594 : return;
1181 : :
1182 : 50848547 : unsigned int warn_if_not_align = 0;
1183 : :
1184 : 50848547 : int opt_w = 0;
1185 : :
1186 : 50848547 : if (warn_if_not_aligned)
1187 : : {
1188 : 50848372 : warn_if_not_align = DECL_WARN_IF_NOT_ALIGN (field);
1189 : 50848372 : if (!warn_if_not_align)
1190 : 50848314 : warn_if_not_align = TYPE_WARN_IF_NOT_ALIGN (type);
1191 : 58 : if (warn_if_not_align)
1192 : : opt_w = OPT_Wif_not_aligned;
1193 : : }
1194 : :
1195 : : if (!warn_if_not_align
1196 : 50848489 : && warn_packed_not_aligned
1197 : 2305120 : && lookup_attribute ("aligned", TYPE_ATTRIBUTES (type)))
1198 : : {
1199 : 177 : warn_if_not_align = TYPE_ALIGN (type);
1200 : 177 : opt_w = OPT_Wpacked_not_aligned;
1201 : : }
1202 : :
1203 : 50848547 : if (!warn_if_not_align)
1204 : 50848312 : return;
1205 : :
1206 : 235 : tree context = DECL_CONTEXT (field);
1207 : :
1208 : 235 : warn_if_not_align /= BITS_PER_UNIT;
1209 : 235 : record_align /= BITS_PER_UNIT;
1210 : 235 : if ((record_align % warn_if_not_align) != 0)
1211 : 45 : warning (opt_w, "alignment %u of %qT is less than %u",
1212 : : record_align, context, warn_if_not_align);
1213 : :
1214 : 235 : tree off = byte_position (field);
1215 : 235 : if (!multiple_of_p (TREE_TYPE (off), off, size_int (warn_if_not_align)))
1216 : : {
1217 : 31 : if (TREE_CODE (off) == INTEGER_CST)
1218 : 30 : warning (opt_w, "%q+D offset %E in %qT isn%'t aligned to %u",
1219 : : field, off, context, warn_if_not_align);
1220 : : else
1221 : 1 : warning (opt_w, "%q+D offset %E in %qT may not be aligned to %u",
1222 : : field, off, context, warn_if_not_align);
1223 : : }
1224 : : }
1225 : :
1226 : : /* Called from place_field to handle unions. */
1227 : :
1228 : : static void
1229 : 1837106 : place_union_field (record_layout_info rli, tree field)
1230 : : {
1231 : 1837106 : update_alignment_for_field (rli, field, /*known_align=*/0);
1232 : :
1233 : 1837106 : DECL_FIELD_OFFSET (field) = size_zero_node;
1234 : 1837106 : DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
1235 : 1837106 : SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
1236 : 1837106 : handle_warn_if_not_align (field, rli->record_align);
1237 : :
1238 : : /* If this is an ERROR_MARK return *after* having set the
1239 : : field at the start of the union. This helps when parsing
1240 : : invalid fields. */
1241 : 1837106 : if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
1242 : : return;
1243 : :
1244 : 3103019 : if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1245 : 2211154 : && TYPE_TYPELESS_STORAGE (TREE_TYPE (field)))
1246 : 257437 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1247 : :
1248 : : /* We assume the union's size will be a multiple of a byte so we don't
1249 : : bother with BITPOS. */
1250 : 1837101 : if (TREE_CODE (rli->t) == UNION_TYPE)
1251 : 1837101 : rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1252 : 0 : else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
1253 : 0 : rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
1254 : : DECL_SIZE_UNIT (field), rli->offset);
1255 : : }
1256 : :
1257 : : /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
1258 : : at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
1259 : : units of alignment than the underlying TYPE. */
1260 : : static int
1261 : 253867 : excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
1262 : : HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
1263 : : {
1264 : : /* Note that the calculation of OFFSET might overflow; we calculate it so
1265 : : that we still get the right result as long as ALIGN is a power of two. */
1266 : 253867 : unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
1267 : :
1268 : 253867 : offset = offset % align;
1269 : 253867 : return ((offset + size + align - 1) / align
1270 : 253867 : > tree_to_uhwi (TYPE_SIZE (type)) / align);
1271 : : }
1272 : :
1273 : : /* RLI contains information about the layout of a RECORD_TYPE. FIELD
1274 : : is a FIELD_DECL to be added after those fields already present in
1275 : : T. (FIELD is not actually added to the TYPE_FIELDS list here;
1276 : : callers that desire that behavior must manually perform that step.) */
1277 : :
1278 : : void
1279 : 310183848 : place_field (record_layout_info rli, tree field)
1280 : : {
1281 : : /* The alignment required for FIELD. */
1282 : 310183848 : unsigned int desired_align;
1283 : : /* The alignment FIELD would have if we just dropped it into the
1284 : : record as it presently stands. */
1285 : 310183848 : unsigned int known_align;
1286 : 310183848 : unsigned int actual_align;
1287 : : /* The type of this field. */
1288 : 310183848 : tree type = TREE_TYPE (field);
1289 : :
1290 : 310183848 : gcc_assert (TREE_CODE (field) != ERROR_MARK);
1291 : :
1292 : : /* If FIELD is static, then treat it like a separate variable, not
1293 : : really like a structure field. If it is a FUNCTION_DECL, it's a
1294 : : method. In both cases, all we do is lay out the decl, and we do
1295 : : it *after* the record is laid out. */
1296 : 310183848 : if (VAR_P (field))
1297 : : {
1298 : 9666099 : vec_safe_push (rli->pending_statics, field);
1299 : 9666099 : return;
1300 : : }
1301 : :
1302 : : /* Enumerators and enum types which are local to this class need not
1303 : : be laid out. Likewise for initialized constant fields. */
1304 : 300517749 : else if (TREE_CODE (field) != FIELD_DECL)
1305 : : return;
1306 : :
1307 : : /* Unions are laid out very differently than records, so split
1308 : : that code off to another function. */
1309 : 50848594 : else if (TREE_CODE (rli->t) != RECORD_TYPE)
1310 : : {
1311 : 1837106 : place_union_field (rli, field);
1312 : 1837106 : return;
1313 : : }
1314 : :
1315 : 49011488 : else if (TREE_CODE (type) == ERROR_MARK)
1316 : : {
1317 : : /* Place this field at the current allocation position, so we
1318 : : maintain monotonicity. */
1319 : 42 : DECL_FIELD_OFFSET (field) = rli->offset;
1320 : 42 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1321 : 42 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1322 : 42 : handle_warn_if_not_align (field, rli->record_align);
1323 : 42 : return;
1324 : : }
1325 : :
1326 : 49011446 : if (AGGREGATE_TYPE_P (type)
1327 : 49011446 : && TYPE_TYPELESS_STORAGE (type))
1328 : 913529 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1329 : :
1330 : : /* Work out the known alignment so far. Note that A & (-A) is the
1331 : : value of the least-significant bit in A that is one. */
1332 : 49011446 : if (! integer_zerop (rli->bitpos))
1333 : 16599253 : known_align = least_bit_hwi (tree_to_uhwi (rli->bitpos));
1334 : 32412193 : else if (integer_zerop (rli->offset))
1335 : : known_align = 0;
1336 : 7751976 : else if (tree_fits_uhwi_p (rli->offset))
1337 : 7751212 : known_align = (BITS_PER_UNIT
1338 : 7751212 : * least_bit_hwi (tree_to_uhwi (rli->offset)));
1339 : : else
1340 : 764 : known_align = rli->offset_align;
1341 : :
1342 : 49011446 : desired_align = update_alignment_for_field (rli, field, known_align);
1343 : 49011446 : if (known_align == 0)
1344 : 49206567 : known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1345 : :
1346 : 49011446 : if (warn_packed && DECL_PACKED (field))
1347 : : {
1348 : 4 : if (known_align >= TYPE_ALIGN (type))
1349 : : {
1350 : 4 : if (TYPE_ALIGN (type) > desired_align)
1351 : : {
1352 : 4 : if (STRICT_ALIGNMENT)
1353 : : warning (OPT_Wattributes, "packed attribute causes "
1354 : : "inefficient alignment for %q+D", field);
1355 : : /* Don't warn if DECL_PACKED was set by the type. */
1356 : 4 : else if (!TYPE_PACKED (rli->t))
1357 : 0 : warning (OPT_Wattributes, "packed attribute is "
1358 : : "unnecessary for %q+D", field);
1359 : : }
1360 : : }
1361 : : else
1362 : 0 : rli->packed_maybe_necessary = 1;
1363 : : }
1364 : :
1365 : : /* Does this field automatically have alignment it needs by virtue
1366 : : of the fields that precede it and the record's own alignment? */
1367 : 49011446 : if (known_align < desired_align
1368 : 49011446 : && (! targetm.ms_bitfield_layout_p (rli->t)
1369 : 17 : || rli->prev_field == NULL))
1370 : : {
1371 : : /* No, we need to skip space before this field.
1372 : : Bump the cumulative size to multiple of field alignment. */
1373 : :
1374 : 1335491 : if (!targetm.ms_bitfield_layout_p (rli->t)
1375 : 1335488 : && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION
1376 : 2656140 : && !TYPE_ARTIFICIAL (rli->t))
1377 : 1320618 : warning (OPT_Wpadded, "padding struct to align %q+D", field);
1378 : :
1379 : : /* If the alignment is still within offset_align, just align
1380 : : the bit position. */
1381 : 1335491 : if (desired_align < rli->offset_align)
1382 : 1299868 : rli->bitpos = round_up (rli->bitpos, desired_align);
1383 : : else
1384 : : {
1385 : : /* First adjust OFFSET by the partial bits, then align. */
1386 : 35623 : rli->offset
1387 : 35623 : = size_binop (PLUS_EXPR, rli->offset,
1388 : : fold_convert (sizetype,
1389 : : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1390 : : bitsize_unit_node)));
1391 : 35623 : rli->bitpos = bitsize_zero_node;
1392 : :
1393 : 35623 : rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
1394 : : }
1395 : :
1396 : 1335491 : if (! TREE_CONSTANT (rli->offset))
1397 : 416 : rli->offset_align = desired_align;
1398 : : }
1399 : :
1400 : : /* Handle compatibility with PCC. Note that if the record has any
1401 : : variable-sized fields, we need not worry about compatibility. */
1402 : 49011446 : if (PCC_BITFIELD_TYPE_MATTERS
1403 : 49011446 : && ! targetm.ms_bitfield_layout_p (rli->t)
1404 : 49011245 : && TREE_CODE (field) == FIELD_DECL
1405 : 49011245 : && type != error_mark_node
1406 : 49011245 : && DECL_BIT_FIELD (field)
1407 : 257922 : && (! DECL_PACKED (field)
1408 : : /* Enter for these packed fields only to issue a warning. */
1409 : 2733 : || TYPE_ALIGN (type) <= BITS_PER_UNIT)
1410 : 255390 : && maximum_field_alignment == 0
1411 : 255156 : && ! integer_zerop (DECL_SIZE (field))
1412 : 253881 : && tree_fits_uhwi_p (DECL_SIZE (field))
1413 : 253881 : && tree_fits_uhwi_p (rli->offset)
1414 : 49265313 : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1415 : : {
1416 : 253867 : unsigned int type_align = TYPE_ALIGN (type);
1417 : 253867 : tree dsize = DECL_SIZE (field);
1418 : 253867 : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1419 : 253867 : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1420 : 253867 : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1421 : :
1422 : : #ifdef ADJUST_FIELD_ALIGN
1423 : 253867 : if (! TYPE_USER_ALIGN (type))
1424 : 248878 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1425 : : #endif
1426 : :
1427 : : /* A bit field may not span more units of alignment of its type
1428 : : than its type itself. Advance to next boundary if necessary. */
1429 : 253867 : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1430 : : {
1431 : 8925 : if (DECL_PACKED (field))
1432 : : {
1433 : 27 : if (warn_packed_bitfield_compat == 1)
1434 : 17 : inform
1435 : 17 : (input_location,
1436 : : "offset of packed bit-field %qD has changed in GCC 4.4",
1437 : : field);
1438 : : }
1439 : : else
1440 : 8898 : rli->bitpos = round_up (rli->bitpos, type_align);
1441 : : }
1442 : :
1443 : 253867 : if (! DECL_PACKED (field))
1444 : 253672 : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1445 : :
1446 : 253867 : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1447 : : TYPE_WARN_IF_NOT_ALIGN (type));
1448 : : }
1449 : :
1450 : : #ifdef BITFIELD_NBYTES_LIMITED
1451 : : if (BITFIELD_NBYTES_LIMITED
1452 : : && ! targetm.ms_bitfield_layout_p (rli->t)
1453 : : && TREE_CODE (field) == FIELD_DECL
1454 : : && type != error_mark_node
1455 : : && DECL_BIT_FIELD_TYPE (field)
1456 : : && ! DECL_PACKED (field)
1457 : : && ! integer_zerop (DECL_SIZE (field))
1458 : : && tree_fits_uhwi_p (DECL_SIZE (field))
1459 : : && tree_fits_uhwi_p (rli->offset)
1460 : : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1461 : : {
1462 : : unsigned int type_align = TYPE_ALIGN (type);
1463 : : tree dsize = DECL_SIZE (field);
1464 : : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1465 : : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1466 : : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1467 : :
1468 : : #ifdef ADJUST_FIELD_ALIGN
1469 : : if (! TYPE_USER_ALIGN (type))
1470 : : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1471 : : #endif
1472 : :
1473 : : if (maximum_field_alignment != 0)
1474 : : type_align = MIN (type_align, maximum_field_alignment);
1475 : : /* ??? This test is opposite the test in the containing if
1476 : : statement, so this code is unreachable currently. */
1477 : : else if (DECL_PACKED (field))
1478 : : type_align = MIN (type_align, BITS_PER_UNIT);
1479 : :
1480 : : /* A bit field may not span the unit of alignment of its type.
1481 : : Advance to next boundary if necessary. */
1482 : : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1483 : : rli->bitpos = round_up (rli->bitpos, type_align);
1484 : :
1485 : : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1486 : : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1487 : : TYPE_WARN_IF_NOT_ALIGN (type));
1488 : : }
1489 : : #endif
1490 : :
1491 : : /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1492 : : A subtlety:
1493 : : When a bit field is inserted into a packed record, the whole
1494 : : size of the underlying type is used by one or more same-size
1495 : : adjacent bitfields. (That is, if its long:3, 32 bits is
1496 : : used in the record, and any additional adjacent long bitfields are
1497 : : packed into the same chunk of 32 bits. However, if the size
1498 : : changes, a new field of that size is allocated.) In an unpacked
1499 : : record, this is the same as using alignment, but not equivalent
1500 : : when packing.
1501 : :
1502 : : Note: for compatibility, we use the type size, not the type alignment
1503 : : to determine alignment, since that matches the documentation */
1504 : :
1505 : 49011446 : if (targetm.ms_bitfield_layout_p (rli->t))
1506 : : {
1507 : 201 : tree prev_saved = rli->prev_field;
1508 : 283 : tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1509 : :
1510 : : /* This is a bitfield if it exists. */
1511 : 201 : if (rli->prev_field)
1512 : : {
1513 : 82 : bool realign_p = known_align < desired_align;
1514 : :
1515 : : /* If both are bitfields, nonzero, and the same size, this is
1516 : : the middle of a run. Zero declared size fields are special
1517 : : and handled as "end of run". (Note: it's nonzero declared
1518 : : size, but equal type sizes!) (Since we know that both
1519 : : the current and previous fields are bitfields by the
1520 : : time we check it, DECL_SIZE must be present for both.) */
1521 : 82 : if (DECL_BIT_FIELD_TYPE (field)
1522 : 64 : && !integer_zerop (DECL_SIZE (field))
1523 : 56 : && !integer_zerop (DECL_SIZE (rli->prev_field))
1524 : 54 : && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
1525 : 54 : && tree_fits_uhwi_p (TYPE_SIZE (type))
1526 : 136 : && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1527 : : {
1528 : : /* We're in the middle of a run of equal type size fields; make
1529 : : sure we realign if we run out of bits. (Not decl size,
1530 : : type size!) */
1531 : 52 : HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
1532 : :
1533 : 52 : if (rli->remaining_in_alignment < bitsize)
1534 : : {
1535 : 1 : HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
1536 : :
1537 : : /* out of bits; bump up to next 'word'. */
1538 : 1 : rli->bitpos
1539 : 1 : = size_binop (PLUS_EXPR, rli->bitpos,
1540 : : bitsize_int (rli->remaining_in_alignment));
1541 : 1 : rli->prev_field = field;
1542 : 1 : if (typesize < bitsize)
1543 : 0 : rli->remaining_in_alignment = 0;
1544 : : else
1545 : 1 : rli->remaining_in_alignment = typesize - bitsize;
1546 : : }
1547 : : else
1548 : : {
1549 : 51 : rli->remaining_in_alignment -= bitsize;
1550 : 51 : realign_p = false;
1551 : : }
1552 : : }
1553 : : else
1554 : : {
1555 : : /* End of a run: if leaving a run of bitfields of the same type
1556 : : size, we have to "use up" the rest of the bits of the type
1557 : : size.
1558 : :
1559 : : Compute the new position as the sum of the size for the prior
1560 : : type and where we first started working on that type.
1561 : : Note: since the beginning of the field was aligned then
1562 : : of course the end will be too. No round needed. */
1563 : :
1564 : 30 : if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1565 : : {
1566 : 20 : rli->bitpos
1567 : 20 : = size_binop (PLUS_EXPR, rli->bitpos,
1568 : : bitsize_int (rli->remaining_in_alignment));
1569 : : }
1570 : : else
1571 : : /* We "use up" size zero fields; the code below should behave
1572 : : as if the prior field was not a bitfield. */
1573 : : prev_saved = NULL;
1574 : :
1575 : : /* Cause a new bitfield to be captured, either this time (if
1576 : : currently a bitfield) or next time we see one. */
1577 : 30 : if (!DECL_BIT_FIELD_TYPE (field)
1578 : 30 : || integer_zerop (DECL_SIZE (field)))
1579 : 26 : rli->prev_field = NULL;
1580 : : }
1581 : :
1582 : : /* Does this field automatically have alignment it needs by virtue
1583 : : of the fields that precede it and the record's own alignment? */
1584 : 82 : if (realign_p)
1585 : : {
1586 : : /* If the alignment is still within offset_align, just align
1587 : : the bit position. */
1588 : 13 : if (desired_align < rli->offset_align)
1589 : 11 : rli->bitpos = round_up (rli->bitpos, desired_align);
1590 : : else
1591 : : {
1592 : : /* First adjust OFFSET by the partial bits, then align. */
1593 : 2 : tree d = size_binop (CEIL_DIV_EXPR, rli->bitpos,
1594 : : bitsize_unit_node);
1595 : 2 : rli->offset = size_binop (PLUS_EXPR, rli->offset,
1596 : : fold_convert (sizetype, d));
1597 : 2 : rli->bitpos = bitsize_zero_node;
1598 : :
1599 : 2 : rli->offset = round_up (rli->offset,
1600 : : desired_align / BITS_PER_UNIT);
1601 : : }
1602 : :
1603 : 13 : if (! TREE_CONSTANT (rli->offset))
1604 : 0 : rli->offset_align = desired_align;
1605 : : }
1606 : :
1607 : 82 : normalize_rli (rli);
1608 : : }
1609 : :
1610 : : /* If we're starting a new run of same type size bitfields
1611 : : (or a run of non-bitfields), set up the "first of the run"
1612 : : fields.
1613 : :
1614 : : That is, if the current field is not a bitfield, or if there
1615 : : was a prior bitfield the type sizes differ, or if there wasn't
1616 : : a prior bitfield the size of the current field is nonzero.
1617 : :
1618 : : Note: we must be sure to test ONLY the type size if there was
1619 : : a prior bitfield and ONLY for the current field being zero if
1620 : : there wasn't. */
1621 : :
1622 : 201 : if (!DECL_BIT_FIELD_TYPE (field)
1623 : 261 : || (prev_saved != NULL
1624 : 129 : ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1625 : 69 : : !integer_zerop (DECL_SIZE (field))))
1626 : : {
1627 : : /* Never smaller than a byte for compatibility. */
1628 : 143 : unsigned int type_align = BITS_PER_UNIT;
1629 : :
1630 : : /* (When not a bitfield), we could be seeing a flex array (with
1631 : : no DECL_SIZE). Since we won't be using remaining_in_alignment
1632 : : until we see a bitfield (and come by here again) we just skip
1633 : : calculating it. */
1634 : 143 : if (DECL_SIZE (field) != NULL
1635 : 143 : && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field)))
1636 : 285 : && tree_fits_uhwi_p (DECL_SIZE (field)))
1637 : : {
1638 : 142 : unsigned HOST_WIDE_INT bitsize
1639 : 142 : = tree_to_uhwi (DECL_SIZE (field));
1640 : 142 : unsigned HOST_WIDE_INT typesize
1641 : 142 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
1642 : :
1643 : 142 : if (typesize < bitsize)
1644 : 0 : rli->remaining_in_alignment = 0;
1645 : : else
1646 : 142 : rli->remaining_in_alignment = typesize - bitsize;
1647 : : }
1648 : :
1649 : : /* Now align (conventionally) for the new type. */
1650 : 143 : if (! DECL_PACKED (field))
1651 : 137 : type_align = TYPE_ALIGN (TREE_TYPE (field));
1652 : :
1653 : 143 : if (maximum_field_alignment != 0)
1654 : 56 : type_align = MIN (type_align, maximum_field_alignment);
1655 : :
1656 : 143 : rli->bitpos = round_up (rli->bitpos, type_align);
1657 : :
1658 : : /* If we really aligned, don't allow subsequent bitfields
1659 : : to undo that. */
1660 : 143 : rli->prev_field = NULL;
1661 : : }
1662 : : }
1663 : :
1664 : : /* Offset so far becomes the position of this field after normalizing. */
1665 : 49011446 : normalize_rli (rli);
1666 : 49011446 : DECL_FIELD_OFFSET (field) = rli->offset;
1667 : 49011446 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1668 : 49011446 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1669 : 49011446 : handle_warn_if_not_align (field, rli->record_align);
1670 : :
1671 : : /* Evaluate nonconstant offsets only once, either now or as soon as safe. */
1672 : 49011446 : if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST)
1673 : 1168 : DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field));
1674 : :
1675 : : /* If this field ended up more aligned than we thought it would be (we
1676 : : approximate this by seeing if its position changed), lay out the field
1677 : : again; perhaps we can use an integral mode for it now. */
1678 : 49011446 : if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1679 : 16053342 : actual_align = least_bit_hwi (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
1680 : 32958104 : else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1681 : 49206541 : actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1682 : 8297887 : else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1683 : 8296736 : actual_align = (BITS_PER_UNIT
1684 : 8296736 : * least_bit_hwi (tree_to_uhwi (DECL_FIELD_OFFSET (field))));
1685 : : else
1686 : 1151 : actual_align = DECL_OFFSET_ALIGN (field);
1687 : : /* ACTUAL_ALIGN is still the actual alignment *within the record* .
1688 : : store / extract bit field operations will check the alignment of the
1689 : : record against the mode of bit fields. */
1690 : :
1691 : 49011446 : if (known_align != actual_align)
1692 : 1343691 : layout_decl (field, actual_align);
1693 : :
1694 : 49011446 : if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1695 : 62444 : rli->prev_field = field;
1696 : :
1697 : : /* Now add size of this field to the size of the record. If the size is
1698 : : not constant, treat the field as being a multiple of bytes and just
1699 : : adjust the offset, resetting the bit position. Otherwise, apportion the
1700 : : size amongst the bit position and offset. First handle the case of an
1701 : : unspecified size, which can happen when we have an invalid nested struct
1702 : : definition, such as struct j { struct j { int i; } }. The error message
1703 : : is printed in finish_struct. */
1704 : 49011446 : if (DECL_SIZE (field) == 0)
1705 : : /* Do nothing. */;
1706 : 48947113 : else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1707 : 48947113 : || TREE_OVERFLOW (DECL_SIZE (field)))
1708 : : {
1709 : 1042 : rli->offset
1710 : 1042 : = size_binop (PLUS_EXPR, rli->offset,
1711 : : fold_convert (sizetype,
1712 : : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1713 : : bitsize_unit_node)));
1714 : 1042 : rli->offset
1715 : 1042 : = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1716 : 1042 : rli->bitpos = bitsize_zero_node;
1717 : 1042 : rli->offset_align = MIN (rli->offset_align, desired_align);
1718 : :
1719 : 1042 : if (!multiple_of_p (bitsizetype, DECL_SIZE (field),
1720 : 2084 : bitsize_int (rli->offset_align)))
1721 : : {
1722 : 302 : tree type = strip_array_types (TREE_TYPE (field));
1723 : : /* The above adjusts offset_align just based on the start of the
1724 : : field. The field might not have a size that is a multiple of
1725 : : that offset_align though. If the field is an array of fixed
1726 : : sized elements, assume there can be any multiple of those
1727 : : sizes. If it is a variable length aggregate or array of
1728 : : variable length aggregates, assume worst that the end is
1729 : : just BITS_PER_UNIT aligned. */
1730 : 302 : if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
1731 : : {
1732 : 302 : if (TREE_INT_CST_LOW (TYPE_SIZE (type)))
1733 : : {
1734 : 302 : unsigned HOST_WIDE_INT sz
1735 : 302 : = least_bit_hwi (TREE_INT_CST_LOW (TYPE_SIZE (type)));
1736 : 302 : rli->offset_align = MIN (rli->offset_align, sz);
1737 : : }
1738 : : }
1739 : : else
1740 : 0 : rli->offset_align = MIN (rli->offset_align, BITS_PER_UNIT);
1741 : : }
1742 : : }
1743 : 48946071 : else if (targetm.ms_bitfield_layout_p (rli->t))
1744 : : {
1745 : 201 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1746 : :
1747 : : /* If FIELD is the last field and doesn't end at the full length
1748 : : of the type then pad the struct out to the full length of the
1749 : : last type. */
1750 : 201 : if (DECL_BIT_FIELD_TYPE (field)
1751 : 201 : && !integer_zerop (DECL_SIZE (field)))
1752 : : {
1753 : : /* We have to scan, because non-field DECLS are also here. */
1754 : : tree probe = field;
1755 : 182 : while ((probe = DECL_CHAIN (probe)))
1756 : 135 : if (TREE_CODE (probe) == FIELD_DECL)
1757 : : break;
1758 : 119 : if (!probe)
1759 : 47 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1760 : : bitsize_int (rli->remaining_in_alignment));
1761 : : }
1762 : :
1763 : 201 : normalize_rli (rli);
1764 : : }
1765 : : else
1766 : : {
1767 : 48945870 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1768 : 48945870 : normalize_rli (rli);
1769 : : }
1770 : : }
1771 : :
1772 : : /* Assuming that all the fields have been laid out, this function uses
1773 : : RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1774 : : indicated by RLI. */
1775 : :
1776 : : static void
1777 : 41856897 : finalize_record_size (record_layout_info rli)
1778 : : {
1779 : 41856897 : tree unpadded_size, unpadded_size_unit;
1780 : :
1781 : : /* Now we want just byte and bit offsets, so set the offset alignment
1782 : : to be a byte and then normalize. */
1783 : 41856897 : rli->offset_align = BITS_PER_UNIT;
1784 : 41856897 : normalize_rli (rli);
1785 : :
1786 : : /* Determine the desired alignment. */
1787 : : #ifdef ROUND_TYPE_ALIGN
1788 : : SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1789 : : rli->record_align));
1790 : : #else
1791 : 41856897 : SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align));
1792 : : #endif
1793 : :
1794 : : /* Compute the size so far. Be sure to allow for extra bits in the
1795 : : size in bytes. We have guaranteed above that it will be no more
1796 : : than a single byte. */
1797 : 41856897 : unpadded_size = rli_size_so_far (rli);
1798 : 41856897 : unpadded_size_unit = rli_size_unit_so_far (rli);
1799 : 41856897 : if (! integer_zerop (rli->bitpos))
1800 : 2114 : unpadded_size_unit
1801 : 2114 : = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1802 : :
1803 : : /* Round the size up to be a multiple of the required alignment. */
1804 : 41856897 : TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1805 : 41856897 : TYPE_SIZE_UNIT (rli->t)
1806 : 41856897 : = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1807 : :
1808 : 41856897 : if (TREE_CONSTANT (unpadded_size)
1809 : 41856216 : && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
1810 : 1020122 : && input_location != BUILTINS_LOCATION
1811 : 42876803 : && !TYPE_ARTIFICIAL (rli->t))
1812 : : {
1813 : 1012293 : tree pad_size
1814 : 1012293 : = size_binop (MINUS_EXPR, TYPE_SIZE_UNIT (rli->t), unpadded_size_unit);
1815 : 1012293 : warning (OPT_Wpadded,
1816 : : "padding struct size to alignment boundary with %E bytes", pad_size);
1817 : : }
1818 : :
1819 : 24 : if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1820 : 24 : && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1821 : 41856899 : && TREE_CONSTANT (unpadded_size))
1822 : : {
1823 : 2 : tree unpacked_size;
1824 : :
1825 : : #ifdef ROUND_TYPE_ALIGN
1826 : : rli->unpacked_align
1827 : : = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1828 : : #else
1829 : 2 : rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1830 : : #endif
1831 : :
1832 : 2 : unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1833 : 2 : if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1834 : : {
1835 : 2 : if (TYPE_NAME (rli->t))
1836 : : {
1837 : 2 : tree name;
1838 : :
1839 : 2 : if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1840 : 2 : name = TYPE_NAME (rli->t);
1841 : : else
1842 : 0 : name = DECL_NAME (TYPE_NAME (rli->t));
1843 : :
1844 : 2 : if (STRICT_ALIGNMENT)
1845 : : warning (OPT_Wpacked, "packed attribute causes inefficient "
1846 : : "alignment for %qE", name);
1847 : : else
1848 : 2 : warning (OPT_Wpacked,
1849 : : "packed attribute is unnecessary for %qE", name);
1850 : : }
1851 : : else
1852 : : {
1853 : 0 : if (STRICT_ALIGNMENT)
1854 : : warning (OPT_Wpacked,
1855 : : "packed attribute causes inefficient alignment");
1856 : : else
1857 : 0 : warning (OPT_Wpacked, "packed attribute is unnecessary");
1858 : : }
1859 : : }
1860 : : }
1861 : 41856897 : }
1862 : :
1863 : : /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1864 : :
1865 : : void
1866 : 77087896 : compute_record_mode (tree type)
1867 : : {
1868 : 77087896 : tree field;
1869 : 77087896 : machine_mode mode = VOIDmode;
1870 : :
1871 : : /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1872 : : However, if possible, we use a mode that fits in a register
1873 : : instead, in order to allow for better optimization down the
1874 : : line. */
1875 : 77087896 : SET_TYPE_MODE (type, BLKmode);
1876 : :
1877 : 77087896 : poly_uint64 type_size;
1878 : 77087896 : if (!poly_int_tree_p (TYPE_SIZE (type), &type_size))
1879 : 7752298 : return;
1880 : :
1881 : : /* A record which has any BLKmode members must itself be
1882 : : BLKmode; it can't go in a register. Unless the member is
1883 : : BLKmode only because it isn't aligned. */
1884 : 300636935 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1885 : : {
1886 : 231301337 : if (TREE_CODE (field) != FIELD_DECL)
1887 : 162946655 : continue;
1888 : :
1889 : 68354682 : poly_uint64 field_size;
1890 : 68354682 : if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1891 : 68354279 : || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1892 : 39632218 : && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1893 : 79221638 : && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1894 : 39589420 : && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1895 : 60614325 : || !tree_fits_poly_uint64_p (bit_position (field))
1896 : 60614325 : || DECL_SIZE (field) == 0
1897 : 128969007 : || !poly_int_tree_p (DECL_SIZE (field), &field_size))
1898 : 7751574 : return;
1899 : :
1900 : : /* If this field is the whole struct, remember its mode so
1901 : : that, say, we can put a double in a class into a DF
1902 : : register instead of forcing it to live in the stack. */
1903 : 60614325 : if (known_eq (field_size, type_size)
1904 : : /* Partial int types (e.g. __int20) may have TYPE_SIZE equal to
1905 : : wider types (e.g. int32), despite precision being less. Ensure
1906 : : that the TYPE_MODE of the struct does not get set to the partial
1907 : : int mode if there is a wider type also in the struct. */
1908 : 60614325 : && known_gt (GET_MODE_PRECISION (DECL_MODE (field)),
1909 : : GET_MODE_PRECISION (mode)))
1910 : 4190605 : mode = DECL_MODE (field);
1911 : :
1912 : : /* With some targets, it is sub-optimal to access an aligned
1913 : : BLKmode structure as a scalar. */
1914 : 60614325 : if (targetm.member_type_forces_blk (field, mode))
1915 : : return;
1916 : : }
1917 : :
1918 : : /* If we only have one real field; use its mode if that mode's size
1919 : : matches the type's size. This generally only applies to RECORD_TYPE.
1920 : : For UNION_TYPE, if the widest field is MODE_INT then use that mode.
1921 : : If the widest field is MODE_PARTIAL_INT, and the union will be passed
1922 : : by reference, then use that mode. */
1923 : 69335598 : if ((TREE_CODE (type) == RECORD_TYPE
1924 : 452742 : || (TREE_CODE (type) == UNION_TYPE
1925 : 452742 : && (GET_MODE_CLASS (mode) == MODE_INT
1926 : 59506 : || (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
1927 : 0 : && (targetm.calls.pass_by_reference
1928 : 0 : (pack_cumulative_args (0),
1929 : 4079384 : function_arg_info (type, mode, /*named=*/false)))))))
1930 : 69276092 : && mode != VOIDmode
1931 : 73415127 : && known_eq (GET_MODE_BITSIZE (mode), type_size))
1932 : : ;
1933 : : else
1934 : 65256214 : mode = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1).else_blk ();
1935 : :
1936 : : /* If structure's known alignment is less than what the scalar
1937 : : mode would need, and it matters, then stick with BLKmode. */
1938 : 69335598 : if (mode != BLKmode
1939 : : && STRICT_ALIGNMENT
1940 : : && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1941 : : || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
1942 : : {
1943 : : /* If this is the only reason this type is BLKmode, then
1944 : : don't force containing types to be BLKmode. */
1945 : : TYPE_NO_FORCE_BLK (type) = 1;
1946 : : mode = BLKmode;
1947 : : }
1948 : :
1949 : 69335598 : SET_TYPE_MODE (type, mode);
1950 : : }
1951 : :
1952 : : /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1953 : : out. */
1954 : :
1955 : : static void
1956 : 1342714672 : finalize_type_size (tree type)
1957 : : {
1958 : : /* Normally, use the alignment corresponding to the mode chosen.
1959 : : However, where strict alignment is not required, avoid
1960 : : over-aligning structures, since most compilers do not do this
1961 : : alignment. */
1962 : 1342714672 : bool tua_cleared_p = false;
1963 : 1342714672 : if (TYPE_MODE (type) != BLKmode
1964 : 1291010209 : && TYPE_MODE (type) != VOIDmode
1965 : 2633443233 : && (STRICT_ALIGNMENT || !AGGREGATE_TYPE_P (type)))
1966 : : {
1967 : 1238902270 : unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1968 : :
1969 : : /* Don't override a larger alignment requirement coming from a user
1970 : : alignment of one of the fields. */
1971 : 1238902270 : if (mode_align >= TYPE_ALIGN (type))
1972 : : {
1973 : 1238902270 : SET_TYPE_ALIGN (type, mode_align);
1974 : : /* Remember that we're about to reset this flag. */
1975 : 1238902270 : tua_cleared_p = TYPE_USER_ALIGN (type);
1976 : 1238902270 : TYPE_USER_ALIGN (type) = false;
1977 : : }
1978 : : }
1979 : :
1980 : : /* Do machine-dependent extra alignment. */
1981 : : #ifdef ROUND_TYPE_ALIGN
1982 : : SET_TYPE_ALIGN (type,
1983 : : ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT));
1984 : : #endif
1985 : :
1986 : : /* If we failed to find a simple way to calculate the unit size
1987 : : of the type, find it by division. */
1988 : 1342714672 : if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1989 : : /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1990 : : result will fit in sizetype. We will get more efficient code using
1991 : : sizetype, so we force a conversion. */
1992 : 0 : TYPE_SIZE_UNIT (type)
1993 : 0 : = fold_convert (sizetype,
1994 : : size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1995 : : bitsize_unit_node));
1996 : :
1997 : 1342714672 : if (TYPE_SIZE (type) != 0)
1998 : : {
1999 : 1337025982 : TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
2000 : 1337025982 : TYPE_SIZE_UNIT (type)
2001 : 2674051964 : = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type));
2002 : : }
2003 : :
2004 : : /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
2005 : 1342714672 : if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2006 : 253752 : TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
2007 : 1342714672 : if (TYPE_SIZE_UNIT (type) != 0
2008 : 1342714672 : && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
2009 : 253752 : TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
2010 : :
2011 : : /* Handle empty records as per the x86-64 psABI. */
2012 : 1342714672 : TYPE_EMPTY_P (type) = targetm.calls.empty_record_p (type);
2013 : :
2014 : : /* Also layout any other variants of the type. */
2015 : 1342714672 : if (TYPE_NEXT_VARIANT (type)
2016 : 1342714672 : || type != TYPE_MAIN_VARIANT (type))
2017 : : {
2018 : 38139912 : tree variant;
2019 : : /* Record layout info of this variant. */
2020 : 38139912 : tree size = TYPE_SIZE (type);
2021 : 38139912 : tree size_unit = TYPE_SIZE_UNIT (type);
2022 : 38139912 : unsigned int align = TYPE_ALIGN (type);
2023 : 38139912 : unsigned int precision = TYPE_PRECISION (type);
2024 : 38139912 : unsigned int user_align = TYPE_USER_ALIGN (type);
2025 : 38139912 : machine_mode mode = TYPE_MODE (type);
2026 : 38139912 : bool empty_p = TYPE_EMPTY_P (type);
2027 : 38139912 : bool typeless = AGGREGATE_TYPE_P (type) && TYPE_TYPELESS_STORAGE (type);
2028 : :
2029 : : /* Copy it into all variants. */
2030 : 38139912 : for (variant = TYPE_MAIN_VARIANT (type);
2031 : 126879104 : variant != NULL_TREE;
2032 : 88739192 : variant = TYPE_NEXT_VARIANT (variant))
2033 : : {
2034 : 88739192 : TYPE_SIZE (variant) = size;
2035 : 88739192 : TYPE_SIZE_UNIT (variant) = size_unit;
2036 : 88739192 : unsigned valign = align;
2037 : 88739192 : if (TYPE_USER_ALIGN (variant))
2038 : : {
2039 : 585050 : valign = MAX (valign, TYPE_ALIGN (variant));
2040 : : /* If we reset TYPE_USER_ALIGN on the main variant, we might
2041 : : need to reset it on the variants too. TYPE_MODE will be set
2042 : : to MODE in this variant, so we can use that. */
2043 : 585050 : if (tua_cleared_p && GET_MODE_ALIGNMENT (mode) >= valign)
2044 : 0 : TYPE_USER_ALIGN (variant) = false;
2045 : : }
2046 : : else
2047 : 88154142 : TYPE_USER_ALIGN (variant) = user_align;
2048 : 88739192 : SET_TYPE_ALIGN (variant, valign);
2049 : 88739192 : TYPE_PRECISION (variant) = precision;
2050 : 88739192 : SET_TYPE_MODE (variant, mode);
2051 : 88739192 : TYPE_EMPTY_P (variant) = empty_p;
2052 : 88739192 : if (AGGREGATE_TYPE_P (variant))
2053 : 88739176 : TYPE_TYPELESS_STORAGE (variant) = typeless;
2054 : : }
2055 : : }
2056 : 1342714672 : }
2057 : :
2058 : : /* Return a new underlying object for a bitfield started with FIELD. */
2059 : :
2060 : : static tree
2061 : 67929 : start_bitfield_representative (tree field)
2062 : : {
2063 : 67929 : tree repr = make_node (FIELD_DECL);
2064 : 67929 : DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
2065 : : /* Force the representative to begin at a BITS_PER_UNIT aligned
2066 : : boundary - C++ may use tail-padding of a base object to
2067 : : continue packing bits so the bitfield region does not start
2068 : : at bit zero (see g++.dg/abi/bitfield5.C for example).
2069 : : Unallocated bits may happen for other reasons as well,
2070 : : for example Ada which allows explicit bit-granular structure layout. */
2071 : 135858 : DECL_FIELD_BIT_OFFSET (repr)
2072 : 67929 : = size_binop (BIT_AND_EXPR,
2073 : : DECL_FIELD_BIT_OFFSET (field),
2074 : : bitsize_int (~(BITS_PER_UNIT - 1)));
2075 : 67929 : SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
2076 : 67929 : DECL_SIZE (repr) = DECL_SIZE (field);
2077 : 67929 : DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
2078 : 67929 : DECL_PACKED (repr) = DECL_PACKED (field);
2079 : 67929 : DECL_CONTEXT (repr) = DECL_CONTEXT (field);
2080 : : /* There are no indirect accesses to this field. If we introduce
2081 : : some then they have to use the record alias set. This makes
2082 : : sure to properly conflict with [indirect] accesses to addressable
2083 : : fields of the bitfield group. */
2084 : 67929 : DECL_NONADDRESSABLE_P (repr) = 1;
2085 : 67929 : return repr;
2086 : : }
2087 : :
2088 : : /* Finish up a bitfield group that was started by creating the underlying
2089 : : object REPR with the last field in the bitfield group FIELD. */
2090 : :
2091 : : static void
2092 : 67929 : finish_bitfield_representative (tree repr, tree field)
2093 : : {
2094 : 67929 : unsigned HOST_WIDE_INT bitsize, maxbitsize;
2095 : 67929 : tree nextf, size;
2096 : :
2097 : 67929 : size = size_diffop (DECL_FIELD_OFFSET (field),
2098 : : DECL_FIELD_OFFSET (repr));
2099 : 135884 : while (TREE_CODE (size) == COMPOUND_EXPR)
2100 : 26 : size = TREE_OPERAND (size, 1);
2101 : 67929 : gcc_assert (tree_fits_uhwi_p (size));
2102 : 67929 : bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
2103 : 67929 : + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2104 : 67929 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
2105 : 67929 : + tree_to_uhwi (DECL_SIZE (field)));
2106 : :
2107 : : /* Round up bitsize to multiples of BITS_PER_UNIT. */
2108 : 67929 : bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
2109 : :
2110 : : /* Now nothing tells us how to pad out bitsize ... */
2111 : 67929 : if (TREE_CODE (DECL_CONTEXT (field)) == RECORD_TYPE)
2112 : : {
2113 : 64517 : nextf = DECL_CHAIN (field);
2114 : 106352 : while (nextf && TREE_CODE (nextf) != FIELD_DECL)
2115 : 41835 : nextf = DECL_CHAIN (nextf);
2116 : : }
2117 : : else
2118 : : nextf = NULL_TREE;
2119 : 64517 : if (nextf)
2120 : : {
2121 : 37399 : tree maxsize;
2122 : : /* If there was an error, the field may be not laid out
2123 : : correctly. Don't bother to do anything. */
2124 : 37399 : if (TREE_TYPE (nextf) == error_mark_node)
2125 : : {
2126 : 3 : TREE_TYPE (repr) = error_mark_node;
2127 : 3 : return;
2128 : : }
2129 : 37396 : maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
2130 : : DECL_FIELD_OFFSET (repr));
2131 : 37396 : if (tree_fits_uhwi_p (maxsize))
2132 : : {
2133 : 74760 : maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
2134 : 37380 : + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
2135 : 37380 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2136 : : /* If the group ends within a bitfield nextf does not need to be
2137 : : aligned to BITS_PER_UNIT. Thus round up. */
2138 : 37380 : maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
2139 : : }
2140 : : else
2141 : : maxbitsize = bitsize;
2142 : : }
2143 : : else
2144 : : {
2145 : : /* Note that if the C++ FE sets up tail-padding to be re-used it
2146 : : creates a as-base variant of the type with TYPE_SIZE adjusted
2147 : : accordingly. So it is safe to include tail-padding here. */
2148 : 30530 : tree aggsize = lang_hooks.types.unit_size_without_reusable_padding
2149 : 30530 : (DECL_CONTEXT (field));
2150 : 30530 : tree maxsize = size_diffop (aggsize, DECL_FIELD_OFFSET (repr));
2151 : : /* We cannot generally rely on maxsize to fold to an integer constant,
2152 : : so use bitsize as fallback for this case. */
2153 : 30530 : if (tree_fits_uhwi_p (maxsize))
2154 : 61040 : maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
2155 : 30520 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2156 : : else
2157 : : maxbitsize = bitsize;
2158 : : }
2159 : :
2160 : : /* Only if we don't artificially break up the representative in
2161 : : the middle of a large bitfield with different possibly
2162 : : overlapping representatives. And all representatives start
2163 : : at byte offset. */
2164 : 67926 : gcc_assert (maxbitsize % BITS_PER_UNIT == 0);
2165 : :
2166 : : /* Find the smallest nice mode to use. */
2167 : 67926 : opt_scalar_int_mode mode_iter;
2168 : 241824 : FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2169 : 483376 : if (GET_MODE_BITSIZE (mode_iter.require ()) >= bitsize)
2170 : : break;
2171 : :
2172 : 67926 : scalar_int_mode mode;
2173 : 67926 : if (!mode_iter.exists (&mode)
2174 : 67790 : || GET_MODE_BITSIZE (mode) > maxbitsize
2175 : 171524 : || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)
2176 : : {
2177 : 16948 : if (TREE_CODE (TREE_TYPE (field)) == BITINT_TYPE)
2178 : : {
2179 : 45 : struct bitint_info info;
2180 : 45 : unsigned prec = TYPE_PRECISION (TREE_TYPE (field));
2181 : 45 : bool ok = targetm.c.bitint_type_info (prec, &info);
2182 : 45 : gcc_assert (ok);
2183 : 45 : scalar_int_mode limb_mode
2184 : 45 : = as_a <scalar_int_mode> (info.abi_limb_mode);
2185 : 45 : unsigned lprec = GET_MODE_PRECISION (limb_mode);
2186 : 45 : if (prec > lprec)
2187 : : {
2188 : : /* For middle/large/huge _BitInt prefer bitsize being a multiple
2189 : : of limb precision. */
2190 : 43 : unsigned HOST_WIDE_INT bsz = CEIL (bitsize, lprec) * lprec;
2191 : 43 : if (bsz <= maxbitsize)
2192 : 43 : bitsize = bsz;
2193 : : }
2194 : : }
2195 : : /* We really want a BLKmode representative only as a last resort,
2196 : : considering the member b in
2197 : : struct { int a : 7; int b : 17; int c; } __attribute__((packed));
2198 : : Otherwise we simply want to split the representative up
2199 : : allowing for overlaps within the bitfield region as required for
2200 : : struct { int a : 7; int b : 7;
2201 : : int c : 10; int d; } __attribute__((packed));
2202 : : [0, 15] HImode for a and b, [8, 23] HImode for c. */
2203 : 16948 : DECL_SIZE (repr) = bitsize_int (bitsize);
2204 : 16948 : DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
2205 : 16948 : SET_DECL_MODE (repr, BLKmode);
2206 : 16948 : TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
2207 : 33896 : bitsize / BITS_PER_UNIT);
2208 : : }
2209 : : else
2210 : : {
2211 : 50978 : unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
2212 : 50978 : DECL_SIZE (repr) = bitsize_int (modesize);
2213 : 50978 : DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
2214 : 50978 : SET_DECL_MODE (repr, mode);
2215 : 50978 : TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
2216 : : }
2217 : :
2218 : : /* Remember whether the bitfield group is at the end of the
2219 : : structure or not. */
2220 : 67926 : DECL_CHAIN (repr) = nextf;
2221 : : }
2222 : :
2223 : : /* Compute and set FIELD_DECLs for the underlying objects we should
2224 : : use for bitfield access for the structure T. */
2225 : :
2226 : : void
2227 : 41856897 : finish_bitfield_layout (tree t)
2228 : : {
2229 : 41856897 : tree field, prev;
2230 : 41856897 : tree repr = NULL_TREE;
2231 : :
2232 : 41856897 : if (TREE_CODE (t) == QUAL_UNION_TYPE)
2233 : : return;
2234 : :
2235 : 41856897 : for (prev = NULL_TREE, field = TYPE_FIELDS (t);
2236 : 355843555 : field; field = DECL_CHAIN (field))
2237 : : {
2238 : 313986658 : if (TREE_CODE (field) != FIELD_DECL)
2239 : 259335254 : continue;
2240 : :
2241 : : /* In the C++ memory model, consecutive bit fields in a structure are
2242 : : considered one memory location and updating a memory location
2243 : : may not store into adjacent memory locations. */
2244 : 54651404 : if (!repr
2245 : 54651404 : && DECL_BIT_FIELD_TYPE (field))
2246 : : {
2247 : : /* Start new representative. */
2248 : 67928 : repr = start_bitfield_representative (field);
2249 : : }
2250 : 54583476 : else if (repr
2251 : 54583476 : && ! DECL_BIT_FIELD_TYPE (field))
2252 : : {
2253 : : /* Finish off new representative. */
2254 : 36782 : finish_bitfield_representative (repr, prev);
2255 : 36782 : repr = NULL_TREE;
2256 : : }
2257 : 54546694 : else if (DECL_BIT_FIELD_TYPE (field))
2258 : : {
2259 : 348634 : gcc_assert (repr != NULL_TREE);
2260 : :
2261 : : /* Zero-size bitfields finish off a representative and
2262 : : do not have a representative themselves. This is
2263 : : required by the C++ memory model. */
2264 : 348634 : if (integer_zerop (DECL_SIZE (field)))
2265 : : {
2266 : 616 : finish_bitfield_representative (repr, prev);
2267 : 616 : repr = NULL_TREE;
2268 : : }
2269 : :
2270 : : /* We assume that either DECL_FIELD_OFFSET of the representative
2271 : : and each bitfield member is a constant or they are equal.
2272 : : This is because we need to be able to compute the bit-offset
2273 : : of each field relative to the representative in get_bit_range
2274 : : during RTL expansion.
2275 : : If these constraints are not met, simply force a new
2276 : : representative to be generated. That will at most
2277 : : generate worse code but still maintain correctness with
2278 : : respect to the C++ memory model. */
2279 : 348029 : else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))
2280 : 348007 : && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
2281 : 11 : || operand_equal_p (DECL_FIELD_OFFSET (repr),
2282 : 11 : DECL_FIELD_OFFSET (field), 0)))
2283 : : {
2284 : 1 : finish_bitfield_representative (repr, prev);
2285 : 1 : repr = start_bitfield_representative (field);
2286 : : }
2287 : : }
2288 : : else
2289 : 54198060 : continue;
2290 : :
2291 : 453344 : if (repr)
2292 : 415946 : DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;
2293 : :
2294 : 453344 : if (TREE_CODE (t) == RECORD_TYPE)
2295 : : prev = field;
2296 : 3412 : else if (repr)
2297 : : {
2298 : 3412 : finish_bitfield_representative (repr, field);
2299 : 3412 : repr = NULL_TREE;
2300 : : }
2301 : : }
2302 : :
2303 : 41856897 : if (repr)
2304 : 27118 : finish_bitfield_representative (repr, prev);
2305 : : }
2306 : :
2307 : : /* Do all of the work required to layout the type indicated by RLI,
2308 : : once the fields have been laid out. This function will call `free'
2309 : : for RLI, unless FREE_P is false. Passing a value other than false
2310 : : for FREE_P is bad practice; this option only exists to support the
2311 : : G++ 3.2 ABI. */
2312 : :
2313 : : void
2314 : 41856897 : finish_record_layout (record_layout_info rli, int free_p)
2315 : : {
2316 : 41856897 : tree variant;
2317 : :
2318 : : /* Compute the final size. */
2319 : 41856897 : finalize_record_size (rli);
2320 : :
2321 : : /* Compute the TYPE_MODE for the record. */
2322 : 41856897 : compute_record_mode (rli->t);
2323 : :
2324 : : /* Perform any last tweaks to the TYPE_SIZE, etc. */
2325 : 41856897 : finalize_type_size (rli->t);
2326 : :
2327 : : /* Compute bitfield representatives. */
2328 : 41856897 : finish_bitfield_layout (rli->t);
2329 : :
2330 : : /* Propagate TYPE_PACKED and TYPE_REVERSE_STORAGE_ORDER to variants.
2331 : : With C++ templates, it is too early to do this when the attribute
2332 : : is being parsed. */
2333 : 90966886 : for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
2334 : 49109989 : variant = TYPE_NEXT_VARIANT (variant))
2335 : : {
2336 : 49109989 : TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
2337 : 98219978 : TYPE_REVERSE_STORAGE_ORDER (variant)
2338 : 49109989 : = TYPE_REVERSE_STORAGE_ORDER (rli->t);
2339 : : }
2340 : :
2341 : : /* Lay out any static members. This is done now because their type
2342 : : may use the record's type. */
2343 : 51522996 : while (!vec_safe_is_empty (rli->pending_statics))
2344 : 9666099 : layout_decl (rli->pending_statics->pop (), 0);
2345 : :
2346 : : /* Clean up. */
2347 : 41856897 : if (free_p)
2348 : : {
2349 : 41856897 : vec_free (rli->pending_statics);
2350 : 41856897 : free (rli);
2351 : : }
2352 : 41856897 : }
2353 : :
2354 : :
2355 : : /* Finish processing a builtin RECORD_TYPE type TYPE. It's name is
2356 : : NAME, its fields are chained in reverse on FIELDS.
2357 : :
2358 : : If ALIGN_TYPE is non-null, it is given the same alignment as
2359 : : ALIGN_TYPE. */
2360 : :
2361 : : void
2362 : 1225408 : finish_builtin_struct (tree type, const char *name, tree fields,
2363 : : tree align_type)
2364 : : {
2365 : 1225408 : tree tail, next;
2366 : :
2367 : 3702780 : for (tail = NULL_TREE; fields; tail = fields, fields = next)
2368 : : {
2369 : 2477372 : DECL_FIELD_CONTEXT (fields) = type;
2370 : 2477372 : next = DECL_CHAIN (fields);
2371 : 2477372 : DECL_CHAIN (fields) = tail;
2372 : : }
2373 : 1225408 : TYPE_FIELDS (type) = tail;
2374 : :
2375 : 1225408 : if (align_type)
2376 : : {
2377 : 1111741 : SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type));
2378 : 1111741 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
2379 : 1111741 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2380 : : TYPE_WARN_IF_NOT_ALIGN (align_type));
2381 : : }
2382 : :
2383 : 1225408 : layout_type (type);
2384 : : #if 0 /* not yet, should get fixed properly later */
2385 : : TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
2386 : : #else
2387 : 1225408 : TYPE_NAME (type) = build_decl (BUILTINS_LOCATION,
2388 : : TYPE_DECL, get_identifier (name), type);
2389 : : #endif
2390 : 1225408 : TYPE_STUB_DECL (type) = TYPE_NAME (type);
2391 : 1225408 : layout_decl (TYPE_NAME (type), 0);
2392 : 1225408 : }
2393 : :
2394 : : /* Calculate the mode, size, and alignment for TYPE.
2395 : : For an array type, calculate the element separation as well.
2396 : : Record TYPE on the chain of permanent or temporary types
2397 : : so that dbxout will find out about it.
2398 : :
2399 : : TYPE_SIZE of a type is nonzero if the type has been laid out already.
2400 : : layout_type does nothing on such a type.
2401 : :
2402 : : If the type is incomplete, its TYPE_SIZE remains zero. */
2403 : :
2404 : : void
2405 : 2152572772 : layout_type (tree type)
2406 : : {
2407 : 2152572772 : gcc_assert (type);
2408 : :
2409 : 2152572772 : if (type == error_mark_node)
2410 : : return;
2411 : :
2412 : : /* We don't want finalize_type_size to copy an alignment attribute to
2413 : : variants that don't have it. */
2414 : 2152572772 : type = TYPE_MAIN_VARIANT (type);
2415 : :
2416 : : /* Do nothing if type has been laid out before. */
2417 : 2152572772 : if (TYPE_SIZE (type))
2418 : : return;
2419 : :
2420 : 1305904723 : switch (TREE_CODE (type))
2421 : : {
2422 : 0 : case LANG_TYPE:
2423 : : /* This kind of type is the responsibility
2424 : : of the language-specific code. */
2425 : 0 : gcc_unreachable ();
2426 : :
2427 : 7958650 : case BOOLEAN_TYPE:
2428 : 7958650 : case INTEGER_TYPE:
2429 : 7958650 : case ENUMERAL_TYPE:
2430 : 7958650 : {
2431 : 7958650 : scalar_int_mode mode
2432 : 7958650 : = smallest_int_mode_for_size (TYPE_PRECISION (type));
2433 : 7958650 : SET_TYPE_MODE (type, mode);
2434 : 15917300 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2435 : : /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */
2436 : 15917300 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2437 : 7958650 : break;
2438 : : }
2439 : :
2440 : 1505596 : case BITINT_TYPE:
2441 : 1505596 : {
2442 : 1505596 : struct bitint_info info;
2443 : 1505596 : int cnt;
2444 : 1505596 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
2445 : 1505596 : gcc_assert (ok);
2446 : 1505596 : scalar_int_mode limb_mode
2447 : 1505596 : = as_a <scalar_int_mode> (info.abi_limb_mode);
2448 : 1505596 : if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
2449 : : {
2450 : 1283 : SET_TYPE_MODE (type, limb_mode);
2451 : 1283 : gcc_assert (info.abi_limb_mode == info.limb_mode);
2452 : : cnt = 1;
2453 : : }
2454 : : else
2455 : : {
2456 : 1504313 : SET_TYPE_MODE (type, BLKmode);
2457 : 1504313 : cnt = CEIL (TYPE_PRECISION (type), GET_MODE_PRECISION (limb_mode));
2458 : 1504313 : gcc_assert (info.abi_limb_mode == info.limb_mode
2459 : : || !info.big_endian == !WORDS_BIG_ENDIAN);
2460 : : }
2461 : 3011192 : TYPE_SIZE (type) = bitsize_int (cnt * GET_MODE_BITSIZE (limb_mode));
2462 : 3011192 : TYPE_SIZE_UNIT (type) = size_int (cnt * GET_MODE_SIZE (limb_mode));
2463 : 1505596 : SET_TYPE_ALIGN (type, GET_MODE_ALIGNMENT (limb_mode));
2464 : 1505596 : if (cnt > 1)
2465 : : {
2466 : : /* Use same mode as compute_record_mode would use for a structure
2467 : : containing cnt limb_mode elements. */
2468 : 1504313 : machine_mode mode = mode_for_size_tree (TYPE_SIZE (type),
2469 : 1504313 : MODE_INT, 1).else_blk ();
2470 : 1504313 : if (mode == BLKmode)
2471 : : break;
2472 : 34570 : finalize_type_size (type);
2473 : 34570 : SET_TYPE_MODE (type, mode);
2474 : 34570 : if (STRICT_ALIGNMENT
2475 : : && !(TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
2476 : : || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
2477 : : {
2478 : : /* If this is the only reason this type is BLKmode, then
2479 : : don't force containing types to be BLKmode. */
2480 : : TYPE_NO_FORCE_BLK (type) = 1;
2481 : : SET_TYPE_MODE (type, BLKmode);
2482 : : }
2483 : 34570 : if (TYPE_NEXT_VARIANT (type) || type != TYPE_MAIN_VARIANT (type))
2484 : 0 : for (tree variant = TYPE_MAIN_VARIANT (type);
2485 : 0 : variant != NULL_TREE;
2486 : 0 : variant = TYPE_NEXT_VARIANT (variant))
2487 : : {
2488 : 0 : SET_TYPE_MODE (variant, mode);
2489 : 0 : if (STRICT_ALIGNMENT
2490 : : && !(TYPE_ALIGN (variant) >= BIGGEST_ALIGNMENT
2491 : : || (TYPE_ALIGN (variant)
2492 : : >= GET_MODE_ALIGNMENT (mode))))
2493 : : {
2494 : : TYPE_NO_FORCE_BLK (variant) = 1;
2495 : : SET_TYPE_MODE (variant, BLKmode);
2496 : : }
2497 : : }
2498 : 34570 : return;
2499 : : }
2500 : : break;
2501 : : }
2502 : :
2503 : 3875498 : case REAL_TYPE:
2504 : 3875498 : {
2505 : : /* Allow the caller to choose the type mode, which is how decimal
2506 : : floats are distinguished from binary ones. */
2507 : 3875498 : if (TYPE_MODE (type) == VOIDmode)
2508 : 2929979 : SET_TYPE_MODE
2509 : : (type, float_mode_for_size (TYPE_PRECISION (type)).require ());
2510 : 3875498 : scalar_float_mode mode = as_a <scalar_float_mode> (TYPE_MODE (type));
2511 : 7750996 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2512 : 7750996 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2513 : 3875498 : break;
2514 : : }
2515 : :
2516 : 10138752 : case FIXED_POINT_TYPE:
2517 : 10138752 : {
2518 : : /* TYPE_MODE (type) has been set already. */
2519 : 10138752 : scalar_mode mode = SCALAR_TYPE_MODE (type);
2520 : 20277504 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2521 : 20277504 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2522 : 10138752 : break;
2523 : : }
2524 : :
2525 : 5116177 : case COMPLEX_TYPE:
2526 : 5116177 : TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
2527 : 5116177 : if (TYPE_MODE (TREE_TYPE (type)) == BLKmode)
2528 : : {
2529 : 4552 : gcc_checking_assert (TREE_CODE (TREE_TYPE (type)) == BITINT_TYPE);
2530 : 4552 : SET_TYPE_MODE (type, BLKmode);
2531 : 4552 : TYPE_SIZE (type)
2532 : 4552 : = int_const_binop (MULT_EXPR, TYPE_SIZE (TREE_TYPE (type)),
2533 : 4552 : bitsize_int (2));
2534 : 4552 : TYPE_SIZE_UNIT (type)
2535 : 4552 : = int_const_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (type)),
2536 : 4552 : bitsize_int (2));
2537 : 4552 : break;
2538 : : }
2539 : 5111625 : SET_TYPE_MODE (type,
2540 : : GET_MODE_COMPLEX_MODE (TYPE_MODE (TREE_TYPE (type))));
2541 : :
2542 : 10223250 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
2543 : 10223250 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
2544 : 5111625 : break;
2545 : :
2546 : 91594291 : case VECTOR_TYPE:
2547 : 91594291 : {
2548 : 91594291 : poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type);
2549 : 91594291 : tree innertype = TREE_TYPE (type);
2550 : :
2551 : : /* Find an appropriate mode for the vector type. */
2552 : 91594291 : if (TYPE_MODE (type) == VOIDmode)
2553 : 45764136 : SET_TYPE_MODE (type,
2554 : : mode_for_vector (SCALAR_TYPE_MODE (innertype),
2555 : : nunits).else_blk ());
2556 : :
2557 : 91594291 : TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
2558 : 91594291 : TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
2559 : : /* Several boolean vector elements may fit in a single unit. */
2560 : 91594291 : if (VECTOR_BOOLEAN_TYPE_P (type)
2561 : 93304069 : && type->type_common.mode != BLKmode)
2562 : 1709775 : TYPE_SIZE_UNIT (type)
2563 : 5129325 : = size_int (GET_MODE_SIZE (type->type_common.mode));
2564 : : else
2565 : 89884516 : TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
2566 : 89884516 : TYPE_SIZE_UNIT (innertype),
2567 : 179769032 : size_int (nunits));
2568 : 91594291 : TYPE_SIZE (type) = int_const_binop
2569 : 91594291 : (MULT_EXPR,
2570 : 91594291 : bits_from_bytes (TYPE_SIZE_UNIT (type)),
2571 : 91594291 : bitsize_int (BITS_PER_UNIT));
2572 : :
2573 : : /* For vector types, we do not default to the mode's alignment.
2574 : : Instead, query a target hook, defaulting to natural alignment.
2575 : : This prevents ABI changes depending on whether or not native
2576 : : vector modes are supported. */
2577 : 91594291 : SET_TYPE_ALIGN (type, targetm.vector_alignment (type));
2578 : :
2579 : : /* However, if the underlying mode requires a bigger alignment than
2580 : : what the target hook provides, we cannot use the mode. For now,
2581 : : simply reject that case. */
2582 : 91594291 : gcc_assert (TYPE_ALIGN (type)
2583 : : >= GET_MODE_ALIGNMENT (TYPE_MODE (type)));
2584 : 91594291 : break;
2585 : : }
2586 : :
2587 : 281632 : case VOID_TYPE:
2588 : : /* This is an incomplete type and so doesn't have a size. */
2589 : 281632 : SET_TYPE_ALIGN (type, 1);
2590 : 281632 : TYPE_USER_ALIGN (type) = 0;
2591 : 281632 : SET_TYPE_MODE (type, VOIDmode);
2592 : 281632 : break;
2593 : :
2594 : 974256 : case OFFSET_TYPE:
2595 : 978317 : TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
2596 : 978317 : TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS);
2597 : : /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be
2598 : : integral, which may be an __intN. */
2599 : 978317 : SET_TYPE_MODE (type, int_mode_for_size (POINTER_SIZE, 0).require ());
2600 : 978317 : TYPE_PRECISION (type) = POINTER_SIZE;
2601 : 974256 : break;
2602 : :
2603 : 937969768 : case FUNCTION_TYPE:
2604 : 937969768 : case METHOD_TYPE:
2605 : : /* It's hard to see what the mode and size of a function ought to
2606 : : be, but we do know the alignment is FUNCTION_BOUNDARY, so
2607 : : make it consistent with that. */
2608 : 937969768 : SET_TYPE_MODE (type,
2609 : : int_mode_for_size (FUNCTION_BOUNDARY, 0).else_blk ());
2610 : 937969768 : TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
2611 : 937969768 : TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
2612 : 937969768 : break;
2613 : :
2614 : 184776235 : case POINTER_TYPE:
2615 : 184776235 : case REFERENCE_TYPE:
2616 : 184776235 : {
2617 : 184776235 : scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
2618 : 369552470 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2619 : 369552470 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2620 : 184776235 : TYPE_UNSIGNED (type) = 1;
2621 : 184776235 : TYPE_PRECISION (type) = GET_MODE_PRECISION (mode);
2622 : : }
2623 : 184776235 : break;
2624 : :
2625 : 56666920 : case ARRAY_TYPE:
2626 : 56666920 : {
2627 : 56666920 : tree index = TYPE_DOMAIN (type);
2628 : 56666920 : tree element = TREE_TYPE (type);
2629 : :
2630 : : /* We need to know both bounds in order to compute the size. */
2631 : 52433275 : if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
2632 : 107987592 : && TYPE_SIZE (element))
2633 : : {
2634 : 51259862 : tree ub = TYPE_MAX_VALUE (index);
2635 : 51259862 : tree lb = TYPE_MIN_VALUE (index);
2636 : 51259862 : tree element_size = TYPE_SIZE (element);
2637 : 51259862 : tree length;
2638 : :
2639 : : /* Make sure that an array of zero-sized element is zero-sized
2640 : : regardless of its extent. */
2641 : 51259862 : if (integer_zerop (element_size))
2642 : 3560 : length = size_zero_node;
2643 : :
2644 : : /* The computation should happen in the original signedness so
2645 : : that (possible) negative values are handled appropriately
2646 : : when determining overflow. */
2647 : : else
2648 : : {
2649 : : /* ??? When it is obvious that the range is signed
2650 : : represent it using ssizetype. */
2651 : 51256302 : if (TREE_CODE (lb) == INTEGER_CST
2652 : 51255667 : && TREE_CODE (ub) == INTEGER_CST
2653 : 51005276 : && TYPE_UNSIGNED (TREE_TYPE (lb))
2654 : 101551719 : && tree_int_cst_lt (ub, lb))
2655 : : {
2656 : 588 : lb = wide_int_to_tree (ssizetype,
2657 : 588 : offset_int::from (wi::to_wide (lb),
2658 : : SIGNED));
2659 : 588 : ub = wide_int_to_tree (ssizetype,
2660 : 1176 : offset_int::from (wi::to_wide (ub),
2661 : : SIGNED));
2662 : : }
2663 : 51256302 : length
2664 : 51256302 : = fold_convert (sizetype,
2665 : : size_binop (PLUS_EXPR,
2666 : : build_int_cst (TREE_TYPE (lb), 1),
2667 : : size_binop (MINUS_EXPR, ub, lb)));
2668 : : }
2669 : :
2670 : : /* ??? We have no way to distinguish a null-sized array from an
2671 : : array spanning the whole sizetype range, so we arbitrarily
2672 : : decide that [0, -1] is the only valid representation. */
2673 : 51259862 : if (integer_zerop (length)
2674 : 41070 : && TREE_OVERFLOW (length)
2675 : 51287966 : && integer_zerop (lb))
2676 : 28104 : length = size_zero_node;
2677 : :
2678 : 51259862 : TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
2679 : : bits_from_bytes (length));
2680 : :
2681 : : /* If we know the size of the element, calculate the total size
2682 : : directly, rather than do some division thing below. This
2683 : : optimization helps Fortran assumed-size arrays (where the
2684 : : size of the array is determined at runtime) substantially. */
2685 : 51259862 : if (TYPE_SIZE_UNIT (element))
2686 : 51259862 : TYPE_SIZE_UNIT (type)
2687 : 102519724 : = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
2688 : : }
2689 : :
2690 : : /* Now round the alignment and size,
2691 : : using machine-dependent criteria if any. */
2692 : :
2693 : 56666920 : unsigned align = TYPE_ALIGN (element);
2694 : 56666920 : if (TYPE_USER_ALIGN (type))
2695 : 2264 : align = MAX (align, TYPE_ALIGN (type));
2696 : : else
2697 : 56664656 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
2698 : 56666920 : if (!TYPE_WARN_IF_NOT_ALIGN (type))
2699 : 56666920 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2700 : : TYPE_WARN_IF_NOT_ALIGN (element));
2701 : : #ifdef ROUND_TYPE_ALIGN
2702 : : align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT);
2703 : : #else
2704 : 56666920 : align = MAX (align, BITS_PER_UNIT);
2705 : : #endif
2706 : 56666920 : SET_TYPE_ALIGN (type, align);
2707 : 56666920 : SET_TYPE_MODE (type, BLKmode);
2708 : 56666920 : if (TYPE_SIZE (type) != 0
2709 : 51259862 : && ! targetm.member_type_forces_blk (type, VOIDmode)
2710 : : /* BLKmode elements force BLKmode aggregate;
2711 : : else extract/store fields may lose. */
2712 : 107926782 : && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
2713 : 568416 : || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
2714 : : {
2715 : 50691446 : SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),
2716 : : TYPE_SIZE (type)));
2717 : 50691446 : if (TYPE_MODE (type) != BLKmode
2718 : : && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
2719 : : && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
2720 : : {
2721 : : TYPE_NO_FORCE_BLK (type) = 1;
2722 : : SET_TYPE_MODE (type, BLKmode);
2723 : : }
2724 : : }
2725 : 56666920 : if (AGGREGATE_TYPE_P (element))
2726 : 1180854 : TYPE_TYPELESS_STORAGE (type) = TYPE_TYPELESS_STORAGE (element);
2727 : : /* When the element size is constant, check that it is at least as
2728 : : large as the element alignment. */
2729 : 56666920 : if (TYPE_SIZE_UNIT (element)
2730 : 56596698 : && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
2731 : : /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
2732 : : TYPE_ALIGN_UNIT. */
2733 : 56566571 : && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
2734 : 113233471 : && !integer_zerop (TYPE_SIZE_UNIT (element)))
2735 : : {
2736 : 56560432 : if (compare_tree_int (TYPE_SIZE_UNIT (element),
2737 : 56560432 : TYPE_ALIGN_UNIT (element)) < 0)
2738 : 10 : error ("alignment of array elements is greater than "
2739 : : "element size");
2740 : 56560422 : else if (TYPE_ALIGN_UNIT (element) > 1
2741 : 76883598 : && (wi::zext (wi::to_wide (TYPE_SIZE_UNIT (element)),
2742 : 10161588 : ffs_hwi (TYPE_ALIGN_UNIT (element)) - 1)
2743 : 76883598 : != 0))
2744 : 7 : error ("size of array element is not a multiple of its "
2745 : : "alignment");
2746 : : }
2747 : : break;
2748 : : }
2749 : :
2750 : 5046948 : case RECORD_TYPE:
2751 : 5046948 : case UNION_TYPE:
2752 : 5046948 : case QUAL_UNION_TYPE:
2753 : 5046948 : {
2754 : 5046948 : tree field;
2755 : 5046948 : record_layout_info rli;
2756 : :
2757 : : /* Initialize the layout information. */
2758 : 5046948 : rli = start_record_layout (type);
2759 : :
2760 : : /* If this is a QUAL_UNION_TYPE, we want to process the fields
2761 : : in the reverse order in building the COND_EXPR that denotes
2762 : : its size. We reverse them again later. */
2763 : 5046948 : if (TREE_CODE (type) == QUAL_UNION_TYPE)
2764 : 0 : TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
2765 : :
2766 : : /* Place all the fields. */
2767 : 25326053 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2768 : 20279105 : place_field (rli, field);
2769 : :
2770 : 5046948 : if (TREE_CODE (type) == QUAL_UNION_TYPE)
2771 : 0 : TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
2772 : :
2773 : : /* Finish laying out the record. */
2774 : 5046948 : finish_record_layout (rli, /*free_p=*/true);
2775 : : }
2776 : 5046948 : break;
2777 : :
2778 : 0 : default:
2779 : 0 : gcc_unreachable ();
2780 : : }
2781 : :
2782 : : /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
2783 : : records and unions, finish_record_layout already called this
2784 : : function. */
2785 : 1305870153 : if (!RECORD_OR_UNION_TYPE_P (type))
2786 : 1300823205 : finalize_type_size (type);
2787 : :
2788 : : /* We should never see alias sets on incomplete aggregates. And we
2789 : : should not call layout_type on not incomplete aggregates. */
2790 : 1305870153 : if (AGGREGATE_TYPE_P (type))
2791 : 61713868 : gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
2792 : : }
2793 : :
2794 : : /* Return the least alignment required for type TYPE. */
2795 : :
2796 : : unsigned int
2797 : 33052342 : min_align_of_type (tree type)
2798 : : {
2799 : 33052342 : unsigned int align = TYPE_ALIGN (type);
2800 : 33052342 : if (!TYPE_USER_ALIGN (type))
2801 : : {
2802 : 60057721 : align = MIN (align, BIGGEST_ALIGNMENT);
2803 : : #ifdef BIGGEST_FIELD_ALIGNMENT
2804 : : align = MIN (align, BIGGEST_FIELD_ALIGNMENT);
2805 : : #endif
2806 : 30931964 : unsigned int field_align = align;
2807 : : #ifdef ADJUST_FIELD_ALIGN
2808 : 30931964 : field_align = ADJUST_FIELD_ALIGN (NULL_TREE, type, field_align);
2809 : : #endif
2810 : 30931964 : align = MIN (align, field_align);
2811 : : }
2812 : 33052342 : return align / BITS_PER_UNIT;
2813 : : }
2814 : :
2815 : : /* Create and return a type for signed integers of PRECISION bits. */
2816 : :
2817 : : tree
2818 : 2660417 : make_signed_type (int precision)
2819 : : {
2820 : 2660417 : tree type = make_node (INTEGER_TYPE);
2821 : :
2822 : 2660417 : TYPE_PRECISION (type) = precision;
2823 : :
2824 : 2660417 : fixup_signed_type (type);
2825 : 2660417 : return type;
2826 : : }
2827 : :
2828 : : /* Create and return a type for unsigned integers of PRECISION bits. */
2829 : :
2830 : : tree
2831 : 2519971 : make_unsigned_type (int precision)
2832 : : {
2833 : 2519971 : tree type = make_node (INTEGER_TYPE);
2834 : :
2835 : 2519971 : TYPE_PRECISION (type) = precision;
2836 : :
2837 : 2519971 : fixup_unsigned_type (type);
2838 : 2519971 : return type;
2839 : : }
2840 : :
2841 : : /* Create and return a type for fract of PRECISION bits, UNSIGNEDP,
2842 : : and SATP. */
2843 : :
2844 : : tree
2845 : 5632640 : make_fract_type (int precision, int unsignedp, int satp)
2846 : : {
2847 : 5632640 : tree type = make_node (FIXED_POINT_TYPE);
2848 : :
2849 : 5632640 : TYPE_PRECISION (type) = precision;
2850 : :
2851 : 5632640 : if (satp)
2852 : 2816320 : TYPE_SATURATING (type) = 1;
2853 : :
2854 : : /* Lay out the type: set its alignment, size, etc. */
2855 : 5632640 : TYPE_UNSIGNED (type) = unsignedp;
2856 : 5632640 : enum mode_class mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
2857 : 5632640 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2858 : 5632640 : layout_type (type);
2859 : :
2860 : 5632640 : return type;
2861 : : }
2862 : :
2863 : : /* Create and return a type for accum of PRECISION bits, UNSIGNEDP,
2864 : : and SATP. */
2865 : :
2866 : : tree
2867 : 4506112 : make_accum_type (int precision, int unsignedp, int satp)
2868 : : {
2869 : 4506112 : tree type = make_node (FIXED_POINT_TYPE);
2870 : :
2871 : 4506112 : TYPE_PRECISION (type) = precision;
2872 : :
2873 : 4506112 : if (satp)
2874 : 2253056 : TYPE_SATURATING (type) = 1;
2875 : :
2876 : : /* Lay out the type: set its alignment, size, etc. */
2877 : 4506112 : TYPE_UNSIGNED (type) = unsignedp;
2878 : 4506112 : enum mode_class mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
2879 : 4506112 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2880 : 4506112 : layout_type (type);
2881 : :
2882 : 4506112 : return type;
2883 : : }
2884 : :
2885 : : /* Initialize sizetypes so layout_type can use them. */
2886 : :
2887 : : void
2888 : 281632 : initialize_sizetypes (void)
2889 : : {
2890 : 281632 : int precision, bprecision;
2891 : :
2892 : : /* Get sizetypes precision from the SIZE_TYPE target macro. */
2893 : 288567 : if (strcmp (SIZETYPE, "unsigned int") == 0)
2894 : 6935 : precision = INT_TYPE_SIZE;
2895 : 274697 : else if (strcmp (SIZETYPE, "long unsigned int") == 0)
2896 : 274697 : precision = LONG_TYPE_SIZE;
2897 : 0 : else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
2898 : 0 : precision = LONG_LONG_TYPE_SIZE;
2899 : 0 : else if (strcmp (SIZETYPE, "short unsigned int") == 0)
2900 : 0 : precision = SHORT_TYPE_SIZE;
2901 : : else
2902 : : {
2903 : 0 : int i;
2904 : :
2905 : 0 : precision = -1;
2906 : 0 : for (i = 0; i < NUM_INT_N_ENTS; i++)
2907 : 0 : if (int_n_enabled_p[i])
2908 : : {
2909 : 0 : char name[50], altname[50];
2910 : 0 : sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
2911 : 0 : sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
2912 : :
2913 : 0 : if (strcmp (name, SIZETYPE) == 0
2914 : 0 : || strcmp (altname, SIZETYPE) == 0)
2915 : : {
2916 : 0 : precision = int_n_data[i].bitsize;
2917 : : }
2918 : : }
2919 : 0 : if (precision == -1)
2920 : 0 : gcc_unreachable ();
2921 : : }
2922 : :
2923 : 281632 : bprecision
2924 : 563264 : = MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE);
2925 : 281632 : bprecision = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision));
2926 : 281632 : if (bprecision > HOST_BITS_PER_DOUBLE_INT)
2927 : 0 : bprecision = HOST_BITS_PER_DOUBLE_INT;
2928 : :
2929 : : /* Create stubs for sizetype and bitsizetype so we can create constants. */
2930 : 281632 : sizetype = make_node (INTEGER_TYPE);
2931 : 281632 : TYPE_NAME (sizetype) = get_identifier ("sizetype");
2932 : 281632 : TYPE_PRECISION (sizetype) = precision;
2933 : 281632 : TYPE_UNSIGNED (sizetype) = 1;
2934 : 281632 : bitsizetype = make_node (INTEGER_TYPE);
2935 : 281632 : TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
2936 : 281632 : TYPE_PRECISION (bitsizetype) = bprecision;
2937 : 281632 : TYPE_UNSIGNED (bitsizetype) = 1;
2938 : :
2939 : : /* Now layout both types manually. */
2940 : 281632 : scalar_int_mode mode = smallest_int_mode_for_size (precision);
2941 : 281632 : SET_TYPE_MODE (sizetype, mode);
2942 : 281632 : SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
2943 : 281632 : TYPE_SIZE (sizetype) = bitsize_int (precision);
2944 : 563264 : TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (mode));
2945 : 281632 : set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);
2946 : :
2947 : 281632 : mode = smallest_int_mode_for_size (bprecision);
2948 : 281632 : SET_TYPE_MODE (bitsizetype, mode);
2949 : 281632 : SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
2950 : 281632 : TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
2951 : 563264 : TYPE_SIZE_UNIT (bitsizetype) = size_int (GET_MODE_SIZE (mode));
2952 : 281632 : set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);
2953 : :
2954 : : /* Create the signed variants of *sizetype. */
2955 : 281632 : ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
2956 : 281632 : TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
2957 : 281632 : sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
2958 : 281632 : TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
2959 : 281632 : }
2960 : :
2961 : : /* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE
2962 : : or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2963 : : for TYPE, based on the PRECISION and whether or not the TYPE
2964 : : IS_UNSIGNED. PRECISION need not correspond to a width supported
2965 : : natively by the hardware; for example, on a machine with 8-bit,
2966 : : 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2967 : : 61. */
2968 : :
2969 : : void
2970 : 11171812 : set_min_and_max_values_for_integral_type (tree type,
2971 : : int precision,
2972 : : signop sgn)
2973 : : {
2974 : : /* For bitfields with zero width we end up creating integer types
2975 : : with zero precision. Don't assign any minimum/maximum values
2976 : : to those types, they don't have any valid value. */
2977 : 11171812 : if (precision < 1)
2978 : : return;
2979 : :
2980 : 11171554 : gcc_assert (precision <= WIDE_INT_MAX_PRECISION);
2981 : :
2982 : 11171554 : TYPE_MIN_VALUE (type)
2983 : 22343108 : = wide_int_to_tree (type, wi::min_value (precision, sgn));
2984 : 11171554 : TYPE_MAX_VALUE (type)
2985 : 22370771 : = wide_int_to_tree (type, wi::max_value (precision, sgn));
2986 : : }
2987 : :
2988 : : /* Set the extreme values of TYPE based on its precision in bits,
2989 : : then lay it out. Used when make_signed_type won't do
2990 : : because the tree code is not INTEGER_TYPE. */
2991 : :
2992 : : void
2993 : 5862124 : fixup_signed_type (tree type)
2994 : : {
2995 : 5862124 : int precision = TYPE_PRECISION (type);
2996 : :
2997 : 5862124 : set_min_and_max_values_for_integral_type (type, precision, SIGNED);
2998 : :
2999 : : /* Lay out the type: set its alignment, size, etc. */
3000 : 5862124 : layout_type (type);
3001 : 5862124 : }
3002 : :
3003 : : /* Set the extreme values of TYPE based on its precision in bits,
3004 : : then lay it out. This is used both in `make_unsigned_type'
3005 : : and for enumeral types. */
3006 : :
3007 : : void
3008 : 3419337 : fixup_unsigned_type (tree type)
3009 : : {
3010 : 3419337 : int precision = TYPE_PRECISION (type);
3011 : :
3012 : 3419337 : TYPE_UNSIGNED (type) = 1;
3013 : :
3014 : 3419337 : set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);
3015 : :
3016 : : /* Lay out the type: set its alignment, size, etc. */
3017 : 3419337 : layout_type (type);
3018 : 3419337 : }
3019 : :
3020 : : /* Construct an iterator for a bitfield that spans BITSIZE bits,
3021 : : starting at BITPOS.
3022 : :
3023 : : BITREGION_START is the bit position of the first bit in this
3024 : : sequence of bit fields. BITREGION_END is the last bit in this
3025 : : sequence. If these two fields are non-zero, we should restrict the
3026 : : memory access to that range. Otherwise, we are allowed to touch
3027 : : any adjacent non bit-fields.
3028 : :
3029 : : ALIGN is the alignment of the underlying object in bits.
3030 : : VOLATILEP says whether the bitfield is volatile. */
3031 : :
3032 : 1087564 : bit_field_mode_iterator
3033 : : ::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3034 : : poly_int64 bitregion_start,
3035 : : poly_int64 bitregion_end,
3036 : 1087564 : unsigned int align, bool volatilep)
3037 : 1087564 : : m_mode (NARROWEST_INT_MODE), m_bitsize (bitsize),
3038 : 1087564 : m_bitpos (bitpos), m_bitregion_start (bitregion_start),
3039 : 1087564 : m_bitregion_end (bitregion_end), m_align (align),
3040 : 1087564 : m_volatilep (volatilep), m_count (0)
3041 : : {
3042 : 1087564 : if (known_eq (m_bitregion_end, 0))
3043 : : {
3044 : : /* We can assume that any aligned chunk of ALIGN bits that overlaps
3045 : : the bitfield is mapped and won't trap, provided that ALIGN isn't
3046 : : too large. The cap is the biggest required alignment for data,
3047 : : or at least the word size. And force one such chunk at least. */
3048 : 708540 : unsigned HOST_WIDE_INT units
3049 : 1256223 : = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
3050 : 354270 : if (bitsize <= 0)
3051 : : bitsize = 1;
3052 : 354270 : HOST_WIDE_INT end = bitpos + bitsize + units - 1;
3053 : 354270 : m_bitregion_end = end - end % units - 1;
3054 : : }
3055 : 1087564 : }
3056 : :
3057 : : /* Calls to this function return successively larger modes that can be used
3058 : : to represent the bitfield. Return true if another bitfield mode is
3059 : : available, storing it in *OUT_MODE if so. */
3060 : :
3061 : : bool
3062 : 1088510 : bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
3063 : : {
3064 : 1088510 : scalar_int_mode mode;
3065 : 1836679 : for (; m_mode.exists (&mode); m_mode = GET_MODE_WIDER_MODE (mode))
3066 : : {
3067 : 1836679 : unsigned int unit = GET_MODE_BITSIZE (mode);
3068 : :
3069 : : /* Skip modes that don't have full precision. */
3070 : 1836679 : if (unit != GET_MODE_PRECISION (mode))
3071 : 748169 : continue;
3072 : :
3073 : : /* Stop if the mode is too wide to handle efficiently. */
3074 : 3673358 : if (unit > MAX_FIXED_MODE_SIZE)
3075 : : break;
3076 : :
3077 : : /* Don't deliver more than one multiword mode; the smallest one
3078 : : should be used. */
3079 : 1824626 : if (m_count > 0 && unit > BITS_PER_WORD)
3080 : : break;
3081 : :
3082 : : /* Skip modes that are too small. */
3083 : 1824526 : unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
3084 : 1824526 : unsigned HOST_WIDE_INT subend = substart + m_bitsize;
3085 : 1824526 : if (subend > unit)
3086 : 748169 : continue;
3087 : :
3088 : : /* Stop if the mode goes outside the bitregion. */
3089 : 1076357 : HOST_WIDE_INT start = m_bitpos - substart;
3090 : 1076357 : if (maybe_ne (m_bitregion_start, 0)
3091 : 1076357 : && maybe_lt (start, m_bitregion_start))
3092 : : break;
3093 : 1076326 : HOST_WIDE_INT end = start + unit;
3094 : 1076326 : if (maybe_gt (end, m_bitregion_end + 1))
3095 : : break;
3096 : :
3097 : : /* Stop if the mode requires too much alignment. */
3098 : 1061031 : if (GET_MODE_ALIGNMENT (mode) > m_align
3099 : 1061031 : && targetm.slow_unaligned_access (mode, m_align))
3100 : : break;
3101 : :
3102 : 1061031 : *out_mode = mode;
3103 : 1061031 : m_mode = GET_MODE_WIDER_MODE (mode);
3104 : 1061031 : m_count++;
3105 : 1061031 : return true;
3106 : : }
3107 : : return false;
3108 : : }
3109 : :
3110 : : /* Return true if smaller modes are generally preferred for this kind
3111 : : of bitfield. */
3112 : :
3113 : : bool
3114 : 1046834 : bit_field_mode_iterator::prefer_smaller_modes ()
3115 : : {
3116 : 1046834 : return (m_volatilep
3117 : 1046834 : ? targetm.narrow_volatile_bitfield ()
3118 : 1046834 : : !SLOW_BYTE_ACCESS);
3119 : : }
3120 : :
3121 : : /* Find the best machine mode to use when referencing a bit field of length
3122 : : BITSIZE bits starting at BITPOS.
3123 : :
3124 : : BITREGION_START is the bit position of the first bit in this
3125 : : sequence of bit fields. BITREGION_END is the last bit in this
3126 : : sequence. If these two fields are non-zero, we should restrict the
3127 : : memory access to that range. Otherwise, we are allowed to touch
3128 : : any adjacent non bit-fields.
3129 : :
3130 : : The chosen mode must have no more than LARGEST_MODE_BITSIZE bits.
3131 : : INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller
3132 : : doesn't want to apply a specific limit.
3133 : :
3134 : : If no mode meets all these conditions, we return VOIDmode.
3135 : :
3136 : : The underlying object is known to be aligned to a boundary of ALIGN bits.
3137 : :
3138 : : If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
3139 : : smallest mode meeting these conditions.
3140 : :
3141 : : If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
3142 : : largest mode (but a mode no wider than UNITS_PER_WORD) that meets
3143 : : all the conditions.
3144 : :
3145 : : If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
3146 : : decide which of the above modes should be used. */
3147 : :
3148 : : bool
3149 : 886367 : get_best_mode (int bitsize, int bitpos,
3150 : : poly_uint64 bitregion_start, poly_uint64 bitregion_end,
3151 : : unsigned int align,
3152 : : unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
3153 : : scalar_int_mode *best_mode)
3154 : : {
3155 : 886367 : bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
3156 : 886367 : bitregion_end, align, volatilep);
3157 : 886367 : scalar_int_mode mode;
3158 : 886367 : bool found = false;
3159 : 886367 : while (iter.next_mode (&mode)
3160 : : /* ??? For historical reasons, reject modes that would normally
3161 : : receive greater alignment, even if unaligned accesses are
3162 : : acceptable. This has both advantages and disadvantages.
3163 : : Removing this check means that something like:
3164 : :
3165 : : struct s { unsigned int x; unsigned int y; };
3166 : : int f (struct s *s) { return s->x == 0 && s->y == 0; }
3167 : :
3168 : : can be implemented using a single load and compare on
3169 : : 64-bit machines that have no alignment restrictions.
3170 : : For example, on powerpc64-linux-gnu, we would generate:
3171 : :
3172 : : ld 3,0(3)
3173 : : cntlzd 3,3
3174 : : srdi 3,3,6
3175 : : blr
3176 : :
3177 : : rather than:
3178 : :
3179 : : lwz 9,0(3)
3180 : : cmpwi 7,9,0
3181 : : bne 7,.L3
3182 : : lwz 3,4(3)
3183 : : cntlzw 3,3
3184 : : srwi 3,3,5
3185 : : extsw 3,3
3186 : : blr
3187 : : .p2align 4,,15
3188 : : .L3:
3189 : : li 3,0
3190 : : blr
3191 : :
3192 : : However, accessing more than one field can make life harder
3193 : : for the gimple optimizers. For example, gcc.dg/vect/bb-slp-5.c
3194 : : has a series of unsigned short copies followed by a series of
3195 : : unsigned short comparisons. With this check, both the copies
3196 : : and comparisons remain 16-bit accesses and FRE is able
3197 : : to eliminate the latter. Without the check, the comparisons
3198 : : can be done using 2 64-bit operations, which FRE isn't able
3199 : : to handle in the same way.
3200 : :
3201 : : Either way, it would probably be worth disabling this check
3202 : : during expand. One particular example where removing the
3203 : : check would help is the get_best_mode call in store_bit_field.
3204 : : If we are given a memory bitregion of 128 bits that is aligned
3205 : : to a 64-bit boundary, and the bitfield we want to modify is
3206 : : in the second half of the bitregion, this check causes
3207 : : store_bitfield to turn the memory into a 64-bit reference
3208 : : to the _first_ half of the region. We later use
3209 : : adjust_bitfield_address to get a reference to the correct half,
3210 : : but doing so looks to adjust_bitfield_address as though we are
3211 : : moving past the end of the original object, so it drops the
3212 : : associated MEM_EXPR and MEM_OFFSET. Removing the check
3213 : : causes store_bit_field to keep a 128-bit memory reference,
3214 : : so that the final bitfield reference still has a MEM_EXPR
3215 : : and MEM_OFFSET. */
3216 : 866197 : && GET_MODE_ALIGNMENT (mode) <= align
3217 : 1742228 : && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize)
3218 : : {
3219 : 852229 : *best_mode = mode;
3220 : 852229 : found = true;
3221 : 852229 : if (iter.prefer_smaller_modes ())
3222 : : break;
3223 : : }
3224 : :
3225 : 886367 : return found;
3226 : : }
3227 : :
3228 : : /* Gets minimal and maximal values for MODE (signed or unsigned depending on
3229 : : SIGN). The returned constants are made to be usable in TARGET_MODE. */
3230 : :
3231 : : void
3232 : 47500252 : get_mode_bounds (scalar_int_mode mode, int sign,
3233 : : scalar_int_mode target_mode,
3234 : : rtx *mmin, rtx *mmax)
3235 : : {
3236 : 47500252 : unsigned size = GET_MODE_PRECISION (mode);
3237 : 47500252 : unsigned HOST_WIDE_INT min_val, max_val;
3238 : :
3239 : 47500252 : gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
3240 : :
3241 : : /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */
3242 : 47500252 : if (mode == BImode)
3243 : : {
3244 : : if (STORE_FLAG_VALUE < 0)
3245 : : {
3246 : : min_val = STORE_FLAG_VALUE;
3247 : : max_val = 0;
3248 : : }
3249 : : else
3250 : : {
3251 : : min_val = 0;
3252 : : max_val = STORE_FLAG_VALUE;
3253 : : }
3254 : : }
3255 : 47500252 : else if (sign)
3256 : : {
3257 : 42750249 : min_val = -(HOST_WIDE_INT_1U << (size - 1));
3258 : 42750249 : max_val = (HOST_WIDE_INT_1U << (size - 1)) - 1;
3259 : : }
3260 : : else
3261 : : {
3262 : 4750003 : min_val = 0;
3263 : 4750003 : max_val = (HOST_WIDE_INT_1U << (size - 1) << 1) - 1;
3264 : : }
3265 : :
3266 : 47500252 : *mmin = gen_int_mode (min_val, target_mode);
3267 : 47500252 : *mmax = gen_int_mode (max_val, target_mode);
3268 : 47500252 : }
3269 : :
3270 : : #include "gt-stor-layout.h"
|