Branch data Line data Source code
1 : : /* C-compiler utilities for types and variables storage layout
2 : : Copyright (C) 1987-2024 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "target.h"
25 : : #include "function.h"
26 : : #include "rtl.h"
27 : : #include "tree.h"
28 : : #include "memmodel.h"
29 : : #include "tm_p.h"
30 : : #include "stringpool.h"
31 : : #include "regs.h"
32 : : #include "emit-rtl.h"
33 : : #include "cgraph.h"
34 : : #include "diagnostic-core.h"
35 : : #include "fold-const.h"
36 : : #include "stor-layout.h"
37 : : #include "varasm.h"
38 : : #include "print-tree.h"
39 : : #include "langhooks.h"
40 : : #include "tree-inline.h"
41 : : #include "dumpfile.h"
42 : : #include "gimplify.h"
43 : : #include "attribs.h"
44 : : #include "debug.h"
45 : : #include "calls.h"
46 : :
47 : : /* Data type for the expressions representing sizes of data types.
48 : : It is the first integer type laid out. */
49 : : tree sizetype_tab[(int) stk_type_kind_last];
50 : :
51 : : /* If nonzero, this is an upper limit on alignment of structure fields.
52 : : The value is measured in bits. */
53 : : unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
54 : :
55 : : static tree self_referential_size (tree);
56 : : static void finalize_record_size (record_layout_info);
57 : : static void finalize_type_size (tree);
58 : : static void place_union_field (record_layout_info, tree);
59 : : static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
60 : : HOST_WIDE_INT, tree);
61 : : extern void debug_rli (record_layout_info);
62 : :
63 : : /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
64 : : to serve as the actual size-expression for a type or decl. */
65 : :
66 : : tree
67 : 565910 : variable_size (tree size)
68 : : {
69 : : /* Obviously. */
70 : 565910 : if (TREE_CONSTANT (size))
71 : : return size;
72 : :
73 : : /* If the size is self-referential, we can't make a SAVE_EXPR (see
74 : : save_expr for the rationale). But we can do something else. */
75 : 565848 : if (CONTAINS_PLACEHOLDER_P (size))
76 : 0 : return self_referential_size (size);
77 : :
78 : : /* If we are in the global binding level, we can't make a SAVE_EXPR
79 : : since it may end up being shared across functions, so it is up
80 : : to the front-end to deal with this case. */
81 : 565848 : if (lang_hooks.decls.global_bindings_p ())
82 : : return size;
83 : :
84 : 301723 : return save_expr (size);
85 : : }
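The SAVE_EXPR is what gives a nonconstant size its evaluate-once semantics. A standalone C sketch of the observable effect, using a C99 variable-length array (names hypothetical):

    #include <stdio.h>

    int main (void)
    {
      int n = 4;
      int a[n];                      /* the size expression is captured here */
      n = 100;                       /* later changes do not affect it */
      printf ("%zu\n", sizeof a);    /* 4 * sizeof (int) */
      return 0;
    }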
86 : :
87 : : /* An array of functions used for self-referential size computation. */
88 : : static GTY(()) vec<tree, va_gc> *size_functions;
89 : :
90 : : /* Return true if T is a self-referential component reference. */
91 : :
92 : : static bool
93 : 0 : self_referential_component_ref_p (tree t)
94 : : {
95 : 0 : if (TREE_CODE (t) != COMPONENT_REF)
96 : : return false;
97 : :
98 : 0 : while (REFERENCE_CLASS_P (t))
99 : 0 : t = TREE_OPERAND (t, 0);
100 : :
101 : 0 : return (TREE_CODE (t) == PLACEHOLDER_EXPR);
102 : : }
103 : :
104 : : /* Similar to copy_tree_r but do not copy component references involving
105 : : PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr
106 : : and substituted in substitute_in_expr. */
107 : :
108 : : static tree
109 : 0 : copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
110 : : {
111 : 0 : enum tree_code code = TREE_CODE (*tp);
112 : :
113 : : /* Stop at types, decls, constants like copy_tree_r. */
114 : 0 : if (TREE_CODE_CLASS (code) == tcc_type
115 : : || TREE_CODE_CLASS (code) == tcc_declaration
116 : 0 : || TREE_CODE_CLASS (code) == tcc_constant)
117 : : {
118 : 0 : *walk_subtrees = 0;
119 : 0 : return NULL_TREE;
120 : : }
121 : :
122 : : /* This is the pattern built in ada/make_aligning_type. */
123 : 0 : else if (code == ADDR_EXPR
124 : 0 : && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
125 : : {
126 : 0 : *walk_subtrees = 0;
127 : 0 : return NULL_TREE;
128 : : }
129 : :
130 : : /* Default case: the component reference. */
131 : 0 : else if (self_referential_component_ref_p (*tp))
132 : : {
133 : 0 : *walk_subtrees = 0;
134 : 0 : return NULL_TREE;
135 : : }
136 : :
137 : : /* We're not supposed to have them in self-referential size trees
138 : : because we wouldn't properly control when they are evaluated.
139 : : However, not creating superfluous SAVE_EXPRs requires accurate
140 : : tracking of readonly-ness all the way down to here, which we
141 : : cannot always guarantee in practice. So punt in this case. */
142 : 0 : else if (code == SAVE_EXPR)
143 : 0 : return error_mark_node;
144 : :
145 : 0 : else if (code == STATEMENT_LIST)
146 : 0 : gcc_unreachable ();
147 : :
148 : 0 : return copy_tree_r (tp, walk_subtrees, data);
149 : : }
150 : :
151 : : /* Given a SIZE expression that is self-referential, return an equivalent
152 : : expression to serve as the actual size expression for a type. */
153 : :
154 : : static tree
155 : 0 : self_referential_size (tree size)
156 : : {
157 : 0 : static unsigned HOST_WIDE_INT fnno = 0;
158 : 0 : vec<tree> self_refs = vNULL;
159 : 0 : tree param_type_list = NULL, param_decl_list = NULL;
160 : 0 : tree t, ref, return_type, fntype, fnname, fndecl;
161 : 0 : unsigned int i;
162 : 0 : char buf[128];
163 : 0 : vec<tree, va_gc> *args = NULL;
164 : :
165 : : /* Do not factor out simple operations. */
166 : 0 : t = skip_simple_constant_arithmetic (size);
167 : 0 : if (TREE_CODE (t) == CALL_EXPR || self_referential_component_ref_p (t))
168 : : return size;
169 : :
170 : : /* Collect the list of self-references in the expression. */
171 : 0 : find_placeholder_in_expr (size, &self_refs);
172 : 0 : gcc_assert (self_refs.length () > 0);
173 : :
174 : : /* Obtain a private copy of the expression. */
175 : 0 : t = size;
176 : 0 : if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
177 : : return size;
178 : 0 : size = t;
179 : :
180 : : /* Build the parameter and argument lists in parallel; also
181 : : substitute the former for the latter in the expression. */
182 : 0 : vec_alloc (args, self_refs.length ());
183 : 0 : FOR_EACH_VEC_ELT (self_refs, i, ref)
184 : : {
185 : 0 : tree subst, param_name, param_type, param_decl;
186 : :
187 : 0 : if (DECL_P (ref))
188 : : {
189 : : /* We shouldn't have true variables here. */
190 : 0 : gcc_assert (TREE_READONLY (ref));
191 : : subst = ref;
192 : : }
193 : : /* This is the pattern built in ada/make_aligning_type. */
194 : 0 : else if (TREE_CODE (ref) == ADDR_EXPR)
195 : : subst = ref;
196 : : /* Default case: the component reference. */
197 : : else
198 : 0 : subst = TREE_OPERAND (ref, 1);
199 : :
200 : 0 : sprintf (buf, "p%d", i);
201 : 0 : param_name = get_identifier (buf);
202 : 0 : param_type = TREE_TYPE (ref);
203 : 0 : param_decl
204 : 0 : = build_decl (input_location, PARM_DECL, param_name, param_type);
205 : 0 : DECL_ARG_TYPE (param_decl) = param_type;
206 : 0 : DECL_ARTIFICIAL (param_decl) = 1;
207 : 0 : TREE_READONLY (param_decl) = 1;
208 : :
209 : 0 : size = substitute_in_expr (size, subst, param_decl);
210 : :
211 : 0 : param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
212 : 0 : param_decl_list = chainon (param_decl, param_decl_list);
213 : 0 : args->quick_push (ref);
214 : : }
215 : :
216 : 0 : self_refs.release ();
217 : :
218 : : /* Append 'void' to indicate that the number of parameters is fixed. */
219 : 0 : param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
220 : :
221 : : /* The 3 lists have been created in reverse order. */
222 : 0 : param_type_list = nreverse (param_type_list);
223 : 0 : param_decl_list = nreverse (param_decl_list);
224 : :
225 : : /* Build the function type. */
226 : 0 : return_type = TREE_TYPE (size);
227 : 0 : fntype = build_function_type (return_type, param_type_list);
228 : :
229 : : /* Build the function declaration. */
230 : 0 : sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
231 : 0 : fnname = get_file_function_name (buf);
232 : 0 : fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
233 : 0 : for (t = param_decl_list; t; t = DECL_CHAIN (t))
234 : 0 : DECL_CONTEXT (t) = fndecl;
235 : 0 : DECL_ARGUMENTS (fndecl) = param_decl_list;
236 : 0 : DECL_RESULT (fndecl)
237 : 0 : = build_decl (input_location, RESULT_DECL, 0, return_type);
238 : 0 : DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
239 : :
240 : : /* The function has been created by the compiler and we don't
241 : : want to emit debug info for it. */
242 : 0 : DECL_ARTIFICIAL (fndecl) = 1;
243 : 0 : DECL_IGNORED_P (fndecl) = 1;
244 : :
245 : : /* It is supposed to be "const" and never throw. */
246 : 0 : TREE_READONLY (fndecl) = 1;
247 : 0 : TREE_NOTHROW (fndecl) = 1;
248 : :
249 : : /* We want it to be inlined when this is deemed profitable, as
250 : : well as discarded if every call has been integrated. */
251 : 0 : DECL_DECLARED_INLINE_P (fndecl) = 1;
252 : :
253 : : /* It is made up of a unique return statement. */
254 : 0 : DECL_INITIAL (fndecl) = make_node (BLOCK);
255 : 0 : BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
256 : 0 : t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
257 : 0 : DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
258 : 0 : TREE_STATIC (fndecl) = 1;
259 : :
260 : : /* Put it onto the list of size functions. */
261 : 0 : vec_safe_push (size_functions, fndecl);
262 : :
263 : : /* Replace the original expression with a call to the size function. */
264 : 0 : return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
265 : : }
266 : :
267 : : /* Take, queue and compile all the size functions. It is essential that
268 : : the size functions be gimplified at the very end of the compilation
269 : : in order to guarantee transparent handling of self-referential sizes.
270 : : Otherwise the GENERIC inliner would not be able to inline them back
271 : : at each of their call sites, thus creating artificial non-constant
272 : : size expressions which would trigger nasty problems later on. */
273 : :
274 : : void
275 : 250384 : finalize_size_functions (void)
276 : : {
277 : 250384 : unsigned int i;
278 : 250384 : tree fndecl;
279 : :
280 : 250384 : for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
281 : : {
282 : 0 : allocate_struct_function (fndecl, false);
283 : 0 : set_cfun (NULL);
284 : 0 : dump_function (TDI_original, fndecl);
285 : :
286 : : /* As these functions are used to describe the layout of variable-length
287 : : structures, debug info generation needs their implementation. */
288 : 0 : debug_hooks->size_function (fndecl);
289 : 0 : gimplify_function_tree (fndecl);
290 : 0 : cgraph_node::finalize_function (fndecl, false);
291 : : }
292 : :
293 : 250384 : vec_free (size_functions);
294 : 250384 : }
295 : :
296 : : /* Return a machine mode of class MCLASS with SIZE bits of precision,
  297                 :             :    if one exists.  The mode may have padding bits as well as the SIZE
298 : : value bits. If LIMIT is nonzero, disregard modes wider than
299 : : MAX_FIXED_MODE_SIZE. */
300 : :
301 : : opt_machine_mode
302 : 1014444804 : mode_for_size (poly_uint64 size, enum mode_class mclass, int limit)
303 : : {
304 : 1014444804 : machine_mode mode;
305 : 1014444804 : int i;
306 : :
307 : 1136536850 : if (limit && maybe_gt (size, (unsigned int) MAX_FIXED_MODE_SIZE))
308 : 31391137 : return opt_machine_mode ();
309 : :
310 : : /* Get the first mode which has this size, in the specified class. */
311 : 1409996363 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
312 : 1382500423 : if (known_eq (GET_MODE_PRECISION (mode), size))
313 : 955557727 : return mode;
314 : :
315 : 27495940 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
316 : 54959740 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
317 : 27479870 : if (known_eq (int_n_data[i].bitsize, size)
318 : 27479870 : && int_n_enabled_p[i])
319 : 0 : return int_n_data[i].m;
320 : :
321 : 27495940 : return opt_machine_mode ();
322 : : }
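A minimal GCC-internal usage sketch; the 64-bit request is illustrative, and DImode is only the typical answer:

    machine_mode mode;
    if (mode_for_size (64, MODE_INT, /*limit=*/1).exists (&mode))
      {
        /* MODE is the first integer mode with exactly 64 bits of
           precision, e.g. DImode on most targets.  */
      }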
323 : :
324 : : /* Similar, except passed a tree node. */
325 : :
326 : : opt_machine_mode
327 : 116226094 : mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
328 : : {
329 : 116226094 : unsigned HOST_WIDE_INT uhwi;
330 : 116226094 : unsigned int ui;
331 : :
332 : 116226094 : if (!tree_fits_uhwi_p (size))
333 : 239700 : return opt_machine_mode ();
334 : 115986394 : uhwi = tree_to_uhwi (size);
335 : 115986394 : ui = uhwi;
336 : 115986394 : if (uhwi != ui)
337 : 527 : return opt_machine_mode ();
338 : 115985867 : return mode_for_size (ui, mclass, limit);
339 : : }
340 : :
341 : : /* Return the narrowest mode of class MCLASS that contains at least
342 : : SIZE bits, if such a mode exists. */
343 : :
344 : : opt_machine_mode
345 : 20108245 : smallest_mode_for_size (poly_uint64 size, enum mode_class mclass)
346 : : {
347 : 20108245 : machine_mode mode = VOIDmode;
348 : 20108245 : int i;
349 : :
350 : : /* Get the first mode which has at least this size, in the
351 : : specified class. */
352 : 68529180 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
353 : 68529180 : if (known_ge (GET_MODE_PRECISION (mode), size))
354 : : break;
355 : :
356 : 20108245 : if (mode == VOIDmode)
357 : 0 : return opt_machine_mode ();
358 : :
359 : 20108245 : if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
360 : 40216490 : for (i = 0; i < NUM_INT_N_ENTS; i ++)
361 : 20108245 : if (known_ge (int_n_data[i].bitsize, size)
362 : 20108167 : && known_lt (int_n_data[i].bitsize, GET_MODE_PRECISION (mode))
363 : 20108245 : && int_n_enabled_p[i])
364 : 0 : mode = int_n_data[i].m;
365 : :
366 : 20108245 : return mode;
367 : : }
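In contrast to mode_for_size, this rounds up to the next wide-enough mode. A hedged sketch; SImode is the typical result on targets with a 16-bit HImode, not a guarantee:

    machine_mode mode;
    if (smallest_mode_for_size (17, MODE_INT).exists (&mode))
      {
        /* The narrowest integer mode with at least 17 bits of
           precision: HImode (16 bits) is too small, so typically
           SImode.  */
      }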
368 : :
369 : : /* Return an integer mode of exactly the same size as MODE, if one exists. */
370 : :
371 : : opt_scalar_int_mode
372 : 1711164 : int_mode_for_mode (machine_mode mode)
373 : : {
374 : 1711164 : switch (GET_MODE_CLASS (mode))
375 : : {
376 : 1280824 : case MODE_INT:
377 : 1280824 : case MODE_PARTIAL_INT:
378 : 1280824 : return as_a <scalar_int_mode> (mode);
379 : :
380 : 257478 : case MODE_COMPLEX_INT:
381 : 257478 : case MODE_COMPLEX_FLOAT:
382 : 257478 : case MODE_FLOAT:
383 : 257478 : case MODE_DECIMAL_FLOAT:
384 : 257478 : case MODE_FRACT:
385 : 257478 : case MODE_ACCUM:
386 : 257478 : case MODE_UFRACT:
387 : 257478 : case MODE_UACCUM:
388 : 257478 : case MODE_VECTOR_BOOL:
389 : 257478 : case MODE_VECTOR_INT:
390 : 257478 : case MODE_VECTOR_FLOAT:
391 : 257478 : case MODE_VECTOR_FRACT:
392 : 257478 : case MODE_VECTOR_ACCUM:
393 : 257478 : case MODE_VECTOR_UFRACT:
394 : 257478 : case MODE_VECTOR_UACCUM:
395 : 514956 : return int_mode_for_size (GET_MODE_BITSIZE (mode), 0);
396 : :
397 : 0 : case MODE_OPAQUE:
398 : 0 : return opt_scalar_int_mode ();
399 : :
400 : 172862 : case MODE_RANDOM:
401 : 172862 : if (mode == BLKmode)
402 : 172862 : return opt_scalar_int_mode ();
403 : :
404 : : /* fall through */
405 : :
406 : 0 : case MODE_CC:
407 : 0 : default:
408 : 0 : gcc_unreachable ();
409 : : }
410 : : }
411 : :
412 : : /* Find a mode that can be used for efficient bitwise operations on MODE,
413 : : if one exists. */
414 : :
415 : : opt_machine_mode
416 : 25135 : bitwise_mode_for_mode (machine_mode mode)
417 : : {
418 : : /* Quick exit if we already have a suitable mode. */
419 : 25135 : scalar_int_mode int_mode;
420 : 25135 : if (is_a <scalar_int_mode> (mode, &int_mode)
421 : 46850 : && GET_MODE_BITSIZE (int_mode) <= MAX_FIXED_MODE_SIZE)
422 : 23425 : return int_mode;
423 : :
424 : : /* Reuse the sanity checks from int_mode_for_mode. */
425 : 1710 : gcc_checking_assert ((int_mode_for_mode (mode), true));
426 : :
427 : 3420 : poly_int64 bitsize = GET_MODE_BITSIZE (mode);
428 : :
429 : : /* Try to replace complex modes with complex modes. In general we
430 : : expect both components to be processed independently, so we only
431 : : care whether there is a register for the inner mode. */
432 : 1710 : if (COMPLEX_MODE_P (mode))
433 : : {
434 : 400 : machine_mode trial = mode;
435 : 400 : if ((GET_MODE_CLASS (trial) == MODE_COMPLEX_INT
436 : 460 : || mode_for_size (bitsize, MODE_COMPLEX_INT, false).exists (&trial))
437 : 1072 : && have_regs_of_mode[GET_MODE_INNER (trial)])
438 : 276 : return trial;
439 : : }
440 : :
441 : : /* Try to replace vector modes with vector modes. Also try using vector
442 : : modes if an integer mode would be too big. */
443 : 1225 : if (VECTOR_MODE_P (mode)
444 : 3882 : || maybe_gt (bitsize, MAX_FIXED_MODE_SIZE))
445 : : {
446 : 347 : machine_mode trial = mode;
447 : 347 : if ((GET_MODE_CLASS (trial) == MODE_VECTOR_INT
448 : 211 : || mode_for_size (bitsize, MODE_VECTOR_INT, 0).exists (&trial))
449 : 283 : && have_regs_of_mode[trial]
450 : 557 : && targetm.vector_mode_supported_p (trial))
451 : 210 : return trial;
452 : : }
453 : :
454 : : /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE. */
455 : 1224 : return mode_for_size (bitsize, MODE_INT, true);
456 : : }
457 : :
458 : : /* Find a type that can be used for efficient bitwise operations on MODE.
459 : : Return null if no such mode exists. */
460 : :
461 : : tree
462 : 25135 : bitwise_type_for_mode (machine_mode mode)
463 : : {
464 : 25135 : if (!bitwise_mode_for_mode (mode).exists (&mode))
465 : 137 : return NULL_TREE;
466 : :
467 : 24998 : unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
468 : 24998 : tree inner_type = build_nonstandard_integer_type (inner_size, true);
469 : :
470 : 24998 : if (VECTOR_MODE_P (mode))
471 : 210 : return build_vector_type_for_mode (inner_type, mode);
472 : :
473 : 24788 : if (COMPLEX_MODE_P (mode))
474 : 276 : return build_complex_type (inner_type);
475 : :
476 : 49024 : gcc_checking_assert (GET_MODE_INNER (mode) == mode);
477 : : return inner_type;
478 : : }
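A hedged sketch of the round trip for a scalar float mode; the concrete modes assume a typical target with 32-bit SFmode and SImode:

    /* SFmode -> bitwise_mode_for_mode yields SImode -> the result is a
       32-bit unsigned integer type, usable for bit-twiddling the
       representation of a float.  */
    tree t = bitwise_type_for_mode (SFmode);
    if (t != NULL_TREE)
      gcc_checking_assert (TYPE_UNSIGNED (t));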
479 : :
480 : : /* Find a mode that can be used for efficient bitwise operations on SIZE
481 : : bits, if one exists. */
482 : :
483 : : opt_machine_mode
484 : 23596 : bitwise_mode_for_size (poly_uint64 size)
485 : : {
486 : 47192 : if (known_le (size, (unsigned int) MAX_FIXED_MODE_SIZE))
487 : 23383 : return mode_for_size (size, MODE_INT, true);
488 : :
489 : : machine_mode mode, ret = VOIDmode;
490 : 3715 : FOR_EACH_MODE_FROM (mode, MIN_MODE_VECTOR_INT)
491 : 7422 : if (known_eq (GET_MODE_BITSIZE (mode), size)
492 : 229 : && (ret == VOIDmode || GET_MODE_INNER (mode) == QImode)
493 : 229 : && have_regs_of_mode[mode]
494 : 3920 : && targetm.vector_mode_supported_p (mode))
495 : : {
496 : 418 : if (GET_MODE_INNER (mode) == QImode)
497 : 209 : return mode;
498 : 0 : else if (ret == VOIDmode)
499 : 3502 : ret = mode;
500 : : }
501 : 4 : if (ret != VOIDmode)
502 : 0 : return ret;
503 : 4 : return opt_machine_mode ();
504 : : }
505 : :
506 : : /* Find a mode that is suitable for representing a vector with NUNITS
507 : : elements of mode INNERMODE, if one exists. The returned mode can be
508 : : either an integer mode or a vector mode. */
509 : :
510 : : opt_machine_mode
511 : 100767841 : mode_for_vector (scalar_mode innermode, poly_uint64 nunits)
512 : : {
513 : 100767841 : machine_mode mode;
514 : :
515 : : /* First, look for a supported vector type. */
516 : 100767841 : if (SCALAR_FLOAT_MODE_P (innermode))
517 : : mode = MIN_MODE_VECTOR_FLOAT;
518 : 96482417 : else if (SCALAR_FRACT_MODE_P (innermode))
519 : : mode = MIN_MODE_VECTOR_FRACT;
520 : 96482417 : else if (SCALAR_UFRACT_MODE_P (innermode))
521 : : mode = MIN_MODE_VECTOR_UFRACT;
522 : 96482417 : else if (SCALAR_ACCUM_MODE_P (innermode))
523 : : mode = MIN_MODE_VECTOR_ACCUM;
524 : 96482417 : else if (SCALAR_UACCUM_MODE_P (innermode))
525 : : mode = MIN_MODE_VECTOR_UACCUM;
526 : : else
527 : 100767841 : mode = MIN_MODE_VECTOR_INT;
528 : :
529 : : /* Only check the broader vector_mode_supported_any_target_p here.
530 : : We'll filter through target-specific availability and
531 : : vector_mode_supported_p later in vector_type_mode. */
532 : 1191173352 : FOR_EACH_MODE_FROM (mode, mode)
533 : 2381702700 : if (known_eq (GET_MODE_NUNITS (mode), nunits)
534 : 578187222 : && GET_MODE_INNER (mode) == innermode
535 : 1291297189 : && targetm.vector_mode_supported_any_target_p (mode))
536 : 100445839 : return mode;
537 : :
538 : : /* For integers, try mapping it to a same-sized scalar mode. */
539 : 322002 : if (GET_MODE_CLASS (innermode) == MODE_INT)
540 : : {
541 : 15986 : poly_uint64 nbits = nunits * GET_MODE_BITSIZE (innermode);
542 : 15986 : if (int_mode_for_size (nbits, 0).exists (&mode)
543 : 15526 : && have_regs_of_mode[mode])
544 : 15526 : return mode;
545 : : }
546 : :
547 : 306476 : return opt_machine_mode ();
548 : : }
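A sketch of the two-step fallback; V4SImode and TImode are typical examples, not guarantees:

    machine_mode mode;
    if (mode_for_vector (SImode, 4).exists (&mode))
      {
        /* Preferably a vector mode such as V4SImode; failing that, a
           same-sized scalar integer mode such as TImode, provided the
           target has registers for it.  */
      }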
549 : :
550 : : /* If a piece of code is using vector mode VECTOR_MODE and also wants
551 : : to operate on elements of mode ELEMENT_MODE, return the vector mode
552 : : it should use for those elements. If NUNITS is nonzero, ensure that
553 : : the mode has exactly NUNITS elements, otherwise pick whichever vector
554 : : size pairs the most naturally with VECTOR_MODE; this may mean choosing
555 : : a mode with a different size and/or number of elements, depending on
556 : : what the target prefers. Return an empty opt_machine_mode if there
557 : : is no supported vector mode with the required properties.
558 : :
  559                 :             :    Unlike mode_for_vector, any returned mode is guaranteed to satisfy
560 : : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
561 : :
562 : : opt_machine_mode
563 : 50067068 : related_vector_mode (machine_mode vector_mode, scalar_mode element_mode,
564 : : poly_uint64 nunits)
565 : : {
566 : 50067068 : gcc_assert (VECTOR_MODE_P (vector_mode));
567 : 50067068 : return targetm.vectorize.related_mode (vector_mode, element_mode, nunits);
568 : : }
569 : :
570 : : /* If a piece of code is using vector mode VECTOR_MODE and also wants
571 : : to operate on integer vectors with the same element size and number
572 : : of elements, return the vector mode it should use. Return an empty
573 : : opt_machine_mode if there is no supported vector mode with the
574 : : required properties.
575 : :
  576                 :             :    Unlike mode_for_vector, any returned mode is guaranteed to satisfy
577 : : both VECTOR_MODE_P and targetm.vector_mode_supported_p. */
578 : :
579 : : opt_machine_mode
580 : 18258 : related_int_vector_mode (machine_mode vector_mode)
581 : : {
582 : 18258 : gcc_assert (VECTOR_MODE_P (vector_mode));
583 : 18258 : scalar_int_mode int_mode;
584 : 36516 : if (int_mode_for_mode (GET_MODE_INNER (vector_mode)).exists (&int_mode))
585 : 36516 : return related_vector_mode (vector_mode, int_mode,
586 : 18258 : GET_MODE_NUNITS (vector_mode));
587 : 0 : return opt_machine_mode ();
588 : : }
589 : :
590 : : /* Return the alignment of MODE. This will be bounded by 1 and
591 : : BIGGEST_ALIGNMENT. */
592 : :
593 : : unsigned int
594 : 1576599496 : get_mode_alignment (machine_mode mode)
595 : : {
596 : 3053434130 : return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
597 : : }
598 : :
599 : : /* Return the natural mode of an array, given that it is SIZE bytes in
600 : : total and has elements of type ELEM_TYPE. */
601 : :
602 : : static machine_mode
603 : 55792446 : mode_for_array (tree elem_type, tree size)
604 : : {
605 : 55792446 : tree elem_size;
606 : 55792446 : poly_uint64 int_size, int_elem_size;
607 : 55792446 : unsigned HOST_WIDE_INT num_elems;
608 : 55792446 : bool limit_p;
609 : :
610 : : /* One-element arrays get the component type's mode. */
611 : 55792446 : elem_size = TYPE_SIZE (elem_type);
612 : 55792446 : if (simple_cst_equal (size, elem_size))
613 : 2971526 : return TYPE_MODE (elem_type);
614 : :
615 : 52820920 : limit_p = true;
616 : 52820920 : if (poly_int_tree_p (size, &int_size)
617 : 52581220 : && poly_int_tree_p (elem_size, &int_elem_size)
618 : 52581220 : && maybe_ne (int_elem_size, 0U)
619 : 52820920 : && constant_multiple_p (int_size, int_elem_size, &num_elems))
620 : : {
621 : 52581220 : machine_mode elem_mode = TYPE_MODE (elem_type);
622 : 52581220 : machine_mode mode;
623 : 52581220 : if (targetm.array_mode (elem_mode, num_elems).exists (&mode))
624 : 0 : return mode;
625 : 52581220 : if (targetm.array_mode_supported_p (elem_mode, num_elems))
626 : 52820920 : limit_p = false;
627 : : }
628 : 52820920 : return mode_for_size_tree (size, MODE_INT, limit_p).else_blk ();
629 : : }
630 : :
631 : : /* Subroutine of layout_decl: Force alignment required for the data type.
632 : : But if the decl itself wants greater alignment, don't override that. */
633 : :
634 : : static inline void
635 : 1586637688 : do_type_align (tree type, tree decl)
636 : : {
637 : 1586637688 : if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
638 : : {
639 : 1534480117 : SET_DECL_ALIGN (decl, TYPE_ALIGN (type));
640 : 1534480117 : if (TREE_CODE (decl) == FIELD_DECL)
641 : 46739419 : DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
642 : : }
643 : 1586637688 : if (TYPE_WARN_IF_NOT_ALIGN (type) > DECL_WARN_IF_NOT_ALIGN (decl))
644 : 51 : SET_DECL_WARN_IF_NOT_ALIGN (decl, TYPE_WARN_IF_NOT_ALIGN (type));
645 : 1586637688 : }
646 : :
647 : : /* Set the size, mode and alignment of a ..._DECL node.
648 : : TYPE_DECL does need this for C++.
649 : : Note that LABEL_DECL and CONST_DECL nodes do not need this,
650 : : and FUNCTION_DECL nodes have them set up in a special (and simple) way.
651 : : Don't call layout_decl for them.
652 : :
653 : : KNOWN_ALIGN is the amount of alignment we can assume this
654 : : decl has with no special effort. It is relevant only for FIELD_DECLs
655 : : and depends on the previous fields.
656 : : All that matters about KNOWN_ALIGN is which powers of 2 divide it.
657 : : If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
658 : : the record will be aligned to suit. */
659 : :
660 : : void
661 : 1587062467 : layout_decl (tree decl, unsigned int known_align)
662 : : {
663 : 1587062467 : tree type = TREE_TYPE (decl);
664 : 1587062467 : enum tree_code code = TREE_CODE (decl);
665 : 1587062467 : rtx rtl = NULL_RTX;
666 : 1587062467 : location_t loc = DECL_SOURCE_LOCATION (decl);
667 : :
668 : 1587062467 : if (code == CONST_DECL)
669 : : return;
670 : :
671 : 1587062467 : gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
672 : : || code == TYPE_DECL || code == FIELD_DECL);
673 : :
674 : 1587062467 : rtl = DECL_RTL_IF_SET (decl);
675 : :
676 : 1587062467 : if (type == error_mark_node)
677 : 2807 : type = void_type_node;
678 : :
679 : : /* Usually the size and mode come from the data type without change,
680 : : however, the front-end may set the explicit width of the field, so its
681 : : size may not be the same as the size of its type. This happens with
682 : : bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
683 : : also happens with other fields. For example, the C++ front-end creates
684 : : zero-sized fields corresponding to empty base classes, and depends on
685 : : layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
686 : : size in bytes from the size in bits. If we have already set the mode,
687 : : don't set it again since we can be called twice for FIELD_DECLs. */
688 : :
689 : 1587062467 : DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
690 : 1587062467 : if (DECL_MODE (decl) == VOIDmode)
691 : 1547238189 : SET_DECL_MODE (decl, TYPE_MODE (type));
692 : :
693 : 1587062467 : if (DECL_SIZE (decl) == 0)
694 : : {
695 : 1547860952 : DECL_SIZE (decl) = TYPE_SIZE (type);
696 : 1547860952 : DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
697 : : }
698 : 39201515 : else if (DECL_SIZE_UNIT (decl) == 0)
699 : 416263 : DECL_SIZE_UNIT (decl)
700 : 832526 : = fold_convert_loc (loc, sizetype,
701 : 416263 : size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
702 : : bitsize_unit_node));
703 : :
704 : 1587062467 : if (code != FIELD_DECL)
705 : : /* For non-fields, update the alignment from the type. */
706 : 1535869255 : do_type_align (type, decl);
707 : : else
708 : : /* For fields, it's a bit more complicated... */
709 : : {
710 : 51193212 : bool old_user_align = DECL_USER_ALIGN (decl);
711 : 51193212 : bool zero_bitfield = false;
712 : 51193212 : bool packed_p = DECL_PACKED (decl);
713 : 51193212 : unsigned int mfa;
714 : :
715 : 51193212 : if (DECL_BIT_FIELD (decl))
716 : : {
717 : 425797 : DECL_BIT_FIELD_TYPE (decl) = type;
718 : :
719 : : /* A zero-length bit-field affects the alignment of the next
720 : : field. In essence such bit-fields are not influenced by
721 : : any packing due to #pragma pack or attribute packed. */
722 : 425797 : if (integer_zerop (DECL_SIZE (decl))
723 : 425797 : && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
724 : : {
725 : 2096 : zero_bitfield = true;
726 : 2096 : packed_p = false;
727 : 2096 : if (PCC_BITFIELD_TYPE_MATTERS)
728 : 2096 : do_type_align (type, decl);
729 : : else
730 : : {
731 : : #ifdef EMPTY_FIELD_BOUNDARY
732 : : if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
733 : : {
734 : : SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY);
735 : : DECL_USER_ALIGN (decl) = 0;
736 : : }
737 : : #endif
738 : : }
739 : : }
740 : :
741 : : /* See if we can use an ordinary integer mode for a bit-field.
742 : : Conditions are: a fixed size that is correct for another mode,
  743                 :             :          occupying a complete byte or bytes on a proper boundary.  */
744 : 425797 : if (TYPE_SIZE (type) != 0
745 : 425797 : && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
746 : 851594 : && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
747 : : {
748 : 425706 : machine_mode xmode;
749 : 425706 : if (mode_for_size_tree (DECL_SIZE (decl),
750 : 269210 : MODE_INT, 1).exists (&xmode))
751 : : {
752 : 156496 : unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
753 : 153321 : if (!(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
754 : 309547 : && (known_align == 0 || known_align >= xalign))
755 : : {
756 : 153393 : SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)));
757 : 153393 : SET_DECL_MODE (decl, xmode);
758 : 153393 : DECL_BIT_FIELD (decl) = 0;
759 : : }
760 : : }
761 : : }
762 : :
763 : : /* Turn off DECL_BIT_FIELD if we won't need it set. */
764 : 425888 : if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
765 : 91 : && known_align >= TYPE_ALIGN (type)
766 : 425803 : && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
767 : 1 : DECL_BIT_FIELD (decl) = 0;
768 : : }
769 : 50767415 : else if (packed_p && DECL_USER_ALIGN (decl))
770 : : /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
771 : : round up; we'll reduce it again below. We want packing to
772 : : supersede USER_ALIGN inherited from the type, but defer to
773 : : alignment explicitly specified on the field decl. */;
774 : : else
775 : 50766337 : do_type_align (type, decl);
776 : :
777 : : /* If the field is packed and not explicitly aligned, give it the
778 : : minimum alignment. Note that do_type_align may set
779 : : DECL_USER_ALIGN, so we need to check old_user_align instead. */
780 : 51193212 : if (packed_p
781 : 51193212 : && !old_user_align)
782 : 6410 : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT));
783 : :
784 : 51193212 : if (! packed_p && ! DECL_USER_ALIGN (decl))
785 : : {
  786                 :             :          /* Some targets (e.g. i386, VMS) limit struct field alignment
787 : : to a lower boundary than alignment of variables unless
788 : : it was overridden by attribute aligned. */
789 : : #ifdef BIGGEST_FIELD_ALIGNMENT
790 : : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),
791 : : (unsigned) BIGGEST_FIELD_ALIGNMENT));
792 : : #endif
793 : : #ifdef ADJUST_FIELD_ALIGN
794 : 50618154 : SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, TREE_TYPE (decl),
795 : : DECL_ALIGN (decl)));
796 : : #endif
797 : : }
798 : :
799 : 51193212 : if (zero_bitfield)
800 : 2096 : mfa = initial_max_fld_align * BITS_PER_UNIT;
801 : : else
802 : 51191116 : mfa = maximum_field_alignment;
803 : : /* Should this be controlled by DECL_USER_ALIGN, too? */
804 : 51193212 : if (mfa != 0)
805 : 550 : SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa));
806 : : }
807 : :
808 : : /* Evaluate nonconstant size only once, either now or as soon as safe. */
809 : 1587062467 : if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
810 : 25443 : DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
811 : 1587062467 : if (DECL_SIZE_UNIT (decl) != 0
812 : 1587062467 : && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
813 : 25443 : DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
814 : :
815 : : /* If requested, warn about definitions of large data objects. */
816 : 903693766 : if ((code == PARM_DECL || (code == VAR_DECL && !DECL_NONLOCAL_FRAME (decl)))
817 : 1798195514 : && !DECL_EXTERNAL (decl))
818 : : {
819 : 1095666005 : tree size = DECL_SIZE_UNIT (decl);
820 : :
821 : 1095666005 : if (size != 0 && TREE_CODE (size) == INTEGER_CST)
822 : : {
823 : : /* -Wlarger-than= argument of HOST_WIDE_INT_MAX is treated
824 : : as if PTRDIFF_MAX had been specified, with the value
825 : : being that on the target rather than the host. */
826 : 994228581 : unsigned HOST_WIDE_INT max_size = warn_larger_than_size;
827 : 994228581 : if (max_size == HOST_WIDE_INT_MAX)
828 : 994228520 : max_size = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
829 : :
830 : 994228581 : if (compare_tree_int (size, max_size) > 0)
831 : 23 : warning (OPT_Wlarger_than_, "size of %q+D %E bytes exceeds "
832 : : "maximum object size %wu",
833 : : decl, size, max_size);
834 : : }
835 : : }
836 : :
837 : : /* If the RTL was already set, update its mode and mem attributes. */
838 : 1587062467 : if (rtl)
839 : : {
840 : 36260 : PUT_MODE (rtl, DECL_MODE (decl));
841 : 36260 : SET_DECL_RTL (decl, 0);
842 : 36260 : if (MEM_P (rtl))
843 : 36260 : set_mem_attributes (rtl, decl, 1);
844 : 36260 : SET_DECL_RTL (decl, rtl);
845 : : }
846 : : }
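The zero-length bit-field rule handled above has a simple source-level counterpart. A standalone C illustration; the offsets assume 4-byte int alignment:

    #include <stddef.h>

    struct s1 { char a; char b; };           /* offsetof (struct s1, b) == 1 */
    struct s2 { char a; int : 0; char b; };  /* the zero-width bit-field
                                                aligns the next field to an
                                                int boundary, so
                                                offsetof (struct s2, b) == 4 */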
847 : :
848 : : /* Given a VAR_DECL, PARM_DECL, RESULT_DECL, or FIELD_DECL, clears the
849 : : results of a previous call to layout_decl and calls it again. */
850 : :
851 : : void
852 : 390041466 : relayout_decl (tree decl)
853 : : {
854 : 390041466 : DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
855 : 390041466 : SET_DECL_MODE (decl, VOIDmode);
856 : 390041466 : if (!DECL_USER_ALIGN (decl))
857 : 390040288 : SET_DECL_ALIGN (decl, 0);
858 : 390041466 : if (DECL_RTL_SET_P (decl))
859 : 0 : SET_DECL_RTL (decl, 0);
860 : :
861 : 390041466 : layout_decl (decl, 0);
862 : 390041466 : }
863 : :
864 : : /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
865 : : QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
866 : : is to be passed to all other layout functions for this record. It is the
867 : : responsibility of the caller to call `free' for the storage returned.
868 : : Note that garbage collection is not permitted until we finish laying
869 : : out the record. */
870 : :
871 : : record_layout_info
872 : 39927073 : start_record_layout (tree t)
873 : : {
874 : 39927073 : record_layout_info rli = XNEW (struct record_layout_info_s);
875 : :
876 : 39927073 : rli->t = t;
877 : :
878 : : /* If the type has a minimum specified alignment (via an attribute
879 : : declaration, for example) use it -- otherwise, start with a
880 : : one-byte alignment. */
881 : 39927073 : rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
882 : 39927073 : rli->unpacked_align = rli->record_align;
883 : 79560962 : rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
884 : :
885 : : #ifdef STRUCTURE_SIZE_BOUNDARY
886 : : /* Packed structures don't need to have minimum size. */
887 : : if (! TYPE_PACKED (t))
888 : : {
889 : : unsigned tmp;
890 : :
891 : : /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */
892 : : tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
893 : : if (maximum_field_alignment != 0)
894 : : tmp = MIN (tmp, maximum_field_alignment);
895 : : rli->record_align = MAX (rli->record_align, tmp);
896 : : }
897 : : #endif
898 : :
899 : 39927073 : rli->offset = size_zero_node;
900 : 39927073 : rli->bitpos = bitsize_zero_node;
901 : 39927073 : rli->prev_field = 0;
902 : 39927073 : rli->pending_statics = 0;
903 : 39927073 : rli->packed_maybe_necessary = 0;
904 : 39927073 : rli->remaining_in_alignment = 0;
905 : :
906 : 39927073 : return rli;
907 : : }
908 : :
909 : : /* Fold sizetype value X to bitsizetype, given that X represents a type
910 : : size or offset. */
911 : :
912 : : static tree
913 : 352797897 : bits_from_bytes (tree x)
914 : : {
915 : 352797897 : if (POLY_INT_CST_P (x))
916 : : /* The runtime calculation isn't allowed to overflow sizetype;
917 : : increasing the runtime values must always increase the size
918 : : or offset of the object. This means that the object imposes
919 : : a maximum value on the runtime parameters, but we don't record
920 : : what that is. */
921 : : return build_poly_int_cst
922 : : (bitsizetype,
923 : : poly_wide_int::from (poly_int_cst_value (x),
924 : : TYPE_PRECISION (bitsizetype),
925 : : TYPE_SIGN (TREE_TYPE (x))));
926 : 352797897 : x = fold_convert (bitsizetype, x);
927 : 352797897 : gcc_checking_assert (x);
928 : 352797897 : return x;
929 : : }
930 : :
931 : : /* Return the combined bit position for the byte offset OFFSET and the
932 : : bit position BITPOS.
933 : :
934 : : These functions operate on byte and bit positions present in FIELD_DECLs
935 : : and assume that these expressions result in no (intermediate) overflow.
936 : : This assumption is necessary to fold the expressions as much as possible,
937 : : so as to avoid creating artificially variable-sized types in languages
938 : : supporting variable-sized types like Ada. */
939 : :
940 : : tree
941 : 194347999 : bit_from_pos (tree offset, tree bitpos)
942 : : {
943 : 194347999 : return size_binop (PLUS_EXPR, bitpos,
944 : : size_binop (MULT_EXPR, bits_from_bytes (offset),
945 : : bitsize_unit_node));
946 : : }
947 : :
948 : : /* Return the combined truncated byte position for the byte offset OFFSET and
949 : : the bit position BITPOS. */
950 : :
951 : : tree
952 : 219191872 : byte_from_pos (tree offset, tree bitpos)
953 : : {
954 : 219191872 : tree bytepos;
955 : 219191872 : if (TREE_CODE (bitpos) == MULT_EXPR
956 : 219191872 : && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
957 : 0 : bytepos = TREE_OPERAND (bitpos, 0);
958 : : else
959 : 219191872 : bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
960 : 219191872 : return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
961 : : }
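A worked instance of the two conversions with plain integers, taking OFFSET = 5 bytes and BITPOS = 11 bits on a BITS_PER_UNIT == 8 target:

    unsigned bit  = 11 + 5 * 8;   /* bit_from_pos:  51 bits */
    unsigned byte = 5 + 11 / 8;   /* byte_from_pos: 6 bytes; the 3
                                     remaining bits are truncated */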
962 : :
963 : : /* Split the bit position POS into a byte offset *POFFSET and a bit
964 : : position *PBITPOS with the byte offset aligned to OFF_ALIGN bits. */
965 : :
966 : : void
967 : 49509015 : pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
968 : : tree pos)
969 : : {
970 : 49509015 : tree toff_align = bitsize_int (off_align);
971 : 49509015 : if (TREE_CODE (pos) == MULT_EXPR
972 : 49509015 : && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
973 : : {
974 : 0 : *poffset = size_binop (MULT_EXPR,
975 : : fold_convert (sizetype, TREE_OPERAND (pos, 0)),
976 : : size_int (off_align / BITS_PER_UNIT));
977 : 0 : *pbitpos = bitsize_zero_node;
978 : : }
979 : : else
980 : : {
981 : 49509015 : *poffset = size_binop (MULT_EXPR,
982 : : fold_convert (sizetype,
983 : : size_binop (FLOOR_DIV_EXPR, pos,
984 : : toff_align)),
985 : : size_int (off_align / BITS_PER_UNIT));
986 : 49509015 : *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
987 : : }
988 : 49509015 : }
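A worked instance of the split, taking POS = 70 bits and OFF_ALIGN = 32:

    /* *poffset = (70 / 32) * (32 / 8) = 2 * 4 = 8 bytes
       *pbitpos =  70 % 32             = 6 bits
       and indeed 8 * BITS_PER_UNIT + 6 == 70.  */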
989 : :
990 : : /* Given a pointer to bit and byte offsets and an offset alignment,
991 : : normalize the offsets so they are within the alignment. */
992 : :
993 : : void
994 : 155830070 : normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
995 : : {
996 : : /* If the bit position is now larger than it should be, adjust it
997 : : downwards. */
998 : 155830070 : if (compare_tree_int (*pbitpos, off_align) >= 0)
999 : : {
1000 : 49509015 : tree offset, bitpos;
1001 : 49509015 : pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
1002 : 49509015 : *poffset = size_binop (PLUS_EXPR, *poffset, offset);
1003 : 49509015 : *pbitpos = bitpos;
1004 : : }
1005 : 155830070 : }
1006 : :
1007 : : /* Print debugging information about the information in RLI. */
 1008                 :             : /* Print debugging information about the record layout state in RLI.  */
1009 : : DEBUG_FUNCTION void
1010 : 0 : debug_rli (record_layout_info rli)
1011 : : {
1012 : 0 : print_node_brief (stderr, "type", rli->t, 0);
1013 : 0 : print_node_brief (stderr, "\noffset", rli->offset, 0);
1014 : 0 : print_node_brief (stderr, " bitpos", rli->bitpos, 0);
1015 : :
1016 : 0 : fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
1017 : : rli->record_align, rli->unpacked_align,
1018 : : rli->offset_align);
1019 : :
 1020                 :             :   /* The ms_struct code is the only code that uses this.  */
1021 : 0 : if (targetm.ms_bitfield_layout_p (rli->t))
1022 : 0 : fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
1023 : :
1024 : 0 : if (rli->packed_maybe_necessary)
1025 : 0 : fprintf (stderr, "packed may be necessary\n");
1026 : :
1027 : 0 : if (!vec_safe_is_empty (rli->pending_statics))
1028 : : {
1029 : 0 : fprintf (stderr, "pending statics:\n");
1030 : 0 : debug (rli->pending_statics);
1031 : : }
1032 : 0 : }
1033 : :
1034 : : /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
1035 : : BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
1036 : :
1037 : : void
1038 : 155830070 : normalize_rli (record_layout_info rli)
1039 : : {
1040 : 155830070 : normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
1041 : 155830070 : }
1042 : :
1043 : : /* Returns the size in bytes allocated so far. */
1044 : :
1045 : : tree
1046 : 126089530 : rli_size_unit_so_far (record_layout_info rli)
1047 : : {
1048 : 126089530 : return byte_from_pos (rli->offset, rli->bitpos);
1049 : : }
1050 : :
1051 : : /* Returns the size in bits allocated so far. */
1052 : :
1053 : : tree
1054 : 106604745 : rli_size_so_far (record_layout_info rli)
1055 : : {
1056 : 106604745 : return bit_from_pos (rli->offset, rli->bitpos);
1057 : : }
1058 : :
1059 : : /* FIELD is about to be added to RLI->T. The alignment (in bits) of
1060 : : the next available location within the record is given by KNOWN_ALIGN.
1061 : : Update the variable alignment fields in RLI, and return the alignment
1062 : : to give the FIELD. */
1063 : :
1064 : : unsigned int
1065 : 49855677 : update_alignment_for_field (record_layout_info rli, tree field,
1066 : : unsigned int known_align)
1067 : : {
1068 : : /* The alignment required for FIELD. */
1069 : 49855677 : unsigned int desired_align;
1070 : : /* The type of this field. */
1071 : 49855677 : tree type = TREE_TYPE (field);
1072 : : /* True if the field was explicitly aligned by the user. */
1073 : 49855677 : bool user_align;
1074 : 49855677 : bool is_bitfield;
1075 : :
1076 : : /* Do not attempt to align an ERROR_MARK node */
 1077                 :             :   /* Do not attempt to align an ERROR_MARK node.  */
1078 : : return 0;
1079 : :
1080 : : /* Lay out the field so we know what alignment it needs. */
1081 : 49855674 : layout_decl (field, known_align);
1082 : 49855674 : desired_align = DECL_ALIGN (field);
1083 : 49855674 : user_align = DECL_USER_ALIGN (field);
1084 : :
1085 : 99711348 : is_bitfield = (type != error_mark_node
1086 : 49855674 : && DECL_BIT_FIELD_TYPE (field)
1087 : 50271937 : && ! integer_zerop (TYPE_SIZE (type)));
1088 : :
1089 : : /* Record must have at least as much alignment as any field.
1090 : : Otherwise, the alignment of the field within the record is
1091 : : meaningless. */
1092 : 49855674 : if (targetm.ms_bitfield_layout_p (rli->t))
1093 : : {
1094 : : /* Here, the alignment of the underlying type of a bitfield can
1095 : : affect the alignment of a record; even a zero-sized field
1096 : : can do this. The alignment should be to the alignment of
1097 : : the type, except that for zero-size bitfields this only
1098 : : applies if there was an immediately prior, nonzero-size
1099 : : bitfield. (That's the way it is, experimentally.) */
1100 : 206 : if (!is_bitfield
1101 : 206 : || ((DECL_SIZE (field) == NULL_TREE
1102 : 131 : || !integer_zerop (DECL_SIZE (field)))
1103 : 121 : ? !DECL_PACKED (field)
1104 : 10 : : (rli->prev_field
1105 : 8 : && DECL_BIT_FIELD_TYPE (rli->prev_field)
1106 : 8 : && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
1107 : : {
1108 : 202 : unsigned int type_align = TYPE_ALIGN (type);
1109 : 277 : if (!is_bitfield && DECL_PACKED (field))
1110 : : type_align = desired_align;
1111 : : else
1112 : 195 : type_align = MAX (type_align, desired_align);
1113 : 202 : if (maximum_field_alignment != 0)
1114 : 68 : type_align = MIN (type_align, maximum_field_alignment);
1115 : 202 : rli->record_align = MAX (rli->record_align, type_align);
1116 : 202 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1117 : : }
1118 : : }
1119 : 49855468 : else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
1120 : : {
1121 : : /* Named bit-fields cause the entire structure to have the
1122 : : alignment implied by their type. Some targets also apply the same
1123 : : rules to unnamed bitfields. */
1124 : 416132 : if (DECL_NAME (field) != 0
1125 : 416132 : || targetm.align_anon_bitfield ())
1126 : : {
1127 : 276963 : unsigned int type_align = TYPE_ALIGN (type);
1128 : :
1129 : : #ifdef ADJUST_FIELD_ALIGN
1130 : 276963 : if (! TYPE_USER_ALIGN (type))
1131 : 270109 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1132 : : #endif
1133 : :
 1134                 :             :           /* Targets might choose to handle unnamed and hence possibly
 1135                 :             :              zero-width bit-fields.  Those are not influenced by #pragmas
1136 : : or packed attributes. */
1137 : 276963 : if (integer_zerop (DECL_SIZE (field)))
1138 : : {
1139 : 0 : if (initial_max_fld_align)
1140 : 0 : type_align = MIN (type_align,
1141 : : initial_max_fld_align * BITS_PER_UNIT);
1142 : : }
1143 : 276963 : else if (maximum_field_alignment != 0)
1144 : 177 : type_align = MIN (type_align, maximum_field_alignment);
1145 : 276786 : else if (DECL_PACKED (field))
1146 : 2846 : type_align = MIN (type_align, BITS_PER_UNIT);
1147 : :
1148 : : /* The alignment of the record is increased to the maximum
1149 : : of the current alignment, the alignment indicated on the
1150 : : field (i.e., the alignment specified by an __aligned__
1151 : : attribute), and the alignment indicated by the type of
1152 : : the field. */
1153 : 276963 : rli->record_align = MAX (rli->record_align, desired_align);
1154 : 276963 : rli->record_align = MAX (rli->record_align, type_align);
1155 : :
1156 : 276963 : if (warn_packed)
1157 : 0 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1158 : 276963 : user_align |= TYPE_USER_ALIGN (type);
1159 : : }
1160 : : }
1161 : : else
1162 : : {
1163 : 49439336 : rli->record_align = MAX (rli->record_align, desired_align);
1164 : 49439336 : rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1165 : : }
1166 : :
1167 : 49855674 : TYPE_USER_ALIGN (rli->t) |= user_align;
1168 : :
1169 : 49855674 : return desired_align;
1170 : : }
1171 : :
 1172                 :             : /* Issue a warning if the record alignment, RECORD_ALIGN, is less than
 1173                 :             :    the warn-if-not-aligned alignment of FIELD, or if FIELD's offset isn't
 1174                 :             :    aligned to that amount.  */
1174 : :
1175 : : static void
1176 : 49847437 : handle_warn_if_not_align (tree field, unsigned int record_align)
1177 : : {
1178 : 49847437 : tree type = TREE_TYPE (field);
1179 : :
1180 : 49847437 : if (type == error_mark_node)
1181 : 49847437 : return;
1182 : :
1183 : 49847398 : unsigned int warn_if_not_align = 0;
1184 : :
1185 : 49847398 : int opt_w = 0;
1186 : :
1187 : 49847398 : if (warn_if_not_aligned)
1188 : : {
1189 : 49847258 : warn_if_not_align = DECL_WARN_IF_NOT_ALIGN (field);
1190 : 49847258 : if (!warn_if_not_align)
1191 : 49847211 : warn_if_not_align = TYPE_WARN_IF_NOT_ALIGN (type);
1192 : 47 : if (warn_if_not_align)
1193 : : opt_w = OPT_Wif_not_aligned;
1194 : : }
1195 : :
1196 : : if (!warn_if_not_align
1197 : 49847351 : && warn_packed_not_aligned
1198 : 2519682 : && lookup_attribute ("aligned", TYPE_ATTRIBUTES (type)))
1199 : : {
1200 : 115 : warn_if_not_align = TYPE_ALIGN (type);
1201 : 115 : opt_w = OPT_Wpacked_not_aligned;
1202 : : }
1203 : :
1204 : 49847398 : if (!warn_if_not_align)
1205 : 49847236 : return;
1206 : :
1207 : 162 : tree context = DECL_CONTEXT (field);
1208 : :
1209 : 162 : warn_if_not_align /= BITS_PER_UNIT;
1210 : 162 : record_align /= BITS_PER_UNIT;
1211 : 162 : if ((record_align % warn_if_not_align) != 0)
1212 : 36 : warning (opt_w, "alignment %u of %qT is less than %u",
1213 : : record_align, context, warn_if_not_align);
1214 : :
1215 : 162 : tree off = byte_position (field);
1216 : 162 : if (!multiple_of_p (TREE_TYPE (off), off, size_int (warn_if_not_align)))
1217 : : {
1218 : 25 : if (TREE_CODE (off) == INTEGER_CST)
1219 : 24 : warning (opt_w, "%q+D offset %E in %qT isn%'t aligned to %u",
1220 : : field, off, context, warn_if_not_align);
1221 : : else
1222 : 1 : warning (opt_w, "%q+D offset %E in %qT may not be aligned to %u",
1223 : : field, off, context, warn_if_not_align);
1224 : : }
1225 : : }
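A standalone C example of the -Wpacked-not-aligned case; the struct names are hypothetical and the exact diagnostic text may differ:

    struct __attribute__ ((aligned (8))) inner { long l; };

    /* With -Wpacked-not-aligned, GCC warns here: packing places the
       'inner' member at alignment/offset 1, below the 8 bytes its
       'aligned' attribute requests.  */
    struct __attribute__ ((packed)) outer { char c; struct inner in; };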
1226 : :
1227 : : /* Called from place_field to handle unions. */
1228 : :
1229 : : static void
1230 : 1795251 : place_union_field (record_layout_info rli, tree field)
1231 : : {
1232 : 1795251 : update_alignment_for_field (rli, field, /*known_align=*/0);
1233 : :
1234 : 1795251 : DECL_FIELD_OFFSET (field) = size_zero_node;
1235 : 1795251 : DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
1236 : 1795251 : SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
1237 : 1795251 : handle_warn_if_not_align (field, rli->record_align);
1238 : :
1239 : : /* If this is an ERROR_MARK return *after* having set the
1240 : : field at the start of the union. This helps when parsing
1241 : : invalid fields. */
1242 : 1795251 : if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
1243 : : return;
1244 : :
1245 : 3034529 : if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1246 : 2132579 : && TYPE_TYPELESS_STORAGE (TREE_TYPE (field)))
1247 : 460813 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1248 : :
 1249                 :             :   /* We might see a flexible array member field (with no DECL_SIZE_UNIT);
 1250                 :             :      use zero size for such a field.  */
1251 : 1795248 : tree field_size_unit = DECL_SIZE_UNIT (field)
1252 : 1795248 : ? DECL_SIZE_UNIT (field)
1253 : 107 : : build_int_cst (sizetype, 0);
1254 : : /* We assume the union's size will be a multiple of a byte so we don't
1255 : : bother with BITPOS. */
1256 : 1795248 : if (TREE_CODE (rli->t) == UNION_TYPE)
1257 : 1795248 : rli->offset = size_binop (MAX_EXPR, rli->offset, field_size_unit);
1258 : 0 : else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
1259 : 0 : rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
1260 : : field_size_unit, rli->offset);
1261 : : }
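The MAX_EXPR above is the familiar union rule: every member sits at offset 0 and the union is as large as its largest member. A standalone C check; the sizes assume a typical LP64 target:

    #include <stddef.h>

    union u { char c; double d; };

    /* offsetof (union u, c) == offsetof (union u, d) == 0, and
       sizeof (union u) == sizeof (double) == 8 on LP64.  */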
1262 : :
1263 : : /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
1264 : : at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
1265 : : units of alignment than the underlying TYPE. */
1266 : : static int
1267 : 258168 : excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
1268 : : HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
1269 : : {
1270 : : /* Note that the calculation of OFFSET might overflow; we calculate it so
1271 : : that we still get the right result as long as ALIGN is a power of two. */
1272 : 258168 : unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
1273 : :
1274 : 258168 : offset = offset % align;
1275 : 258168 : return ((offset + size + align - 1) / align
1276 : 258168 : > tree_to_uhwi (TYPE_SIZE (type)) / align);
1277 : : }
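A worked instance, assuming BITS_PER_UNIT == 8: an 8-bit field placed at byte 3, bit 6, whose type is a 32-bit int with 32-bit alignment:

    /* offset        = (3 * 8 + 6) % 32       = 30
       units spanned = (30 + 8 + 32 - 1) / 32 = 2
       units in type = 32 / 32                = 1
       2 > 1, so the field would straddle an alignment unit boundary
       and excess_unit_span returns nonzero.  */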
1278 : :
1279 : : /* RLI contains information about the layout of a RECORD_TYPE. FIELD
1280 : : is a FIELD_DECL to be added after those fields already present in
1281 : : T. (FIELD is not actually added to the TYPE_FIELDS list here;
1282 : : callers that desire that behavior must manually perform that step.) */
1283 : :
1284 : : void
1285 : 303289641 : place_field (record_layout_info rli, tree field)
1286 : : {
1287 : : /* The alignment required for FIELD. */
1288 : 303289641 : unsigned int desired_align;
1289 : : /* The alignment FIELD would have if we just dropped it into the
1290 : : record as it presently stands. */
1291 : 303289641 : unsigned int known_align;
1292 : 303289641 : unsigned int actual_align;
1293 : : /* The type of this field. */
1294 : 303289641 : tree type = TREE_TYPE (field);
1295 : :
1296 : 303289641 : gcc_assert (TREE_CODE (field) != ERROR_MARK);
1297 : :
1298 : : /* If FIELD is static, then treat it like a separate variable, not
1299 : : really like a structure field. If it is a FUNCTION_DECL, it's a
1300 : : method. In both cases, all we do is lay out the decl, and we do
1301 : : it *after* the record is laid out. */
1302 : 303289641 : if (VAR_P (field))
1303 : : {
1304 : 9686529 : vec_safe_push (rli->pending_statics, field);
1305 : 9686529 : return;
1306 : : }
1307 : :
1308 : : /* Enumerators and enum types which are local to this class need not
1309 : : be laid out. Likewise for initialized constant fields. */
1310 : 293603112 : else if (TREE_CODE (field) != FIELD_DECL)
1311 : : return;
1312 : :
1313 : : /* Unions are laid out very differently than records, so split
1314 : : that code off to another function. */
1315 : 49847437 : else if (TREE_CODE (rli->t) != RECORD_TYPE)
1316 : : {
1317 : 1795251 : place_union_field (rli, field);
1318 : 1795251 : return;
1319 : : }
1320 : :
1321 : 48052186 : else if (TREE_CODE (type) == ERROR_MARK)
1322 : : {
1323 : : /* Place this field at the current allocation position, so we
1324 : : maintain monotonicity. */
1325 : 36 : DECL_FIELD_OFFSET (field) = rli->offset;
1326 : 36 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1327 : 36 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1328 : 36 : handle_warn_if_not_align (field, rli->record_align);
1329 : 36 : return;
1330 : : }
1331 : :
1332 : 48052150 : if (AGGREGATE_TYPE_P (type)
1333 : 48052150 : && TYPE_TYPELESS_STORAGE (type))
1334 : 1087206 : TYPE_TYPELESS_STORAGE (rli->t) = 1;
1335 : :
1336 : : /* Work out the known alignment so far. Note that A & (-A) is the
1337 : : value of the least-significant bit in A that is one. */
1338 : 48052150 : if (! integer_zerop (rli->bitpos))
1339 : 16708931 : known_align = least_bit_hwi (tree_to_uhwi (rli->bitpos));
1340 : 31343219 : else if (integer_zerop (rli->offset))
1341 : : known_align = 0;
1342 : 7816286 : else if (tree_fits_uhwi_p (rli->offset))
1343 : 7815521 : known_align = (BITS_PER_UNIT
1344 : 7815521 : * least_bit_hwi (tree_to_uhwi (rli->offset)));
1345 : : else
1346 : 765 : known_align = rli->offset_align;
1347 : :
1348 : 48052150 : desired_align = update_alignment_for_field (rli, field, known_align);
1349 : 48052150 : if (known_align == 0)
1350 : 46938780 : known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1351 : :
1352 : 48052150 : if (warn_packed && DECL_PACKED (field))
1353 : : {
1354 : 3 : if (known_align >= TYPE_ALIGN (type))
1355 : : {
1356 : 3 : if (TYPE_ALIGN (type) > desired_align)
1357 : : {
1358 : 3 : if (STRICT_ALIGNMENT)
1359 : : warning (OPT_Wattributes, "packed attribute causes "
1360 : : "inefficient alignment for %q+D", field);
1361 : : /* Don't warn if DECL_PACKED was set by the type. */
1362 : 3 : else if (!TYPE_PACKED (rli->t))
1363 : 0 : warning (OPT_Wattributes, "packed attribute is "
1364 : : "unnecessary for %q+D", field);
1365 : : }
1366 : : }
1367 : : else
1368 : 0 : rli->packed_maybe_necessary = 1;
1369 : : }
1370 : :
 1371                 :             :   /* Does this field automatically have the alignment it needs by virtue
1372 : : of the fields that precede it and the record's own alignment? */
1373 : 48052150 : if (known_align < desired_align
1374 : 48052150 : && (! targetm.ms_bitfield_layout_p (rli->t)
1375 : 17 : || rli->prev_field == NULL))
1376 : : {
1377 : : /* No, we need to skip space before this field.
1378 : : Bump the cumulative size to multiple of field alignment. */
1379 : :
1380 : 1329334 : if (!targetm.ms_bitfield_layout_p (rli->t)
1381 : 1329331 : && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION
1382 : 2643961 : && !TYPE_ARTIFICIAL (rli->t))
1383 : 1314596 : warning (OPT_Wpadded, "padding struct to align %q+D", field);
1384 : :
1385 : : /* If the alignment is still within offset_align, just align
1386 : : the bit position. */
1387 : 1329334 : if (desired_align < rli->offset_align)
1388 : 1293721 : rli->bitpos = round_up (rli->bitpos, desired_align);
1389 : : else
1390 : : {
1391 : : /* First adjust OFFSET by the partial bits, then align. */
1392 : 35613 : rli->offset
1393 : 35613 : = size_binop (PLUS_EXPR, rli->offset,
1394 : : fold_convert (sizetype,
1395 : : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1396 : : bitsize_unit_node)));
1397 : 35613 : rli->bitpos = bitsize_zero_node;
1398 : :
1399 : 35613 : rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
1400 : : }
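: : /* E.g. with 8-bit units, rli->bitpos == 10 and desired_align == 64:
: : rli->offset advances by CEIL (10, 8) == 2 bytes, rli->bitpos resets
: : to zero, and rli->offset is then rounded up to a multiple of
: : 8 bytes. */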
1401 : :
1402 : 1329334 : if (! TREE_CONSTANT (rli->offset))
1403 : 416 : rli->offset_align = desired_align;
1404 : : }
1405 : :
1406 : : /* Handle compatibility with PCC. Note that if the record has any
1407 : : variable-sized fields, we need not worry about compatibility. */
1408 : 48052150 : if (PCC_BITFIELD_TYPE_MATTERS
1409 : 48052150 : && ! targetm.ms_bitfield_layout_p (rli->t)
1410 : 48051949 : && TREE_CODE (field) == FIELD_DECL
1411 : 48051949 : && type != error_mark_node
1412 : 48051949 : && DECL_BIT_FIELD (field)
1413 : 262217 : && (! DECL_PACKED (field)
1414 : : /* Enter for these packed fields only to issue a warning. */
1415 : 2718 : || TYPE_ALIGN (type) <= BITS_PER_UNIT)
1416 : 259691 : && maximum_field_alignment == 0
1417 : 259457 : && ! integer_zerop (DECL_SIZE (field))
1418 : 258182 : && tree_fits_uhwi_p (DECL_SIZE (field))
1419 : 258182 : && tree_fits_uhwi_p (rli->offset)
1420 : 48310318 : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1421 : : {
1422 : 258168 : unsigned int type_align = TYPE_ALIGN (type);
1423 : 258168 : tree dsize = DECL_SIZE (field);
1424 : 258168 : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1425 : 258168 : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1426 : 258168 : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1427 : :
1428 : : #ifdef ADJUST_FIELD_ALIGN
1429 : 258168 : if (! TYPE_USER_ALIGN (type))
1430 : 253179 : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1431 : : #endif
1432 : :
1433 : : /* A bit field may not span more units of alignment of its type
1434 : : than its type itself. Advance to next boundary if necessary. */
1435 : 258168 : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1436 : : {
1437 : 8921 : if (DECL_PACKED (field))
1438 : : {
1439 : 22 : if (warn_packed_bitfield_compat == 1)
1440 : 14 : inform
1441 : 14 : (input_location,
1442 : : "offset of packed bit-field %qD has changed in GCC 4.4",
1443 : : field);
1444 : : }
1445 : : else
1446 : 8899 : rli->bitpos = round_up (rli->bitpos, type_align);
1447 : : }
1448 : :
1449 : 258168 : if (! DECL_PACKED (field))
1450 : 257982 : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1451 : :
1452 : 258168 : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1453 : : TYPE_WARN_IF_NOT_ALIGN (type));
1454 : : }
1455 : :
1456 : : #ifdef BITFIELD_NBYTES_LIMITED
1457 : : if (BITFIELD_NBYTES_LIMITED
1458 : : && ! targetm.ms_bitfield_layout_p (rli->t)
1459 : : && TREE_CODE (field) == FIELD_DECL
1460 : : && type != error_mark_node
1461 : : && DECL_BIT_FIELD_TYPE (field)
1462 : : && ! DECL_PACKED (field)
1463 : : && ! integer_zerop (DECL_SIZE (field))
1464 : : && tree_fits_uhwi_p (DECL_SIZE (field))
1465 : : && tree_fits_uhwi_p (rli->offset)
1466 : : && tree_fits_uhwi_p (TYPE_SIZE (type)))
1467 : : {
1468 : : unsigned int type_align = TYPE_ALIGN (type);
1469 : : tree dsize = DECL_SIZE (field);
1470 : : HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1471 : : HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1472 : : HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1473 : :
1474 : : #ifdef ADJUST_FIELD_ALIGN
1475 : : if (! TYPE_USER_ALIGN (type))
1476 : : type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1477 : : #endif
1478 : :
1479 : : if (maximum_field_alignment != 0)
1480 : : type_align = MIN (type_align, maximum_field_alignment);
1481 : : /* ??? This test is the opposite of the test in the containing if
1482 : : statement, so this code is unreachable currently. */
1483 : : else if (DECL_PACKED (field))
1484 : : type_align = MIN (type_align, BITS_PER_UNIT);
1485 : :
1486 : : /* A bit field may not span the unit of alignment of its type.
1487 : : Advance to next boundary if necessary. */
1488 : : if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1489 : : rli->bitpos = round_up (rli->bitpos, type_align);
1490 : :
1491 : : TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1492 : : SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1493 : : TYPE_WARN_IF_NOT_ALIGN (type));
1494 : : }
1495 : : #endif
1496 : :
1497 : : /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1498 : : A subtlety:
1499 : : When a bit field is inserted into a packed record, the whole
1500 : : size of the underlying type is used by one or more same-size
1501 : : adjacent bitfields. (That is, if it's long:3, 32 bits is
1502 : : used in the record, and any additional adjacent long bitfields are
1503 : : packed into the same chunk of 32 bits. However, if the size
1504 : : changes, a new field of that size is allocated.) In an unpacked
1505 : : record, this is the same as using alignment, but not equivalent
1506 : : when packing.
1507 : :
1508 : : Note: for compatibility, we use the type size, not the type alignment,
1509 : : to determine alignment, since that matches the documentation. */
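: : /* For instance, assuming 32-bit int, 64-bit long and ms_struct
: : layout, in struct { long a : 3; long b : 4; int c : 5; } the
: : fields a and b are packed into one 64-bit chunk, while c starts
: : a new 32-bit chunk because its type size differs, even though
: : bits remained in the first chunk. */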
1510 : :
1511 : 48052150 : if (targetm.ms_bitfield_layout_p (rli->t))
1512 : : {
1513 : 201 : tree prev_saved = rli->prev_field;
1514 : 283 : tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1515 : :
1516 : : /* If there is a previous field, it is known to be a bitfield. */
1517 : 201 : if (rli->prev_field)
1518 : : {
1519 : 82 : bool realign_p = known_align < desired_align;
1520 : :
1521 : : /* If both are bitfields, nonzero, and the same size, this is
1522 : : the middle of a run. Zero declared size fields are special
1523 : : and handled as "end of run". (Note: it's nonzero declared
1524 : : size, but equal type sizes!) (Since we know that both
1525 : : the current and previous fields are bitfields by the
1526 : : time we check it, DECL_SIZE must be present for both.) */
1527 : 82 : if (DECL_BIT_FIELD_TYPE (field)
1528 : 64 : && !integer_zerop (DECL_SIZE (field))
1529 : 56 : && !integer_zerop (DECL_SIZE (rli->prev_field))
1530 : 54 : && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
1531 : 54 : && tree_fits_uhwi_p (TYPE_SIZE (type))
1532 : 136 : && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1533 : : {
1534 : : /* We're in the middle of a run of equal type size fields; make
1535 : : sure we realign if we run out of bits. (Not decl size,
1536 : : type size!) */
1537 : 52 : HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
1538 : :
1539 : 52 : if (rli->remaining_in_alignment < bitsize)
1540 : : {
1541 : 1 : HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
1542 : :
1543 : : /* Out of bits; bump up to the next 'word'. */
1544 : 1 : rli->bitpos
1545 : 1 : = size_binop (PLUS_EXPR, rli->bitpos,
1546 : : bitsize_int (rli->remaining_in_alignment));
1547 : 1 : rli->prev_field = field;
1548 : 1 : if (typesize < bitsize)
1549 : 0 : rli->remaining_in_alignment = 0;
1550 : : else
1551 : 1 : rli->remaining_in_alignment = typesize - bitsize;
1552 : : }
1553 : : else
1554 : : {
1555 : 51 : rli->remaining_in_alignment -= bitsize;
1556 : 51 : realign_p = false;
1557 : : }
1558 : : }
1559 : : else
1560 : : {
1561 : : /* End of a run: if leaving a run of bitfields of the same type
1562 : : size, we have to "use up" the rest of the bits of the type
1563 : : size.
1564 : :
1565 : : Compute the new position as the sum of the size for the prior
1566 : : type and where we first started working on that type.
1567 : : Note: since the beginning of the field was aligned, the end
1568 : : will be too; no rounding is needed. */
1569 : :
1570 : 30 : if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1571 : : {
1572 : 20 : rli->bitpos
1573 : 20 : = size_binop (PLUS_EXPR, rli->bitpos,
1574 : : bitsize_int (rli->remaining_in_alignment));
1575 : : }
1576 : : else
1577 : : /* We "use up" size zero fields; the code below should behave
1578 : : as if the prior field was not a bitfield. */
1579 : : prev_saved = NULL;
1580 : :
1581 : : /* Cause a new bitfield to be captured, either this time (if
1582 : : currently a bitfield) or next time we see one. */
1583 : 30 : if (!DECL_BIT_FIELD_TYPE (field)
1584 : 30 : || integer_zerop (DECL_SIZE (field)))
1585 : 26 : rli->prev_field = NULL;
1586 : : }
1587 : :
1588 : : /* Does this field automatically have alignment it needs by virtue
1589 : : of the fields that precede it and the record's own alignment? */
1590 : 82 : if (realign_p)
1591 : : {
1592 : : /* If the alignment is still within offset_align, just align
1593 : : the bit position. */
1594 : 13 : if (desired_align < rli->offset_align)
1595 : 11 : rli->bitpos = round_up (rli->bitpos, desired_align);
1596 : : else
1597 : : {
1598 : : /* First adjust OFFSET by the partial bits, then align. */
1599 : 2 : tree d = size_binop (CEIL_DIV_EXPR, rli->bitpos,
1600 : : bitsize_unit_node);
1601 : 2 : rli->offset = size_binop (PLUS_EXPR, rli->offset,
1602 : : fold_convert (sizetype, d));
1603 : 2 : rli->bitpos = bitsize_zero_node;
1604 : :
1605 : 2 : rli->offset = round_up (rli->offset,
1606 : : desired_align / BITS_PER_UNIT);
1607 : : }
1608 : :
1609 : 13 : if (! TREE_CONSTANT (rli->offset))
1610 : 0 : rli->offset_align = desired_align;
1611 : : }
1612 : :
1613 : 82 : normalize_rli (rli);
1614 : : }
1615 : :
1616 : : /* If we're starting a new run of same type size bitfields
1617 : : (or a run of non-bitfields), set up the "first of the run"
1618 : : fields.
1619 : :
1620 : : That is, if the current field is not a bitfield, or if there
1621 : : was a prior bitfield and the type sizes differ, or if there wasn't
1622 : : a prior bitfield and the size of the current field is nonzero.
1623 : :
1624 : : Note: we must be sure to test ONLY the type size if there was
1625 : : a prior bitfield and ONLY for the current field being zero if
1626 : : there wasn't. */
1627 : :
1628 : 201 : if (!DECL_BIT_FIELD_TYPE (field)
1629 : 261 : || (prev_saved != NULL
1630 : 129 : ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1631 : 69 : : !integer_zerop (DECL_SIZE (field))))
1632 : : {
1633 : : /* Never smaller than a byte for compatibility. */
1634 : 143 : unsigned int type_align = BITS_PER_UNIT;
1635 : :
1636 : : /* When not a bitfield, we could be seeing a flexible array member
1637 : : (with no DECL_SIZE). Since we won't be using remaining_in_alignment
1638 : : until we see a bitfield (and come by here again) we just skip
1639 : : calculating it. */
1640 : 143 : if (DECL_SIZE (field) != NULL
1641 : 143 : && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field)))
1642 : 285 : && tree_fits_uhwi_p (DECL_SIZE (field)))
1643 : : {
1644 : 142 : unsigned HOST_WIDE_INT bitsize
1645 : 142 : = tree_to_uhwi (DECL_SIZE (field));
1646 : 142 : unsigned HOST_WIDE_INT typesize
1647 : 142 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
1648 : :
1649 : 142 : if (typesize < bitsize)
1650 : 0 : rli->remaining_in_alignment = 0;
1651 : : else
1652 : 142 : rli->remaining_in_alignment = typesize - bitsize;
1653 : : }
1654 : :
1655 : : /* Now align (conventionally) for the new type. */
1656 : 143 : if (! DECL_PACKED (field))
1657 : 137 : type_align = TYPE_ALIGN (TREE_TYPE (field));
1658 : :
1659 : 143 : if (maximum_field_alignment != 0)
1660 : 56 : type_align = MIN (type_align, maximum_field_alignment);
1661 : :
1662 : 143 : rli->bitpos = round_up (rli->bitpos, type_align);
1663 : :
1664 : : /* If we really aligned, don't allow subsequent bitfields
1665 : : to undo that. */
1666 : 143 : rli->prev_field = NULL;
1667 : : }
1668 : : }
1669 : :
1670 : : /* Offset so far becomes the position of this field after normalizing. */
1671 : 48052150 : normalize_rli (rli);
1672 : 48052150 : DECL_FIELD_OFFSET (field) = rli->offset;
1673 : 48052150 : DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1674 : 48052150 : SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1675 : 48052150 : handle_warn_if_not_align (field, rli->record_align);
1676 : :
1677 : : /* Evaluate nonconstant offsets only once, either now or as soon as safe. */
1678 : 48052150 : if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST)
1679 : 1169 : DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field));
1680 : :
1681 : : /* If this field ended up more aligned than we thought it would be (we
1682 : : approximate this by seeing if its position changed), lay out the field
1683 : : again; perhaps we can use an integral mode for it now. */
1684 : 48052150 : if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1685 : 16166144 : actual_align = least_bit_hwi (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
1686 : 31886006 : else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1687 : 46938758 : actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1688 : 8359073 : else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1689 : 8357921 : actual_align = (BITS_PER_UNIT
1690 : 8357921 : * least_bit_hwi (tree_to_uhwi (DECL_FIELD_OFFSET (field))));
1691 : : else
1692 : 1152 : actual_align = DECL_OFFSET_ALIGN (field);
1693 : : /* ACTUAL_ALIGN is still the actual alignment *within the record*.
1694 : : Store / extract bit-field operations will check the alignment of the
1695 : : record against the mode of bit fields. */
1696 : :
1697 : 48052150 : if (known_align != actual_align)
1698 : 1337538 : layout_decl (field, actual_align);
1699 : :
1700 : 48052150 : if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1701 : 62964 : rli->prev_field = field;
1702 : :
1703 : : /* Now add size of this field to the size of the record. If the size is
1704 : : not constant, treat the field as being a multiple of bytes and just
1705 : : adjust the offset, resetting the bit position. Otherwise, apportion the
1706 : : size amongst the bit position and offset. First handle the case of an
1707 : : unspecified size, which can happen when we have an invalid nested struct
1708 : : definition, such as struct j { struct j { int i; } }. The error message
1709 : : is printed in finish_struct. */
1710 : 48052150 : if (DECL_SIZE (field) == 0)
1711 : : /* Do nothing. */;
1712 : 47966959 : else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1713 : 47966959 : || TREE_OVERFLOW (DECL_SIZE (field)))
1714 : : {
1715 : 1052 : rli->offset
1716 : 1052 : = size_binop (PLUS_EXPR, rli->offset,
1717 : : fold_convert (sizetype,
1718 : : size_binop (CEIL_DIV_EXPR, rli->bitpos,
1719 : : bitsize_unit_node)));
1720 : 1052 : rli->offset
1721 : 1052 : = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1722 : 1052 : rli->bitpos = bitsize_zero_node;
1723 : 1052 : rli->offset_align = MIN (rli->offset_align, desired_align);
1724 : :
1725 : 1052 : if (!multiple_of_p (bitsizetype, DECL_SIZE (field),
1726 : 2104 : bitsize_int (rli->offset_align)))
1727 : : {
1728 : 303 : tree type = strip_array_types (TREE_TYPE (field));
1729 : : /* The above adjusts offset_align just based on the start of the
1730 : : field. The field might not have a size that is a multiple of
1731 : : that offset_align though. If the field is an array of fixed
1732 : : sized elements, assume there can be any multiple of those
1733 : : sizes. If it is a variable length aggregate or array of
1734 : : variable length aggregates, assume the worst, namely that the end is
1735 : : just BITS_PER_UNIT aligned. */
1736 : 303 : if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
1737 : : {
1738 : 303 : if (TREE_INT_CST_LOW (TYPE_SIZE (type)))
1739 : : {
1740 : 303 : unsigned HOST_WIDE_INT sz
1741 : 303 : = least_bit_hwi (TREE_INT_CST_LOW (TYPE_SIZE (type)));
1742 : 303 : rli->offset_align = MIN (rli->offset_align, sz);
1743 : : }
1744 : : }
1745 : : else
1746 : 0 : rli->offset_align = MIN (rli->offset_align, BITS_PER_UNIT);
1747 : : }
1748 : : }
1749 : 47965907 : else if (targetm.ms_bitfield_layout_p (rli->t))
1750 : : {
1751 : 201 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1752 : :
1753 : : /* If FIELD is the last field and doesn't end at the full length
1754 : : of the type, then pad the struct out to the full length of the
1755 : : last type. */
1756 : 201 : if (DECL_BIT_FIELD_TYPE (field)
1757 : 201 : && !integer_zerop (DECL_SIZE (field)))
1758 : : {
1759 : : /* We have to scan, because non-field DECLS are also here. */
1760 : : tree probe = field;
1761 : 182 : while ((probe = DECL_CHAIN (probe)))
1762 : 135 : if (TREE_CODE (probe) == FIELD_DECL)
1763 : : break;
1764 : 119 : if (!probe)
1765 : 47 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1766 : : bitsize_int (rli->remaining_in_alignment));
1767 : : }
1768 : :
1769 : 201 : normalize_rli (rli);
1770 : : }
1771 : : else
1772 : : {
1773 : 47965706 : rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1774 : 47965706 : normalize_rli (rli);
1775 : : }
1776 : : }
1777 : :
1778 : : /* Assuming that all the fields have been laid out, this function uses
1779 : : RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1780 : : indicated by RLI. */
1781 : :
1782 : : static void
1783 : 39927073 : finalize_record_size (record_layout_info rli)
1784 : : {
1785 : 39927073 : tree unpadded_size, unpadded_size_unit;
1786 : :
1787 : : /* Now we want just byte and bit offsets, so set the offset alignment
1788 : : to be a byte and then normalize. */
1789 : 39927073 : rli->offset_align = BITS_PER_UNIT;
1790 : 39927073 : normalize_rli (rli);
1791 : :
1792 : : /* Determine the desired alignment. */
1793 : : #ifdef ROUND_TYPE_ALIGN
1794 : : SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1795 : : rli->record_align));
1796 : : #else
1797 : 39927073 : SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align));
1798 : : #endif
1799 : :
1800 : : /* Compute the size so far. Be sure to allow for extra bits in the
1801 : : size in bytes. We have guaranteed above that it will be no more
1802 : : than a single byte. */
1803 : 39927073 : unpadded_size = rli_size_so_far (rli);
1804 : 39927073 : unpadded_size_unit = rli_size_unit_so_far (rli);
1805 : 39927073 : if (! integer_zerop (rli->bitpos))
1806 : 2147 : unpadded_size_unit
1807 : 2147 : = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
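: : /* E.g. if the fields end at bit 12, unpadded_size is 12 bits and
: : the whole-byte size is 1, so the 4 leftover bits bump
: : unpadded_size_unit to 2 bytes. */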
1808 : :
1809 : : /* Round the size up to be a multiple of the required alignment. */
1810 : 39927073 : TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1811 : 39927073 : TYPE_SIZE_UNIT (rli->t)
1812 : 39927073 : = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1813 : :
1814 : 39927073 : if (TREE_CONSTANT (unpadded_size)
1815 : 39926380 : && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
1816 : 1000931 : && input_location != BUILTINS_LOCATION
1817 : 40927778 : && !TYPE_ARTIFICIAL (rli->t))
1818 : : {
1819 : 993388 : tree pad_size
1820 : 993388 : = size_binop (MINUS_EXPR, TYPE_SIZE_UNIT (rli->t), unpadded_size_unit);
1821 : 993388 : warning (OPT_Wpadded,
1822 : : "padding struct size to alignment boundary with %E bytes", pad_size);
1823 : : }
1824 : :
1825 : 19 : if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1826 : 19 : && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1827 : 39927075 : && TREE_CONSTANT (unpadded_size))
1828 : : {
1829 : 2 : tree unpacked_size;
1830 : :
1831 : : #ifdef ROUND_TYPE_ALIGN
1832 : : rli->unpacked_align
1833 : : = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1834 : : #else
1835 : 2 : rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1836 : : #endif
1837 : :
1838 : 2 : unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1839 : 2 : if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1840 : : {
1841 : 2 : if (TYPE_NAME (rli->t))
1842 : : {
1843 : 2 : tree name;
1844 : :
1845 : 2 : if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1846 : 2 : name = TYPE_NAME (rli->t);
1847 : : else
1848 : 0 : name = DECL_NAME (TYPE_NAME (rli->t));
1849 : :
1850 : 2 : if (STRICT_ALIGNMENT)
1851 : : warning (OPT_Wpacked, "packed attribute causes inefficient "
1852 : : "alignment for %qE", name);
1853 : : else
1854 : 2 : warning (OPT_Wpacked,
1855 : : "packed attribute is unnecessary for %qE", name);
1856 : : }
1857 : : else
1858 : : {
1859 : 0 : if (STRICT_ALIGNMENT)
1860 : : warning (OPT_Wpacked,
1861 : : "packed attribute causes inefficient alignment");
1862 : : else
1863 : 0 : warning (OPT_Wpacked, "packed attribute is unnecessary");
1864 : : }
1865 : : }
1866 : : }
1867 : 39927073 : }
1868 : :
1869 : : /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1870 : :
1871 : : void
1872 : 73265909 : compute_record_mode (tree type)
1873 : : {
1874 : 73265909 : tree field;
1875 : 73265909 : machine_mode mode = VOIDmode;
1876 : :
1877 : : /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1878 : : However, if possible, we use a mode that fits in a register
1879 : : instead, in order to allow for better optimization down the
1880 : : line. */
1881 : 73265909 : SET_TYPE_MODE (type, BLKmode);
1882 : :
1883 : 73265909 : poly_uint64 type_size;
1884 : 73265909 : if (!poly_int_tree_p (TYPE_SIZE (type), &type_size))
1885 : 7724991 : return;
1886 : :
1887 : : /* A record which has any BLKmode members must itself be
1888 : : BLKmode; it can't go in a register, unless the member is
1889 : : BLKmode only because it isn't aligned. */
1890 : 290420056 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1891 : : {
1892 : 224879138 : if (TREE_CODE (field) != FIELD_DECL)
1893 : 158137037 : continue;
1894 : :
1895 : 66742101 : poly_uint64 field_size;
1896 : 66742101 : if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1897 : 66741792 : || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1898 : 38038961 : && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1899 : 76026101 : && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1900 : 37987140 : && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1901 : 59029959 : || !tree_fits_poly_uint64_p (bit_position (field))
1902 : 59029959 : || DECL_SIZE (field) == 0
1903 : 125772060 : || !poly_int_tree_p (DECL_SIZE (field), &field_size))
1904 : 7724260 : return;
1905 : :
1906 : : /* If this field is the whole struct, remember its mode so
1907 : : that, say, we can put a double in a class into a DF
1908 : : register instead of forcing it to live in the stack. */
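: : /* For example, in a struct whose only field is a double (assuming
: : a 64-bit DFmode double), the field size equals the type size, so
: : DFmode is remembered here and can become the struct's mode. */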
1909 : 59029959 : if (known_eq (field_size, type_size)
1910 : : /* Partial int types (e.g. __int20) may have TYPE_SIZE equal to
1911 : : wider types (e.g. int32), despite precision being less. Ensure
1912 : : that the TYPE_MODE of the struct does not get set to the partial
1913 : : int mode if there is a wider type also in the struct. */
1914 : 59029959 : && known_gt (GET_MODE_PRECISION (DECL_MODE (field)),
1915 : : GET_MODE_PRECISION (mode)))
1916 : 4160677 : mode = DECL_MODE (field);
1917 : :
1918 : : /* With some targets, it is sub-optimal to access an aligned
1919 : : BLKmode structure as a scalar. */
1920 : 59029959 : if (targetm.member_type_forces_blk (field, mode))
1921 : : return;
1922 : : }
1923 : :
1924 : : /* If we only have one real field, use its mode if that mode's size
1925 : : matches the type's size. This generally only applies to RECORD_TYPE.
1926 : : For UNION_TYPE, if the widest field is MODE_INT then use that mode.
1927 : : If the widest field is MODE_PARTIAL_INT, and the union will be passed
1928 : : by reference, then use that mode. */
1929 : 65540918 : if ((TREE_CODE (type) == RECORD_TYPE
1930 : 441574 : || (TREE_CODE (type) == UNION_TYPE
1931 : 441574 : && (GET_MODE_CLASS (mode) == MODE_INT
1932 : 62550 : || (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
1933 : 0 : && (targetm.calls.pass_by_reference
1934 : 0 : (pack_cumulative_args (0),
1935 : 4039900 : function_arg_info (type, mode, /*named=*/false)))))))
1936 : 65478368 : && mode != VOIDmode
1937 : 69580952 : && known_eq (GET_MODE_BITSIZE (mode), type_size))
1938 : : ;
1939 : : else
1940 : 61501018 : mode = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1).else_blk ();
1941 : :
1942 : : /* If structure's known alignment is less than what the scalar
1943 : : mode would need, and it matters, then stick with BLKmode. */
1944 : 65540918 : if (mode != BLKmode
1945 : : && STRICT_ALIGNMENT
1946 : : && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1947 : : || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
1948 : : {
1949 : : /* If this is the only reason this type is BLKmode, then
1950 : : don't force containing types to be BLKmode. */
1951 : : TYPE_NO_FORCE_BLK (type) = 1;
1952 : : mode = BLKmode;
1953 : : }
1954 : :
1955 : 65540918 : SET_TYPE_MODE (type, mode);
1956 : : }
1957 : :
1958 : : /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1959 : : out. */
1960 : :
1961 : : static void
1962 : 1261825347 : finalize_type_size (tree type)
1963 : : {
1964 : : /* Normally, use the alignment corresponding to the mode chosen.
1965 : : However, where strict alignment is not required, avoid
1966 : : over-aligning structures, since most compilers do not do this
1967 : : alignment. */
1968 : 1261825347 : bool tua_cleared_p = false;
1969 : 1261825347 : if (TYPE_MODE (type) != BLKmode
1970 : 1204835155 : && TYPE_MODE (type) != VOIDmode
1971 : 2466379185 : && (STRICT_ALIGNMENT || !AGGREGATE_TYPE_P (type)))
1972 : : {
1973 : 1154599686 : unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1974 : :
1975 : : /* Don't override a larger alignment requirement coming from a user
1976 : : alignment of one of the fields. */
1977 : 1154599686 : if (mode_align >= TYPE_ALIGN (type))
1978 : : {
1979 : 1154599686 : SET_TYPE_ALIGN (type, mode_align);
1980 : : /* Remember that we're about to reset this flag. */
1981 : 1154599686 : tua_cleared_p = TYPE_USER_ALIGN (type);
1982 : 1154599686 : TYPE_USER_ALIGN (type) = false;
1983 : : }
1984 : : }
1985 : :
1986 : : /* Do machine-dependent extra alignment. */
1987 : : #ifdef ROUND_TYPE_ALIGN
1988 : : SET_TYPE_ALIGN (type,
1989 : : ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT));
1990 : : #endif
1991 : :
1992 : : /* If we failed to find a simple way to calculate the unit size
1993 : : of the type, find it by division. */
1994 : 1261825347 : if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1995 : : /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1996 : : result will fit in sizetype. We will get more efficient code using
1997 : : sizetype, so we force a conversion. */
1998 : 0 : TYPE_SIZE_UNIT (type)
1999 : 0 : = fold_convert (sizetype,
2000 : : size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
2001 : : bitsize_unit_node));
2002 : :
2003 : 1261825347 : if (TYPE_SIZE (type) != 0)
2004 : : {
2005 : 1255955592 : TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
2006 : 1255955592 : TYPE_SIZE_UNIT (type)
2007 : 2511911184 : = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type));
2008 : : }
2009 : :
2010 : : /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
2011 : 1261825347 : if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2012 : 256232 : TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
2013 : 1261825347 : if (TYPE_SIZE_UNIT (type) != 0
2014 : 1261825347 : && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
2015 : 256232 : TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
2016 : :
2017 : : /* Handle empty records as per the x86-64 psABI. */
2018 : 1261825347 : TYPE_EMPTY_P (type) = targetm.calls.empty_record_p (type);
2019 : :
2020 : : /* Also layout any other variants of the type. */
2021 : 1261825347 : if (TYPE_NEXT_VARIANT (type)
2022 : 1261825347 : || type != TYPE_MAIN_VARIANT (type))
2023 : : {
2024 : 36204633 : tree variant;
2025 : : /* Record layout info of this variant. */
2026 : 36204633 : tree size = TYPE_SIZE (type);
2027 : 36204633 : tree size_unit = TYPE_SIZE_UNIT (type);
2028 : 36204633 : unsigned int align = TYPE_ALIGN (type);
2029 : 36204633 : unsigned int precision = TYPE_PRECISION (type);
2030 : 36204633 : unsigned int user_align = TYPE_USER_ALIGN (type);
2031 : 36204633 : machine_mode mode = TYPE_MODE (type);
2032 : 36204633 : bool empty_p = TYPE_EMPTY_P (type);
2033 : 36204633 : bool typeless = AGGREGATE_TYPE_P (type) && TYPE_TYPELESS_STORAGE (type);
2034 : :
2035 : : /* Copy it into all variants. */
2036 : 36204633 : for (variant = TYPE_MAIN_VARIANT (type);
2037 : 120736195 : variant != NULL_TREE;
2038 : 84531562 : variant = TYPE_NEXT_VARIANT (variant))
2039 : : {
2040 : 84531562 : TYPE_SIZE (variant) = size;
2041 : 84531562 : TYPE_SIZE_UNIT (variant) = size_unit;
2042 : 84531562 : unsigned valign = align;
2043 : 84531562 : if (TYPE_USER_ALIGN (variant))
2044 : : {
2045 : 507971 : valign = MAX (valign, TYPE_ALIGN (variant));
2046 : : /* If we reset TYPE_USER_ALIGN on the main variant, we might
2047 : : need to reset it on the variants too. TYPE_MODE will be set
2048 : : to MODE in this variant, so we can use that. */
2049 : 507971 : if (tua_cleared_p && GET_MODE_ALIGNMENT (mode) >= valign)
2050 : 0 : TYPE_USER_ALIGN (variant) = false;
2051 : : }
2052 : : else
2053 : 84023591 : TYPE_USER_ALIGN (variant) = user_align;
2054 : 84531562 : SET_TYPE_ALIGN (variant, valign);
2055 : 84531562 : TYPE_PRECISION (variant) = precision;
2056 : 84531562 : SET_TYPE_MODE (variant, mode);
2057 : 84531562 : TYPE_EMPTY_P (variant) = empty_p;
2058 : 84531562 : if (AGGREGATE_TYPE_P (variant))
2059 : 84531544 : TYPE_TYPELESS_STORAGE (variant) = typeless;
2060 : : }
2061 : : }
2062 : 1261825347 : }
2063 : :
2064 : : /* Return a new underlying object for a bitfield started with FIELD. */
2065 : :
2066 : : static tree
2067 : 68477 : start_bitfield_representative (tree field)
2068 : : {
2069 : 68477 : tree repr = make_node (FIELD_DECL);
2070 : 68477 : DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
2071 : : /* Force the representative to begin at a BITS_PER_UNIT aligned
2072 : : boundary - C++ may use tail-padding of a base object to
2073 : : continue packing bits so the bitfield region does not start
2074 : : at bit zero (see g++.dg/abi/bitfield5.C for example).
2075 : : Unallocated bits may happen for other reasons as well,
2076 : : for example Ada which allows explicit bit-granular structure layout. */
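: : /* E.g. with BITS_PER_UNIT == 8, ~(BITS_PER_UNIT - 1) is ~7, so a
: : field at bit offset 19 yields a representative starting at bit
: : offset 16 (19 & ~7). */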
2077 : 136954 : DECL_FIELD_BIT_OFFSET (repr)
2078 : 68477 : = size_binop (BIT_AND_EXPR,
2079 : : DECL_FIELD_BIT_OFFSET (field),
2080 : : bitsize_int (~(BITS_PER_UNIT - 1)));
2081 : 68477 : SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
2082 : 68477 : DECL_SIZE (repr) = DECL_SIZE (field);
2083 : 68477 : DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
2084 : 68477 : DECL_PACKED (repr) = DECL_PACKED (field);
2085 : 68477 : DECL_CONTEXT (repr) = DECL_CONTEXT (field);
2086 : : /* There are no indirect accesses to this field. If we introduce
2087 : : some then they have to use the record alias set. This makes
2088 : : sure to properly conflict with [indirect] accesses to addressable
2089 : : fields of the bitfield group. */
2090 : 68477 : DECL_NONADDRESSABLE_P (repr) = 1;
2091 : 68477 : return repr;
2092 : : }
2093 : :
2094 : : /* Finish up a bitfield group that was started by creating the underlying
2095 : : object REPR with the last field in the bitfield group FIELD. */
2096 : :
2097 : : static void
2098 : 68477 : finish_bitfield_representative (tree repr, tree field)
2099 : : {
2100 : 68477 : unsigned HOST_WIDE_INT bitsize, maxbitsize;
2101 : 68477 : tree nextf, size;
2102 : :
2103 : 68477 : size = size_diffop (DECL_FIELD_OFFSET (field),
2104 : : DECL_FIELD_OFFSET (repr));
2105 : 136980 : while (TREE_CODE (size) == COMPOUND_EXPR)
2106 : 26 : size = TREE_OPERAND (size, 1);
2107 : 68477 : gcc_assert (tree_fits_uhwi_p (size));
2108 : 68477 : bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
2109 : 68477 : + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2110 : 68477 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
2111 : 68477 : + tree_to_uhwi (DECL_SIZE (field)));
2112 : :
2113 : : /* Round up bitsize to multiples of BITS_PER_UNIT. */
2114 : 68477 : bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
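: : /* E.g. with BITS_PER_UNIT == 8, a 17-bit group becomes
: : (17 + 7) & ~7 == 24 bits. */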
2115 : :
2116 : : /* Now nothing tells us how to pad out bitsize ... */
2117 : 68477 : if (TREE_CODE (DECL_CONTEXT (field)) == RECORD_TYPE)
2118 : : {
2119 : 65041 : nextf = DECL_CHAIN (field);
2120 : 107610 : while (nextf && TREE_CODE (nextf) != FIELD_DECL)
2121 : 42569 : nextf = DECL_CHAIN (nextf);
2122 : : }
2123 : : else
2124 : : nextf = NULL_TREE;
2125 : 65041 : if (nextf)
2126 : : {
2127 : 38241 : tree maxsize;
2128 : : /* If there was an error, the field may be not laid out
2129 : : correctly. Don't bother to do anything. */
2130 : 38241 : if (TREE_TYPE (nextf) == error_mark_node)
2131 : : {
2132 : 1 : TREE_TYPE (repr) = error_mark_node;
2133 : 1 : return;
2134 : : }
2135 : 38240 : maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
2136 : : DECL_FIELD_OFFSET (repr));
2137 : 38240 : if (tree_fits_uhwi_p (maxsize))
2138 : : {
2139 : 38224 : maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
2140 : 38224 : + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
2141 : 38224 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2142 : : /* If the group ends within a bitfield, nextf does not need to be
2143 : : aligned to BITS_PER_UNIT. Thus round up. */
2144 : 38224 : maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
2145 : : }
2146 : : else
2147 : : maxbitsize = bitsize;
2148 : : }
2149 : : else
2150 : : {
2151 : : /* Note that if the C++ FE sets up tail-padding to be re-used it
2152 : : creates an as-base variant of the type with TYPE_SIZE adjusted
2153 : : accordingly. So it is safe to include tail-padding here. */
2154 : 30236 : tree aggsize = lang_hooks.types.unit_size_without_reusable_padding
2155 : 30236 : (DECL_CONTEXT (field));
2156 : 30236 : tree maxsize = size_diffop (aggsize, DECL_FIELD_OFFSET (repr));
2157 : : /* We cannot generally rely on maxsize to fold to an integer constant,
2158 : : so use bitsize as fallback for this case. */
2159 : 30236 : if (tree_fits_uhwi_p (maxsize))
2160 : 30226 : maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
2161 : 30226 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2162 : : else
2163 : : maxbitsize = bitsize;
2164 : : }
2165 : :
2166 : : /* This holds only if we don't artificially break up the
2167 : : representative in the middle of a large bitfield with different,
2168 : : possibly overlapping representatives. And all representatives
2169 : : start at a byte offset. */
2170 : 68476 : gcc_assert (maxbitsize % BITS_PER_UNIT == 0);
2171 : :
2172 : : /* Find the smallest nice mode to use. */
2173 : 68476 : opt_scalar_int_mode mode_iter;
2174 : 241475 : FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2175 : 482678 : if (GET_MODE_BITSIZE (mode_iter.require ()) >= bitsize)
2176 : : break;
2177 : :
2178 : 68476 : scalar_int_mode mode;
2179 : 68476 : if (!mode_iter.exists (&mode)
2180 : 68340 : || GET_MODE_BITSIZE (mode) > maxbitsize
2181 : 105766 : || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)
2182 : : {
2183 : 16414 : if (TREE_CODE (TREE_TYPE (field)) == BITINT_TYPE)
2184 : : {
2185 : 45 : struct bitint_info info;
2186 : 45 : unsigned prec = TYPE_PRECISION (TREE_TYPE (field));
2187 : 45 : bool ok = targetm.c.bitint_type_info (prec, &info);
2188 : 45 : gcc_assert (ok);
2189 : 45 : scalar_int_mode limb_mode
2190 : 45 : = as_a <scalar_int_mode> (info.abi_limb_mode);
2191 : 45 : unsigned lprec = GET_MODE_PRECISION (limb_mode);
2192 : 45 : if (prec > lprec)
2193 : : {
2194 : : /* For middle/large/huge _BitInt prefer bitsize being a multiple
2195 : : of limb precision. */
2196 : 43 : unsigned HOST_WIDE_INT bsz = CEIL (bitsize, lprec) * lprec;
2197 : 43 : if (bsz <= maxbitsize)
2198 : 45 : bitsize = bsz;
2199 : : }
2200 : : }
2201 : : /* We really want a BLKmode representative only as a last resort,
2202 : : considering the member b in
2203 : : struct { int a : 7; int b : 17; int c; } __attribute__((packed));
2204 : : Otherwise we simply want to split the representative up
2205 : : allowing for overlaps within the bitfield region as required for
2206 : : struct { int a : 7; int b : 7;
2207 : : int c : 10; int d; } __attribute__((packed));
2208 : : [0, 15] HImode for a and b, [8, 23] HImode for c. */
2209 : 16414 : DECL_SIZE (repr) = bitsize_int (bitsize);
2210 : 16414 : DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
2211 : 16414 : SET_DECL_MODE (repr, BLKmode);
2212 : 16414 : TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
2213 : 16414 : bitsize / BITS_PER_UNIT);
2214 : : }
2215 : : else
2216 : : {
2217 : 52062 : unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
2218 : 52062 : DECL_SIZE (repr) = bitsize_int (modesize);
2219 : 52062 : DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
2220 : 52062 : SET_DECL_MODE (repr, mode);
2221 : 52062 : TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
2222 : : }
2223 : :
2224 : : /* Remember whether the bitfield group is at the end of the
2225 : : structure or not. */
2226 : 68476 : DECL_CHAIN (repr) = nextf;
2227 : : }
2228 : :
2229 : : /* Compute and set FIELD_DECLs for the underlying objects we should
2230 : : use for bitfield access for the structure T. */
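: : /* As an illustration (assuming a typical 8-bit-byte target), in
: : struct S { int a : 3; int b : 5; char c; } the run a,b is covered
: : by a single byte-sized representative and the non-bitfield c ends
: : the run; RTL expansion accesses a and b through that
: : representative. */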
2231 : :
2232 : : void
2233 : 39927073 : finish_bitfield_layout (tree t)
2234 : : {
2235 : 39927073 : tree field, prev;
2236 : 39927073 : tree repr = NULL_TREE;
2237 : :
2238 : 39927073 : if (TREE_CODE (t) == QUAL_UNION_TYPE)
2239 : : return;
2240 : :
2241 : 39927073 : for (prev = NULL_TREE, field = TYPE_FIELDS (t);
2242 : 347301276 : field; field = DECL_CHAIN (field))
2243 : : {
2244 : 307374203 : if (TREE_CODE (field) != FIELD_DECL)
2245 : 253442204 : continue;
2246 : :
2247 : : /* In the C++ memory model, consecutive bit fields in a structure are
2248 : : considered one memory location and updating a memory location
2249 : : may not store into adjacent memory locations. */
2250 : 53931999 : if (!repr
2251 : 53931999 : && DECL_BIT_FIELD_TYPE (field))
2252 : : {
2253 : : /* Start new representative. */
2254 : 68476 : repr = start_bitfield_representative (field);
2255 : : }
2256 : 53863523 : else if (repr
2257 : 53863523 : && ! DECL_BIT_FIELD_TYPE (field))
2258 : : {
2259 : : /* Finish off new representative. */
2260 : 37616 : finish_bitfield_representative (repr, prev);
2261 : 37616 : repr = NULL_TREE;
2262 : : }
2263 : 53825907 : else if (DECL_BIT_FIELD_TYPE (field))
2264 : : {
2265 : 347019 : gcc_assert (repr != NULL_TREE);
2266 : :
2267 : : /* Zero-size bitfields finish off a representative and
2268 : : do not have a representative themselves. This is
2269 : : required by the C++ memory model. */
2270 : 347019 : if (integer_zerop (DECL_SIZE (field)))
2271 : : {
2272 : 624 : finish_bitfield_representative (repr, prev);
2273 : 624 : repr = NULL_TREE;
2274 : : }
2275 : :
2276 : : /* We assume that either DECL_FIELD_OFFSET of the representative
2277 : : and each bitfield member is a constant or they are equal.
2278 : : This is because we need to be able to compute the bit-offset
2279 : : of each field relative to the representative in get_bit_range
2280 : : during RTL expansion.
2281 : : If these constraints are not met, simply force a new
2282 : : representative to be generated. That will at most
2283 : : generate worse code but still maintain correctness with
2284 : : respect to the C++ memory model. */
2285 : 346406 : else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))
2286 : 346384 : && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
2287 : 11 : || operand_equal_p (DECL_FIELD_OFFSET (repr),
2288 : 11 : DECL_FIELD_OFFSET (field), 0)))
2289 : : {
2290 : 1 : finish_bitfield_representative (repr, prev);
2291 : 1 : repr = start_bitfield_representative (field);
2292 : : }
2293 : : }
2294 : : else
2295 : 53478888 : continue;
2296 : :
2297 : 106717 : if (repr)
2298 : 414871 : DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;
2299 : :
2300 : 453111 : if (TREE_CODE (t) == RECORD_TYPE)
2301 : : prev = field;
2302 : 3436 : else if (repr)
2303 : : {
2304 : 3436 : finish_bitfield_representative (repr, field);
2305 : 3436 : repr = NULL_TREE;
2306 : : }
2307 : : }
2308 : :
2309 : 39927073 : if (repr)
2310 : 26800 : finish_bitfield_representative (repr, prev);
2311 : : }
2312 : :
2313 : : /* Do all of the work required to layout the type indicated by RLI,
2314 : : once the fields have been laid out. This function will call `free'
2315 : : for RLI, unless FREE_P is false. Passing a value other than false
2316 : : for FREE_P is bad practice; this option only exists to support the
2317 : : G++ 3.2 ABI. */
2318 : :
2319 : : void
2320 : 39927073 : finish_record_layout (record_layout_info rli, int free_p)
2321 : : {
2322 : 39927073 : tree variant;
2323 : :
2324 : : /* Compute the final size. */
2325 : 39927073 : finalize_record_size (rli);
2326 : :
2327 : : /* Compute the TYPE_MODE for the record. */
2328 : 39927073 : compute_record_mode (rli->t);
2329 : :
2330 : : /* Perform any last tweaks to the TYPE_SIZE, etc. */
2331 : 39927073 : finalize_type_size (rli->t);
2332 : :
2333 : : /* Compute bitfield representatives. */
2334 : 39927073 : finish_bitfield_layout (rli->t);
2335 : :
2336 : : /* Propagate TYPE_PACKED and TYPE_REVERSE_STORAGE_ORDER to variants.
2337 : : With C++ templates, it is too early to do this when the attribute
2338 : : is being parsed. */
2339 : 86705648 : for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
2340 : 46778575 : variant = TYPE_NEXT_VARIANT (variant))
2341 : : {
2342 : 46778575 : TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
2343 : 93557150 : TYPE_REVERSE_STORAGE_ORDER (variant)
2344 : 46778575 : = TYPE_REVERSE_STORAGE_ORDER (rli->t);
2345 : : }
2346 : :
2347 : : /* Lay out any static members. This is done now because their type
2348 : : may use the record's type. */
2349 : 49613602 : while (!vec_safe_is_empty (rli->pending_statics))
2350 : 9686529 : layout_decl (rli->pending_statics->pop (), 0);
2351 : :
2352 : : /* Clean up. */
2353 : 39927073 : if (free_p)
2354 : : {
2355 : 39927073 : vec_free (rli->pending_statics);
2356 : 39927073 : free (rli);
2357 : : }
2358 : 39927073 : }
2359 : :
2360 : :
2361 : : /* Finish processing a builtin RECORD_TYPE type TYPE. Its name is
2362 : : NAME; its fields are chained in reverse on FIELDS.
2363 : :
2364 : : If ALIGN_TYPE is non-null, it is given the same alignment as
2365 : : ALIGN_TYPE. */
2366 : :
2367 : : void
2368 : 1192387 : finish_builtin_struct (tree type, const char *name, tree fields,
2369 : : tree align_type)
2370 : : {
2371 : 1192387 : tree tail, next;
2372 : :
2373 : 3605199 : for (tail = NULL_TREE; fields; tail = fields, fields = next)
2374 : : {
2375 : 2412812 : DECL_FIELD_CONTEXT (fields) = type;
2376 : 2412812 : next = DECL_CHAIN (fields);
2377 : 2412812 : DECL_CHAIN (fields) = tail;
2378 : : }
2379 : 1192387 : TYPE_FIELDS (type) = tail;
2380 : :
2381 : 1192387 : if (align_type)
2382 : : {
2383 : 1084199 : SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type));
2384 : 1084199 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
2385 : 1084199 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2386 : : TYPE_WARN_IF_NOT_ALIGN (align_type));
2387 : : }
2388 : :
2389 : 1192387 : layout_type (type);
2390 : : #if 0 /* not yet, should get fixed properly later */
2391 : : TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
2392 : : #else
2393 : 1192387 : TYPE_NAME (type) = build_decl (BUILTINS_LOCATION,
2394 : : TYPE_DECL, get_identifier (name), type);
2395 : : #endif
2396 : 1192387 : TYPE_STUB_DECL (type) = TYPE_NAME (type);
2397 : 1192387 : layout_decl (TYPE_NAME (type), 0);
2398 : 1192387 : }
2399 : :
2400 : : /* Compute TYPE_MODE for TYPE (which is an ARRAY_TYPE). */
2401 : :
2402 : 61951894 : void compute_array_mode (tree type)
2403 : : {
2404 : 61951894 : gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
2405 : :
2406 : 61951894 : SET_TYPE_MODE (type, BLKmode);
2407 : 61951894 : if (TYPE_SIZE (type) != 0
2408 : 56363444 : && ! targetm.member_type_forces_blk (type, VOIDmode)
2409 : : /* BLKmode elements force BLKmode aggregate;
2410 : : else extract/store fields may lose. */
2411 : 118315338 : && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
2412 : 570998 : || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
2413 : : {
2414 : 55792446 : SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),
2415 : : TYPE_SIZE (type)));
2416 : 55792446 : if (TYPE_MODE (type) != BLKmode
2417 : : && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
2418 : : && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
2419 : : {
2420 : : TYPE_NO_FORCE_BLK (type) = 1;
2421 : : SET_TYPE_MODE (type, BLKmode);
2422 : : }
2423 : : }
2424 : 61951894 : }
2425 : :
2426 : : /* Calculate the mode, size, and alignment for TYPE.
2427 : : For an array type, calculate the element separation as well.
2428 : : Record TYPE on the chain of permanent or temporary types
2429 : : so that dbxout will find out about it.
2430 : :
2431 : : TYPE_SIZE of a type is nonzero if the type has been laid out already.
2432 : : layout_type does nothing on such a type.
2433 : :
2434 : : If the type is incomplete, its TYPE_SIZE remains zero. */
2435 : :
2436 : : void
2437 : 2075749719 : layout_type (tree type)
2438 : : {
2439 : 2075749719 : gcc_assert (type);
2440 : :
2441 : 2075749719 : if (type == error_mark_node)
2442 : : return;
2443 : :
2444 : : /* We don't want finalize_type_size to copy an alignment attribute to
2445 : : variants that don't have it. */
2446 : 2075749719 : type = TYPE_MAIN_VARIANT (type);
2447 : :
2448 : : /* Do nothing if type has been laid out before. */
2449 : 2075749719 : if (TYPE_SIZE (type))
2450 : : return;
2451 : :
2452 : 1226979648 : switch (TREE_CODE (type))
2453 : : {
2454 : 0 : case LANG_TYPE:
2455 : : /* This kind of type is the responsibility
2456 : : of the language-specific code. */
2457 : 0 : gcc_unreachable ();
2458 : :
2459 : 14076021 : case BOOLEAN_TYPE:
2460 : 14076021 : case INTEGER_TYPE:
2461 : 14076021 : case ENUMERAL_TYPE:
2462 : 14076021 : {
2463 : 14076021 : scalar_int_mode mode
2464 : 14076021 : = smallest_int_mode_for_size (TYPE_PRECISION (type)).require ();
2465 : 14076021 : SET_TYPE_MODE (type, mode);
2466 : 28152042 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2467 : : /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */
2468 : 28152042 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2469 : 14076021 : break;
2470 : : }
2471 : :
2472 : 1479745 : case BITINT_TYPE:
2473 : 1479745 : {
2474 : 1479745 : struct bitint_info info;
2475 : 1479745 : int cnt;
2476 : 1479745 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
2477 : 1479745 : gcc_assert (ok);
2478 : 1479745 : scalar_int_mode limb_mode
2479 : 1479745 : = as_a <scalar_int_mode> (info.abi_limb_mode);
2480 : 1479745 : if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
2481 : : {
2482 : 1295 : SET_TYPE_MODE (type, limb_mode);
2483 : 1295 : gcc_assert (info.abi_limb_mode == info.limb_mode);
2484 : : cnt = 1;
2485 : : }
2486 : : else
2487 : : {
2488 : 1478450 : SET_TYPE_MODE (type, BLKmode);
2489 : 1478450 : cnt = CEIL (TYPE_PRECISION (type), GET_MODE_PRECISION (limb_mode));
2490 : 1478450 : gcc_assert (info.abi_limb_mode == info.limb_mode
2491 : : || !info.big_endian == !WORDS_BIG_ENDIAN);
2492 : : }
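: : /* For example, assuming 64-bit limbs, _BitInt(200) gets
: : cnt == CEIL (200, 64) == 4 limbs, i.e. 256 bits. */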
2493 : 2959490 : TYPE_SIZE (type) = bitsize_int (cnt * GET_MODE_BITSIZE (limb_mode));
2494 : 2959490 : TYPE_SIZE_UNIT (type) = size_int (cnt * GET_MODE_SIZE (limb_mode));
2495 : 1479745 : SET_TYPE_ALIGN (type, GET_MODE_ALIGNMENT (limb_mode));
2496 : 1479745 : if (cnt > 1)
2497 : : {
2498 : : /* Use same mode as compute_record_mode would use for a structure
2499 : : containing cnt limb_mode elements. */
2500 : 1478450 : machine_mode mode = mode_for_size_tree (TYPE_SIZE (type),
2501 : 1478450 : MODE_INT, 1).else_blk ();
2502 : 1478450 : if (mode == BLKmode)
2503 : : break;
2504 : 37277 : finalize_type_size (type);
2505 : 37277 : SET_TYPE_MODE (type, mode);
2506 : 37277 : if (STRICT_ALIGNMENT
2507 : : && !(TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
2508 : : || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
2509 : : {
2510 : : /* If this is the only reason this type is BLKmode, then
2511 : : don't force containing types to be BLKmode. */
2512 : : TYPE_NO_FORCE_BLK (type) = 1;
2513 : : SET_TYPE_MODE (type, BLKmode);
2514 : : }
2515 : 37277 : if (TYPE_NEXT_VARIANT (type) || type != TYPE_MAIN_VARIANT (type))
2516 : 0 : for (tree variant = TYPE_MAIN_VARIANT (type);
2517 : 0 : variant != NULL_TREE;
2518 : 0 : variant = TYPE_NEXT_VARIANT (variant))
2519 : : {
2520 : 0 : SET_TYPE_MODE (variant, mode);
2521 : 0 : if (STRICT_ALIGNMENT
2522 : : && !(TYPE_ALIGN (variant) >= BIGGEST_ALIGNMENT
2523 : : || (TYPE_ALIGN (variant)
2524 : : >= GET_MODE_ALIGNMENT (mode))))
2525 : : {
2526 : : TYPE_NO_FORCE_BLK (variant) = 1;
2527 : : SET_TYPE_MODE (variant, BLKmode);
2528 : : }
2529 : : }
2530 : 37277 : return;
2531 : : }
2532 : : break;
2533 : : }
2534 : :
2535 : 4108151 : case REAL_TYPE:
2536 : 4108151 : {
2537 : : /* Allow the caller to choose the type mode, which is how decimal
2538 : : floats are distinguished from binary ones. */
2539 : 4108151 : if (TYPE_MODE (type) == VOIDmode)
2540 : 2047391 : SET_TYPE_MODE
2541 : : (type, float_mode_for_size (TYPE_PRECISION (type)).require ());
2542 : 4108151 : scalar_float_mode mode = as_a <scalar_float_mode> (TYPE_MODE (type));
2543 : 8216302 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2544 : 8216302 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2545 : 4108151 : break;
2546 : : }
2547 : :
2548 : 10126980 : case FIXED_POINT_TYPE:
2549 : 10126980 : {
2550 : : /* TYPE_MODE (type) has been set already. */
2551 : 10126980 : scalar_mode mode = SCALAR_TYPE_MODE (type);
2552 : 20253960 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2553 : 20253960 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2554 : 10126980 : break;
2555 : : }
2556 : :
2557 : 5094784 : case COMPLEX_TYPE:
2558 : 5094784 : TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
2559 : 5094784 : if (TYPE_MODE (TREE_TYPE (type)) == BLKmode)
2560 : : {
2561 : 4556 : gcc_checking_assert (TREE_CODE (TREE_TYPE (type)) == BITINT_TYPE);
2562 : 4556 : SET_TYPE_MODE (type, BLKmode);
2563 : 4556 : TYPE_SIZE (type)
2564 : 4556 : = int_const_binop (MULT_EXPR, TYPE_SIZE (TREE_TYPE (type)),
2565 : 4556 : bitsize_int (2));
2566 : 4556 : TYPE_SIZE_UNIT (type)
2567 : 4556 : = int_const_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (type)),
2568 : 4556 : bitsize_int (2));
2569 : 4556 : break;
2570 : : }
2571 : 5090228 : SET_TYPE_MODE (type,
2572 : : GET_MODE_COMPLEX_MODE (TYPE_MODE (TREE_TYPE (type))));
2573 : :
2574 : 10180456 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
2575 : 10180456 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
2576 : 5090228 : break;
2577 : :
2578 : 102086454 : case VECTOR_TYPE:
2579 : 102086454 : {
2580 : 102086454 : poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type);
2581 : 102086454 : tree innertype = TREE_TYPE (type);
2582 : :
2583 : : /* Find an appropriate mode for the vector type. */
2584 : 102086454 : if (TYPE_MODE (type) == VOIDmode)
2585 : 49663890 : SET_TYPE_MODE (type,
2586 : : mode_for_vector (SCALAR_TYPE_MODE (innertype),
2587 : : nunits).else_blk ());
2588 : :
2589 : 102086454 : TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
2590 : 102086454 : TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
2591 : : /* Several boolean vector elements may fit in a single unit. */
2592 : 102086454 : if (VECTOR_BOOLEAN_TYPE_P (type)
2593 : 104687485 : && type->type_common.mode != BLKmode)
2594 : 2601028 : TYPE_SIZE_UNIT (type)
2595 : 7803084 : = size_int (GET_MODE_SIZE (type->type_common.mode));
2596 : : else
2597 : 198970852 : TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
2598 : 99485426 : TYPE_SIZE_UNIT (innertype),
2599 : 198970852 : size_int (nunits));
2600 : 102086454 : TYPE_SIZE (type) = int_const_binop
2601 : 102086454 : (MULT_EXPR,
2602 : 102086454 : bits_from_bytes (TYPE_SIZE_UNIT (type)),
2603 : 102086454 : bitsize_int (BITS_PER_UNIT));
2604 : :
2605 : : /* For vector types, we do not default to the mode's alignment.
2606 : : Instead, query a target hook, defaulting to natural alignment.
2607 : : This prevents ABI changes depending on whether or not native
2608 : : vector modes are supported. */
2609 : 102086454 : SET_TYPE_ALIGN (type, targetm.vector_alignment (type));
2610 : :
2611 : : /* However, if the underlying mode requires a bigger alignment than
2612 : : what the target hook provides, we cannot use the mode. For now,
2613 : : simply reject that case. */
2614 : 102086454 : gcc_assert (TYPE_ALIGN (type)
2615 : : >= GET_MODE_ALIGNMENT (TYPE_MODE (type)));
2616 : 102086454 : break;
2617 : : }
2618 : :
2619 : 281305 : case VOID_TYPE:
2620 : : /* This is an incomplete type and so doesn't have a size. */
2621 : 281305 : SET_TYPE_ALIGN (type, 1);
2622 : 281305 : TYPE_USER_ALIGN (type) = 0;
2623 : 281305 : SET_TYPE_MODE (type, VOIDmode);
2624 : 281305 : break;
2625 : :
2626 : 948413 : case OFFSET_TYPE:
2627 : 952507 : TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
2628 : 952507 : TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS);
2629 : : /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be
2630 : : integral, which may be an __intN. */
2631 : 952507 : SET_TYPE_MODE (type, int_mode_for_size (POINTER_SIZE, 0).require ());
2632 : 952507 : TYPE_PRECISION (type) = POINTER_SIZE;
2633 : 948413 : break;
2634 : :
2635 : 838006741 : case FUNCTION_TYPE:
2636 : 838006741 : case METHOD_TYPE:
2637 : : /* It's hard to see what the mode and size of a function ought to
2638 : : be, but we do know the alignment is FUNCTION_BOUNDARY, so
2639 : : make it consistent with that. */
2640 : 838006741 : SET_TYPE_MODE (type,
2641 : : int_mode_for_size (FUNCTION_BOUNDARY, 0).else_blk ());
2642 : 838006741 : TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
2643 : 838006741 : TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
2644 : 838006741 : break;
2645 : :
2646 : 183737786 : case POINTER_TYPE:
2647 : 183737786 : case REFERENCE_TYPE:
2648 : 183737786 : {
2649 : 183737786 : scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
2650 : 367475572 : TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2651 : 367475572 : TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2652 : 183737786 : TYPE_UNSIGNED (type) = 1;
2653 : 183737786 : TYPE_PRECISION (type) = GET_MODE_PRECISION (mode);
2654 : : }
2655 : 183737786 : break;
2656 : :
2657 : 61951894 : case ARRAY_TYPE:
2658 : 61951894 : {
2659 : 61951894 : tree index = TYPE_DOMAIN (type);
2660 : 61951894 : tree element = TREE_TYPE (type);
2661 : :
2662 : : /* We need to know both bounds in order to compute the size. */
2663 : 57622707 : if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
2664 : 118375981 : && TYPE_SIZE (element))
2665 : : {
2666 : 56363444 : tree ub = TYPE_MAX_VALUE (index);
2667 : 56363444 : tree lb = TYPE_MIN_VALUE (index);
2668 : 56363444 : tree element_size = TYPE_SIZE (element);
2669 : 56363444 : tree length;
2670 : :
2671 : : /* Make sure that an array of zero-sized elements is zero-sized
2672 : : regardless of its extent. */
2673 : 56363444 : if (integer_zerop (element_size))
2674 : 4640 : length = size_zero_node;
2675 : :
2676 : : /* The computation should happen in the original signedness so
2677 : : that (possible) negative values are handled appropriately
2678 : : when determining overflow. */
2679 : : else
2680 : : {
2681 : : /* ??? When it is obvious that the range is signed,
2682 : : represent it using ssizetype. */
2683 : 56358804 : if (TREE_CODE (lb) == INTEGER_CST
2684 : 56358169 : && TREE_CODE (ub) == INTEGER_CST
2685 : 56105367 : && TYPE_UNSIGNED (TREE_TYPE (lb))
2686 : 111708804 : && tree_int_cst_lt (ub, lb))
2687 : : {
2688 : 474 : lb = wide_int_to_tree (ssizetype,
2689 : 474 : offset_int::from (wi::to_wide (lb),
2690 : : SIGNED));
2691 : 474 : ub = wide_int_to_tree (ssizetype,
2692 : 948 : offset_int::from (wi::to_wide (ub),
2693 : : SIGNED));
2694 : : }
2695 : 56358804 : length
2696 : 56358804 : = fold_convert (sizetype,
2697 : : size_binop (PLUS_EXPR,
2698 : : build_int_cst (TREE_TYPE (lb), 1),
2699 : : size_binop (MINUS_EXPR, ub, lb)));
2700 : : }
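: : /* E.g. a domain of [0, 9] gives length 1 + (9 - 0) == 10, so an
: : array of 4-byte elements gets TYPE_SIZE_UNIT 40 below. */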
2701 : :
2702 : : /* ??? We have no way to distinguish a null-sized array from an
2703 : : array spanning the whole sizetype range, so we arbitrarily
2704 : : decide that [0, -1] is the only valid representation. */
2705 : 56363444 : if (integer_zerop (length)
2706 : 41920 : && TREE_OVERFLOW (length)
2707 : 56388813 : && integer_zerop (lb))
2708 : 25369 : length = size_zero_node;
2709 : :
2710 : 56363444 : TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
2711 : : bits_from_bytes (length));
2712 : :
2713 : : /* If we know the size of the element, calculate the total size
2714 : : directly, rather than deriving it by division below.  This
2715 : : optimization helps Fortran assumed-size arrays (where the
2716 : : size of the array is determined at runtime) substantially. */
2717 : 56363444 : if (TYPE_SIZE_UNIT (element))
2718 : 56363444 : TYPE_SIZE_UNIT (type)
2719 : 112726888 : = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
2720 : : }
2721 : :
2722 : : /* Now round the alignment and size,
2723 : : using machine-dependent criteria if any. */
2724 : :
2725 : 61951894 : unsigned align = TYPE_ALIGN (element);
2726 : 61951894 : if (TYPE_USER_ALIGN (type))
2727 : 2124 : align = MAX (align, TYPE_ALIGN (type));
2728 : : else
2729 : 61949770 : TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
2730 : 61951894 : if (!TYPE_WARN_IF_NOT_ALIGN (type))
2731 : 61951894 : SET_TYPE_WARN_IF_NOT_ALIGN (type,
2732 : : TYPE_WARN_IF_NOT_ALIGN (element));
2733 : : #ifdef ROUND_TYPE_ALIGN
2734 : : align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT);
2735 : : #else
2736 : 61951894 : align = MAX (align, BITS_PER_UNIT);
2737 : : #endif
2738 : 61951894 : SET_TYPE_ALIGN (type, align);
2739 : 61951894 : compute_array_mode (type);
2740 : 61951894 : if (AGGREGATE_TYPE_P (element))
2741 : 1224139 : TYPE_TYPELESS_STORAGE (type) = TYPE_TYPELESS_STORAGE (element);
2742 : : /* When the element size is constant, check that it is at least as
2743 : : large as the element alignment. */
2744 : 61951894 : if (TYPE_SIZE_UNIT (element)
2745 : 61881972 : && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
2746 : : /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
2747 : : TYPE_ALIGN_UNIT. */
2748 : 61850587 : && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
2749 : 123802466 : && !integer_zerop (TYPE_SIZE_UNIT (element)))
2750 : : {
2751 : 61843154 : if (compare_tree_int (TYPE_SIZE_UNIT (element),
2752 : 61843154 : TYPE_ALIGN_UNIT (element)) < 0)
2753 : 8 : error ("alignment of array elements is greater than "
2754 : : "element size");
2755 : 61843146 : else if (TYPE_ALIGN_UNIT (element) > 1
2756 : 82519928 : && (wi::zext (wi::to_wide (TYPE_SIZE_UNIT (element)),
2757 : 10338391 : ffs_hwi (TYPE_ALIGN_UNIT (element)) - 1)
2758 : 82519928 : != 0))
2759 : 6 : error ("size of array element is not a multiple of its "
2760 : : "alignment");
2761 : : }
2762 : : break;
2763 : : }
2764 : :
2765 : 5081374 : case RECORD_TYPE:
2766 : 5081374 : case UNION_TYPE:
2767 : 5081374 : case QUAL_UNION_TYPE:
2768 : 5081374 : {
2769 : 5081374 : tree field;
2770 : 5081374 : record_layout_info rli;
2771 : :
2772 : : /* Initialize the layout information. */
2773 : 5081374 : rli = start_record_layout (type);
2774 : :
2775 : : /* If this is a QUAL_UNION_TYPE, we want to process the fields
2776 : : in reverse order when building the COND_EXPR that denotes
2777 : : its size. We reverse them again later. */
2778 : 5081374 : if (TREE_CODE (type) == QUAL_UNION_TYPE)
2779 : 0 : TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
2780 : :
2781 : : /* Place all the fields. */
2782 : 25678135 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2783 : 20596761 : place_field (rli, field);
2784 : :
2785 : 5081374 : if (TREE_CODE (type) == QUAL_UNION_TYPE)
2786 : 0 : TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
2787 : :
2788 : : /* Finish laying out the record. */
2789 : 5081374 : finish_record_layout (rli, /*free_p=*/true);
2790 : : }
2791 : 5081374 : break;
2792 : :
2793 : 0 : default:
2794 : 0 : gcc_unreachable ();
2795 : : }
2796 : :
2797 : : /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
2798 : : records and unions, finish_record_layout already called this
2799 : : function. */
2800 : 1226942371 : if (!RECORD_OR_UNION_TYPE_P (type))
2801 : 1221860997 : finalize_type_size (type);
2802 : :
2803 : : /* We should never see alias sets on incomplete aggregates. And we
2804 : : should never call layout_type on aggregates that are already complete. */
2805 : 1226942371 : if (AGGREGATE_TYPE_P (type))
2806 : 67033268 : gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
2807 : : }
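
Stripped of the tree and wide-int machinery, the ARRAY_TYPE arm above reduces to simple arithmetic: length = ub - lb + 1 (with the [0, -1] domain wrapping to zero) and size = element_size * length. Below is a minimal standalone sketch of that arithmetic on plain 64-bit integers; the function name is hypothetical and the overflow handling is deliberately simpler than the sizetype-based computation.

/* Editorial sketch, not GCC code: the array-size arithmetic from the
   ARRAY_TYPE case, on plain integers instead of sizetype trees.  */
#include <cstdint>
#include <cstdio>

static uint64_t
array_size_units (int64_t lb, int64_t ub, uint64_t element_size)
{
  /* An array of zero-sized elements is zero-sized whatever its extent.  */
  if (element_size == 0)
    return 0;

  /* length = ub - lb + 1; unsigned wraparound makes the [0, -1]
     domain come out as length 0.  */
  uint64_t length = (uint64_t) ub - (uint64_t) lb + 1;

  return element_size * length;
}

int
main ()
{
  printf ("%llu\n", (unsigned long long) array_size_units (0, 9, 4));  /* 40 */
  printf ("%llu\n", (unsigned long long) array_size_units (1, 8, 2));  /* 16 */
  printf ("%llu\n", (unsigned long long) array_size_units (0, -1, 4)); /* 0 */
  return 0;
}
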
2808 : :
2809 : : /* Return the least alignment required for type TYPE. */
2810 : :
2811 : : unsigned int
2812 : 34781197 : min_align_of_type (tree type)
2813 : : {
2814 : 34781197 : unsigned int align = TYPE_ALIGN (type);
2815 : 34781197 : if (!TYPE_USER_ALIGN (type))
2816 : : {
2817 : 63159155 : align = MIN (align, BIGGEST_ALIGNMENT);
2818 : : #ifdef BIGGEST_FIELD_ALIGNMENT
2819 : : align = MIN (align, BIGGEST_FIELD_ALIGNMENT);
2820 : : #endif
2821 : 32520548 : unsigned int field_align = align;
2822 : : #ifdef ADJUST_FIELD_ALIGN
2823 : 32520548 : field_align = ADJUST_FIELD_ALIGN (NULL_TREE, type, field_align);
2824 : : #endif
2825 : 32520548 : align = MIN (align, field_align);
2826 : : }
2827 : 34781197 : return align / BITS_PER_UNIT;
2828 : : }
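
min_align_of_type answers "how much alignment can a caller rely on for any object of this type": unless the user forced the alignment, the type's alignment is capped by the target's guarantees before converting bits to bytes. A standalone sketch under an assumed 128-bit cap; BIGGEST_FIELD_ALIGNMENT and ADJUST_FIELD_ALIGN, which only some targets define, are left out.

/* Editorial sketch, not GCC code: the capping logic of min_align_of_type
   with assumed stand-ins for the target macros.  */
#include <algorithm>
#include <cstdio>

static const unsigned kBitsPerUnit = 8;
static const unsigned kBiggestAlignment = 128;  /* assumed target cap */

static unsigned
min_align_in_bytes (unsigned type_align_bits, bool user_align)
{
  unsigned align = type_align_bits;
  /* A user-specified alignment is honored as-is; otherwise only the
     target-guaranteed minimum may be assumed.  */
  if (!user_align)
    align = std::min (align, kBiggestAlignment);
  return align / kBitsPerUnit;
}

int
main ()
{
  printf ("%u\n", min_align_in_bytes (256, false)); /* 16: capped at 128 bits */
  printf ("%u\n", min_align_in_bytes (256, true));  /* 32: user alignment kept */
  return 0;
}
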
2829 : :
2830 : : /* Create and return a type for signed integers of PRECISION bits. */
2831 : :
2832 : : tree
2833 : 2648921 : make_signed_type (int precision)
2834 : : {
2835 : 2648921 : tree type = make_node (INTEGER_TYPE);
2836 : :
2837 : 2648921 : TYPE_PRECISION (type) = precision;
2838 : :
2839 : 2648921 : fixup_signed_type (type);
2840 : 2648921 : return type;
2841 : : }
2842 : :
2843 : : /* Create and return a type for unsigned integers of PRECISION bits. */
2844 : :
2845 : : tree
2846 : 8654314 : make_unsigned_type (int precision)
2847 : : {
2848 : 8654314 : tree type = make_node (INTEGER_TYPE);
2849 : :
2850 : 8654314 : TYPE_PRECISION (type) = precision;
2851 : :
2852 : 8654314 : fixup_unsigned_type (type);
2853 : 8654314 : return type;
2854 : : }
2855 : :
2856 : : /* Create and return a fixed-point fract type of PRECISION bits,
2857 : : unsigned if UNSIGNEDP is set and saturating if SATP is set. */
2858 : :
2859 : : tree
2860 : 5626100 : make_fract_type (int precision, int unsignedp, int satp)
2861 : : {
2862 : 5626100 : tree type = make_node (FIXED_POINT_TYPE);
2863 : :
2864 : 5626100 : TYPE_PRECISION (type) = precision;
2865 : :
2866 : 5626100 : if (satp)
2867 : 2813050 : TYPE_SATURATING (type) = 1;
2868 : :
2869 : : /* Lay out the type: set its alignment, size, etc. */
2870 : 5626100 : TYPE_UNSIGNED (type) = unsignedp;
2871 : 5626100 : enum mode_class mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
2872 : 5626100 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2873 : 5626100 : layout_type (type);
2874 : :
2875 : 5626100 : return type;
2876 : : }
2877 : :
2878 : : /* Create and return a fixed-point accum type of PRECISION bits,
2879 : : unsigned if UNSIGNEDP is set and saturating if SATP is set. */
2880 : :
2881 : : tree
2882 : 4500880 : make_accum_type (int precision, int unsignedp, int satp)
2883 : : {
2884 : 4500880 : tree type = make_node (FIXED_POINT_TYPE);
2885 : :
2886 : 4500880 : TYPE_PRECISION (type) = precision;
2887 : :
2888 : 4500880 : if (satp)
2889 : 2250440 : TYPE_SATURATING (type) = 1;
2890 : :
2891 : : /* Lay out the type: set its alignment, size, etc. */
2892 : 4500880 : TYPE_UNSIGNED (type) = unsignedp;
2893 : 4500880 : enum mode_class mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
2894 : 4500880 : SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2895 : 4500880 : layout_type (type);
2896 : :
2897 : 4500880 : return type;
2898 : : }
2899 : :
2900 : : /* Initialize sizetypes so layout_type can use them. */
2901 : :
2902 : : void
2903 : 281305 : initialize_sizetypes (void)
2904 : : {
2905 : 281305 : int precision, bprecision;
2906 : :
2907 : : /* Get sizetype's precision from the SIZETYPE target macro. */
2908 : 288383 : if (strcmp (SIZETYPE, "unsigned int") == 0)
2909 : : precision = INT_TYPE_SIZE;
2910 : 274227 : else if (strcmp (SIZETYPE, "long unsigned int") == 0)
2911 : 274227 : precision = LONG_TYPE_SIZE;
2912 : 0 : else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
2913 : : precision = LONG_LONG_TYPE_SIZE;
2914 : 0 : else if (strcmp (SIZETYPE, "short unsigned int") == 0)
2915 : : precision = SHORT_TYPE_SIZE;
2916 : : else
2917 : : {
2918 : : int i;
2919 : :
2920 : : precision = -1;
2921 : 0 : for (i = 0; i < NUM_INT_N_ENTS; i++)
2922 : 0 : if (int_n_enabled_p[i])
2923 : : {
2924 : 0 : char name[50], altname[50];
2925 : 0 : sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
2926 : 0 : sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
2927 : :
2928 : 0 : if (strcmp (name, SIZETYPE) == 0
2929 : 0 : || strcmp (altname, SIZETYPE) == 0)
2930 : : {
2931 : 0 : precision = int_n_data[i].bitsize;
2932 : : }
2933 : : }
2934 : 0 : if (precision == -1)
2935 : 0 : gcc_unreachable ();
2936 : : }
2937 : :
2938 : 281305 : bprecision
2939 : 562610 : = MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE);
2940 : 281305 : bprecision
2941 : 281305 : = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision).require ());
2942 : 281305 : if (bprecision > HOST_BITS_PER_DOUBLE_INT)
2943 : : bprecision = HOST_BITS_PER_DOUBLE_INT;
2944 : :
2945 : : /* Create stubs for sizetype and bitsizetype so we can create constants. */
2946 : 281305 : sizetype = make_node (INTEGER_TYPE);
2947 : 281305 : TYPE_NAME (sizetype) = get_identifier ("sizetype");
2948 : 281305 : TYPE_PRECISION (sizetype) = precision;
2949 : 281305 : TYPE_UNSIGNED (sizetype) = 1;
2950 : 281305 : bitsizetype = make_node (INTEGER_TYPE);
2951 : 281305 : TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
2952 : 281305 : TYPE_PRECISION (bitsizetype) = bprecision;
2953 : 281305 : TYPE_UNSIGNED (bitsizetype) = 1;
2954 : :
2955 : : /* Now lay out both types manually. */
2956 : 281305 : scalar_int_mode mode = smallest_int_mode_for_size (precision).require ();
2957 : 281305 : SET_TYPE_MODE (sizetype, mode);
2958 : 281305 : SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
2959 : 281305 : TYPE_SIZE (sizetype) = bitsize_int (precision);
2960 : 562610 : TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (mode));
2961 : 281305 : set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);
2962 : :
2963 : 281305 : mode = smallest_int_mode_for_size (bprecision).require ();
2964 : 281305 : SET_TYPE_MODE (bitsizetype, mode);
2965 : 281305 : SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
2966 : 281305 : TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
2967 : 562610 : TYPE_SIZE_UNIT (bitsizetype) = size_int (GET_MODE_SIZE (mode));
2968 : 281305 : set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);
2969 : :
2970 : : /* Create the signed variants of *sizetype. */
2971 : 281305 : ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
2972 : 281305 : TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
2973 : 281305 : sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
2974 : 281305 : TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
2975 : 281305 : }
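
Two facts drive initialize_sizetypes: sizetype's precision is simply that of whichever integer type the target names in SIZETYPE, and bitsizetype needs LOG2_BITS_PER_UNIT extra bits plus one guard bit, since it counts bits where sizetype counts bytes. A standalone sketch under assumed LP64-style type sizes; every k-prefixed constant stands in for a target macro.

/* Editorial sketch, not GCC code: choosing sizetype and bitsizetype
   precision, with assumed LP64-style values for the target macros.  */
#include <algorithm>
#include <cstring>
#include <cstdio>

static const int kIntTypeSize = 32, kLongTypeSize = 64; /* assumed */
static const int kLog2BitsPerUnit = 3;                  /* 8-bit units */
static const int kMaxFixedModeSize = 128;               /* assumed */

static int
sizetype_precision (const char *sizetype_name)
{
  if (strcmp (sizetype_name, "unsigned int") == 0)
    return kIntTypeSize;
  if (strcmp (sizetype_name, "long unsigned int") == 0)
    return kLongTypeSize;
  return -1;  /* the other spellings handled above are elided here */
}

int
main ()
{
  int precision = sizetype_precision ("long unsigned int");
  /* bitsizetype measures sizes in bits, so it needs kLog2BitsPerUnit
     more bits than sizetype, plus one to catch overflow.  */
  int bprecision = std::min (precision + kLog2BitsPerUnit + 1,
                             kMaxFixedModeSize);
  printf ("sizetype: %d bits, bitsizetype: >= %d bits\n",
          precision, bprecision);
  return 0;
}
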
2976 : :
2977 : : /* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE
2978 : : or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2979 : : for TYPE, based on PRECISION and the signedness given by
2980 : : SGN. PRECISION need not correspond to a width supported
2981 : : natively by the hardware; for example, on a machine with 8-bit,
2982 : : 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2983 : : 61. */
2984 : :
2985 : : void
2986 : 17222684 : set_min_and_max_values_for_integral_type (tree type,
2987 : : int precision,
2988 : : signop sgn)
2989 : : {
2990 : : /* For bitfields with zero width we end up creating integer types
2991 : : with zero precision. Don't assign any minimum/maximum values
2992 : : to those types; they don't have any valid value. */
2993 : 17222684 : if (precision < 1)
2994 : : return;
2995 : :
2996 : 17222422 : gcc_assert (precision <= WIDE_INT_MAX_PRECISION);
2997 : :
2998 : 17222422 : TYPE_MIN_VALUE (type)
2999 : 34444844 : = wide_int_to_tree (type, wi::min_value (precision, sgn));
3000 : 17222422 : TYPE_MAX_VALUE (type)
3001 : 34473323 : = wide_int_to_tree (type, wi::max_value (precision, sgn));
3002 : : }
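
For an N-bit precision the extremes are the usual two's-complement bounds: [-2^(N-1), 2^(N-1) - 1] when signed and [0, 2^N - 1] when unsigned. A plain-integer sketch for precisions up to 64 bits; the helper is hypothetical and sidesteps wide_int, and the signed branch as written assumes PRECISION below 64.

/* Editorial sketch, not GCC code: the extreme values computed by
   set_min_and_max_values_for_integral_type, on plain host integers.  */
#include <cstdint>
#include <cstdio>

static void
integral_bounds (int precision, bool is_signed,
                 int64_t *min_val, uint64_t *max_val)
{
  if (is_signed)
    {
      /* [-2^(N-1), 2^(N-1) - 1]; assumes PRECISION < 64 so the
         negation below cannot overflow.  */
      *min_val = -(int64_t) (UINT64_C (1) << (precision - 1));
      *max_val = (UINT64_C (1) << (precision - 1)) - 1;
    }
  else
    {
      /* [0, 2^N - 1]; shift twice so PRECISION == 64 stays defined.  */
      *min_val = 0;
      *max_val = (UINT64_C (1) << (precision - 1) << 1) - 1;
    }
}

int
main ()
{
  int64_t mn; uint64_t mx;
  integral_bounds (7, true, &mn, &mx);   /* odd widths are fine: [-64, 63] */
  printf ("[%lld, %llu]\n", (long long) mn, (unsigned long long) mx);
  integral_bounds (23, false, &mn, &mx); /* [0, 8388607] */
  printf ("[%lld, %llu]\n", (long long) mn, (unsigned long long) mx);
  return 0;
}
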
3003 : :
3004 : : /* Set the extreme values of TYPE based on its precision in bits,
3005 : : then lay it out. Used when make_signed_type won't do
3006 : : because the tree code is not INTEGER_TYPE. */
3007 : :
3008 : : void
3009 : 5821336 : fixup_signed_type (tree type)
3010 : : {
3011 : 5821336 : int precision = TYPE_PRECISION (type);
3012 : :
3013 : 5821336 : set_min_and_max_values_for_integral_type (type, precision, SIGNED);
3014 : :
3015 : : /* Lay out the type: set its alignment, size, etc. */
3016 : 5821336 : layout_type (type);
3017 : 5821336 : }
3018 : :
3019 : : /* Set the extreme values of TYPE based on its precision in bits,
3020 : : then lay it out. This is used both in `make_unsigned_type'
3021 : : and for enumeral types. */
3022 : :
3023 : : void
3024 : 9542950 : fixup_unsigned_type (tree type)
3025 : : {
3026 : 9542950 : int precision = TYPE_PRECISION (type);
3027 : :
3028 : 9542950 : TYPE_UNSIGNED (type) = 1;
3029 : :
3030 : 9542950 : set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);
3031 : :
3032 : : /* Lay out the type: set its alignment, size, etc. */
3033 : 9542950 : layout_type (type);
3034 : 9542950 : }
3035 : :
3036 : : /* Construct an iterator for a bitfield that spans BITSIZE bits,
3037 : : starting at BITPOS.
3038 : :
3039 : : BITREGION_START is the bit position of the first bit in this
3040 : : sequence of bit fields. BITREGION_END is the last bit in this
3041 : : sequence. If these two fields are non-zero, we should restrict the
3042 : : memory access to that range. Otherwise, we are allowed to touch
3043 : : any adjacent non bit-fields.
3044 : : any adjacent non-bit-fields.
3045 : : ALIGN is the alignment of the underlying object in bits.
3046 : : VOLATILEP says whether the bitfield is volatile. */
3047 : :
3048 : 1181058 : bit_field_mode_iterator
3049 : : ::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3050 : : poly_int64 bitregion_start,
3051 : : poly_int64 bitregion_end,
3052 : 1181058 : unsigned int align, bool volatilep)
3053 : 1181058 : : m_mode (NARROWEST_INT_MODE), m_bitsize (bitsize),
3054 : 1181058 : m_bitpos (bitpos), m_bitregion_start (bitregion_start),
3055 : 1181058 : m_bitregion_end (bitregion_end), m_align (align),
3056 : 1181058 : m_volatilep (volatilep), m_count (0)
3057 : : {
3058 : 1181058 : if (known_eq (m_bitregion_end, 0))
3059 : : {
3060 : : /* We can assume that any aligned chunk of ALIGN bits that overlaps
3061 : : the bitfield is mapped and won't trap, provided that ALIGN isn't
3062 : : too large. The cap is the biggest required alignment for data,
3063 : : or at least the word size.  Force at least one such chunk. */
3064 : 719434 : unsigned HOST_WIDE_INT units
3065 : 1273488 : = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
3066 : 359717 : if (bitsize <= 0)
3067 : : bitsize = 1;
3068 : 359717 : HOST_WIDE_INT end = bitpos + bitsize + units - 1;
3069 : 359717 : m_bitregion_end = end - end % units - 1;
3070 : : }
3071 : 1181058 : }
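
When the caller passes no bit region, the constructor manufactures one: the chunk size is ALIGN capped at the larger of BIGGEST_ALIGNMENT and the word size, and the region is extended to the last bit of the aligned chunk that holds the field. A plain-integer sketch with assumed values for those two target constants.

/* Editorial sketch, not GCC code: deriving the default bitregion_end
   as in bit_field_mode_iterator's constructor.  */
#include <algorithm>
#include <cstdio>

static const long kBiggestAlignment = 128; /* assumed target value */
static const long kBitsPerWord = 64;       /* assumed target value */

static long
default_bitregion_end (long bitpos, long bitsize, long align)
{
  /* Any aligned chunk of ALIGN bits overlapping the field is assumed
     mapped, provided ALIGN does not exceed the cap.  */
  long units = std::min (align, std::max (kBiggestAlignment, kBitsPerWord));
  if (bitsize <= 0)
    bitsize = 1;
  /* Last bit of the chunk containing the end of the field.  */
  long end = bitpos + bitsize + units - 1;
  return end - end % units - 1;
}

int
main ()
{
  /* A 4-bit field at bit 70 in a 32-bit-aligned object: the region
     runs to bit 95, the end of the chunk [64, 96).  */
  printf ("%ld\n", default_bitregion_end (70, 4, 32));
  return 0;
}
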
3072 : :
3073 : : /* Calls to this function return successively larger modes that can be used
3074 : : to represent the bitfield. Return true if another bitfield mode is
3075 : : available, storing it in *OUT_MODE if so. */
3076 : :
3077 : : bool
3078 : 1182071 : bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
3079 : : {
3080 : 1182071 : scalar_int_mode mode;
3081 : 1947991 : for (; m_mode.exists (&mode); m_mode = GET_MODE_WIDER_MODE (mode))
3082 : : {
3083 : 1947991 : unsigned int unit = GET_MODE_BITSIZE (mode);
3084 : :
3085 : : /* Skip modes that don't have full precision. */
3086 : 1947991 : if (unit != GET_MODE_PRECISION (mode))
3087 : 765920 : continue;
3088 : :
3089 : : /* Stop if the mode is too wide to handle efficiently. */
3090 : 3895982 : if (unit > MAX_FIXED_MODE_SIZE)
3091 : : break;
3092 : :
3093 : : /* Don't deliver more than one multiword mode; the smallest one
3094 : : should be used. */
3095 : 1935680 : if (m_count > 0 && unit > BITS_PER_WORD)
3096 : : break;
3097 : :
3098 : : /* Skip modes that are too small. */
3099 : 1935574 : unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
3100 : 1935574 : unsigned HOST_WIDE_INT subend = substart + m_bitsize;
3101 : 1935574 : if (subend > unit)
3102 : 765920 : continue;
3103 : :
3104 : : /* Stop if the mode goes outside the bitregion. */
3105 : 1169654 : HOST_WIDE_INT start = m_bitpos - substart;
3106 : 1169654 : if (maybe_ne (m_bitregion_start, 0)
3107 : 1169654 : && maybe_lt (start, m_bitregion_start))
3108 : : break;
3109 : 1169624 : HOST_WIDE_INT end = start + unit;
3110 : 1169624 : if (maybe_gt (end, m_bitregion_end + 1))
3111 : : break;
3112 : :
3113 : : /* Stop if the mode requires too much alignment. */
3114 : 1154233 : if (GET_MODE_ALIGNMENT (mode) > m_align
3115 : 1154233 : && targetm.slow_unaligned_access (mode, m_align))
3116 : : break;
3117 : :
3118 : 1154233 : *out_mode = mode;
3119 : 1154233 : m_mode = GET_MODE_WIDER_MODE (mode);
3120 : 1154233 : m_count++;
3121 : 1154233 : return true;
3122 : : }
3123 : : return false;
3124 : : }
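
Setting aside the mode bookkeeping, next_mode is a filter over successively wider access widths: a width is rejected when the field would straddle two aligned units of that width, or when the aligned access would reach outside the bit region. A standalone sketch over plain power-of-two widths; the helper name and the example numbers are illustrative.

/* Editorial sketch, not GCC code: the width filter inside next_mode,
   over plain power-of-two widths instead of machine modes.  */
#include <cstdio>

/* True if one aligned access of UNIT bits covers the field
   [BITPOS, BITPOS + BITSIZE) without leaving [REGION_START, REGION_END].  */
static bool
unit_covers_field (long unit, long bitpos, long bitsize,
                   long region_start, long region_end)
{
  long substart = bitpos % unit;        /* field offset within the unit */
  if (substart + bitsize > unit)        /* field straddles two units */
    return false;
  long start = bitpos - substart;       /* unit-aligned access start */
  if (start < region_start || start + unit - 1 > region_end)
    return false;                       /* access escapes the region */
  return true;
}

int
main ()
{
  /* A 4-bit field at bit 70 in region [64, 127]: an 8-bit access would
     split the field, so the candidates are 16, 32 and 64 bits, in the
     order successive next_mode calls would yield them.  */
  for (long unit = 8; unit <= 64; unit *= 2)
    if (unit_covers_field (unit, 70, 4, 64, 127))
      printf ("candidate width: %ld bits\n", unit);
  return 0;
}
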
3125 : :
3126 : : /* Return true if smaller modes are generally preferred for this kind
3127 : : of bitfield. */
3128 : :
3129 : : bool
3130 : 1139835 : bit_field_mode_iterator::prefer_smaller_modes ()
3131 : : {
3132 : 1139835 : return (m_volatilep
3133 : 1139835 : ? targetm.narrow_volatile_bitfield ()
3134 : 1139835 : : !SLOW_BYTE_ACCESS);
3135 : : }
3136 : :
3137 : : /* Find the best machine mode to use when referencing a bit field of length
3138 : : BITSIZE bits starting at BITPOS.
3139 : :
3140 : : BITREGION_START is the bit position of the first bit in this
3141 : : sequence of bit fields. BITREGION_END is the last bit in this
3142 : : sequence. If these two fields are non-zero, we should restrict the
3143 : : memory access to that range. Otherwise, we are allowed to touch
3144 : : any adjacent non-bit-fields.
3145 : :
3146 : : The chosen mode must have no more than LARGEST_MODE_BITSIZE bits.
3147 : : INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller
3148 : : doesn't want to apply a specific limit.
3149 : :
3150 : : If no mode meets all these conditions, we return VOIDmode.
3151 : :
3152 : : The underlying object is known to be aligned to a boundary of ALIGN bits.
3153 : :
3154 : : If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
3155 : : smallest mode meeting these conditions.
3156 : :
3157 : : If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
3158 : : largest mode (but a mode no wider than UNITS_PER_WORD) that meets
3159 : : all the conditions.
3160 : :
3161 : : If VOLATILEP is true, the narrow_volatile_bitfield target hook is used to
3162 : : decide which of the above modes should be used. */
3163 : :
3164 : : bool
3165 : 974931 : get_best_mode (int bitsize, int bitpos,
3166 : : poly_uint64 bitregion_start, poly_uint64 bitregion_end,
3167 : : unsigned int align,
3168 : : unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
3169 : : scalar_int_mode *best_mode)
3170 : : {
3171 : 974931 : bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
3172 : 974931 : bitregion_end, align, volatilep);
3173 : 974931 : scalar_int_mode mode;
3174 : 974931 : bool found = false;
3175 : 974931 : while (iter.next_mode (&mode)
3176 : : /* ??? For historical reasons, reject modes that would normally
3177 : : receive greater alignment, even if unaligned accesses are
3178 : : acceptable. This has both advantages and disadvantages.
3179 : : Removing this check means that something like:
3180 : :
3181 : : struct s { unsigned int x; unsigned int y; };
3182 : : int f (struct s *s) { return s->x == 0 && s->y == 0; }
3183 : :
3184 : : can be implemented using a single load and compare on
3185 : : 64-bit machines that have no alignment restrictions.
3186 : : For example, on powerpc64-linux-gnu, we would generate:
3187 : :
3188 : : ld 3,0(3)
3189 : : cntlzd 3,3
3190 : : srdi 3,3,6
3191 : : blr
3192 : :
3193 : : rather than:
3194 : :
3195 : : lwz 9,0(3)
3196 : : cmpwi 7,9,0
3197 : : bne 7,.L3
3198 : : lwz 3,4(3)
3199 : : cntlzw 3,3
3200 : : srwi 3,3,5
3201 : : extsw 3,3
3202 : : blr
3203 : : .p2align 4,,15
3204 : : .L3:
3205 : : li 3,0
3206 : : blr
3207 : :
3208 : : However, accessing more than one field can make life harder
3209 : : for the gimple optimizers. For example, gcc.dg/vect/bb-slp-5.c
3210 : : has a series of unsigned short copies followed by a series of
3211 : : unsigned short comparisons. With this check, both the copies
3212 : : and comparisons remain 16-bit accesses and FRE is able
3213 : : to eliminate the latter. Without the check, the comparisons
3214 : : can be done using 2 64-bit operations, which FRE isn't able
3215 : : to handle in the same way.
3216 : :
3217 : : Either way, it would probably be worth disabling this check
3218 : : during expand. One particular example where removing the
3219 : : check would help is the get_best_mode call in store_bit_field.
3220 : : If we are given a memory bitregion of 128 bits that is aligned
3221 : : to a 64-bit boundary, and the bitfield we want to modify is
3222 : : in the second half of the bitregion, this check causes
3223 : : store_bitfield to turn the memory into a 64-bit reference
3224 : : to the _first_ half of the region. We later use
3225 : : adjust_bitfield_address to get a reference to the correct half,
3226 : : but doing so looks to adjust_bitfield_address as though we are
3227 : : moving past the end of the original object, so it drops the
3228 : : associated MEM_EXPR and MEM_OFFSET. Removing the check
3229 : : causes store_bit_field to keep a 128-bit memory reference,
3230 : : so that the final bitfield reference still has a MEM_EXPR
3231 : : and MEM_OFFSET. */
3232 : 954456 : && GET_MODE_ALIGNMENT (mode) <= align
3233 : 1918894 : && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize)
3234 : : {
3235 : 940297 : *best_mode = mode;
3236 : 940297 : found = true;
3237 : 940297 : if (iter.prefer_smaller_modes ())
3238 : : break;
3239 : : }
3240 : :
3241 : 974931 : return found;
3242 : : }
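
Given that narrowest-first candidate sequence, the selection policy in the loop above is simple: when smaller modes are preferred the first acceptable candidate wins, otherwise the widest acceptable one seen so far does. A sketch of just that step over a hypothetical candidate list.

/* Editorial sketch, not GCC code: get_best_mode's selection policy
   over a narrowest-first list of candidate access widths.  */
#include <cstdio>

static long
best_width (const long *widths, int n, bool prefer_smaller, long max_width)
{
  long best = -1;
  for (int i = 0; i < n; i++)
    {
      if (widths[i] > max_width)   /* candidate exceeds the caller's cap */
        break;
      best = widths[i];
      if (prefer_smaller)          /* first acceptable candidate wins */
        break;
    }                              /* else keep widening while possible */
  return best;
}

int
main ()
{
  const long candidates[] = { 16, 32, 64 };
  printf ("%ld\n", best_width (candidates, 3, true, 64));  /* 16 */
  printf ("%ld\n", best_width (candidates, 3, false, 64)); /* 64 */
  return 0;
}
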
3243 : :
3244 : : /* Get the minimal and maximal values for MODE (signed or unsigned
3245 : : depending on SIGN). The returned constants are made usable in TARGET_MODE. */
3246 : :
3247 : : void
3248 : 62129171 : get_mode_bounds (scalar_int_mode mode, int sign,
3249 : : scalar_int_mode target_mode,
3250 : : rtx *mmin, rtx *mmax)
3251 : : {
3252 : 62129171 : unsigned size = GET_MODE_PRECISION (mode);
3253 : 62129171 : unsigned HOST_WIDE_INT min_val, max_val;
3254 : :
3255 : 62129171 : gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
3256 : :
3257 : : /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */
3258 : 62129171 : if (mode == BImode)
3259 : : {
3260 : : if (STORE_FLAG_VALUE < 0)
3261 : : {
3262 : : min_val = STORE_FLAG_VALUE;
3263 : : max_val = 0;
3264 : : }
3265 : : else
3266 : : {
3267 : : min_val = 0;
3268 : : max_val = STORE_FLAG_VALUE;
3269 : : }
3270 : : }
3271 : 62129171 : else if (sign)
3272 : : {
3273 : 56538034 : min_val = -(HOST_WIDE_INT_1U << (size - 1));
3274 : 56538034 : max_val = (HOST_WIDE_INT_1U << (size - 1)) - 1;
3275 : : }
3276 : : else
3277 : : {
3278 : 5591137 : min_val = 0;
3279 : 5591137 : max_val = (HOST_WIDE_INT_1U << (size - 1) << 1) - 1;
3280 : : }
3281 : :
3282 : 62129171 : *mmin = gen_int_mode (min_val, target_mode);
3283 : 62129171 : *mmax = gen_int_mode (max_val, target_mode);
3284 : 62129171 : }
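
The unsigned maximum above is built as (1 << (size - 1) << 1) - 1 rather than (1 << size) - 1: when SIZE equals the host word width, a single full-width shift would be undefined, while two narrower shifts are well defined and wrap to the all-ones mask. A standalone demonstration of the idiom.

/* Editorial sketch, not GCC code: the double-shift idiom from
   get_mode_bounds for forming an all-ones mask safely.  */
#include <cstdint>
#include <cstdio>

static uint64_t
unsigned_max (unsigned size)   /* SIZE in [1, 64] */
{
  /* (1 << size) - 1 would be undefined for size == 64; shifting by
     size - 1 and then by 1 keeps every shift in range.  */
  return (UINT64_C (1) << (size - 1) << 1) - 1;
}

int
main ()
{
  printf ("%llx\n", (unsigned long long) unsigned_max (8));  /* ff */
  printf ("%llx\n", (unsigned long long) unsigned_max (64)); /* ffffffffffffffff */
  return 0;
}
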
3285 : :
3286 : : #include "gt-stor-layout.h"
|