Line data Source code
1 : /* Basic IPA utilities for type inheritance graph construction and
2 : devirtualization.
3 : Copyright (C) 2013-2026 Free Software Foundation, Inc.
4 : Contributed by Jan Hubicka
5 :
6 : This file is part of GCC.
7 :
8 : GCC is free software; you can redistribute it and/or modify it under
9 : the terms of the GNU General Public License as published by the Free
10 : Software Foundation; either version 3, or (at your option) any later
11 : version.
12 :
13 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 : for more details.
17 :
18 : You should have received a copy of the GNU General Public License
19 : along with GCC; see the file COPYING3. If not see
20 : <http://www.gnu.org/licenses/>. */
21 :
22 : /* Brief vocabulary:
23 : ODR = One Definition Rule
24 : In short, the ODR states that:
25 : 1 In any translation unit, a template, type, function, or object can
26 : have no more than one definition. Some of these can have any number
27 : of declarations. A definition provides an instance.
28 : 2 In the entire program, an object or non-inline function cannot have
29 : more than one definition; if an object or function is used, it must
30 : have exactly one definition. You can declare an object or function
31 : that is never used, in which case you don't have to provide
32 : a definition. In no event can there be more than one definition.
33 : 3 Some things, like types, templates, and extern inline functions, can
34 : be defined in more than one translation unit. For a given entity,
35 : each definition must be the same. Non-extern objects and functions
36 : in different translation units are different entities, even if their
37 : names and types are the same.
38 :
39 : OTR = OBJ_TYPE_REF
40 : This is the Gimple representation of type information of a polymorphic call.
41 : It contains two parameters:
42 : otr_type is a type of class whose method is called.
43 : otr_token is the index into virtual table where address is taken.
44 :
45 : BINFO
46 : This is the type inheritance information attached to each tree
47 : RECORD_TYPE by the C++ frontend. It provides information about base
48 : types and virtual tables.
49 :
50 : BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 : BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 : BINFO_VTABLE.
53 :
54 : Base types of a given type are enumerated by BINFO_BASE_BINFO
55 : vector.  Members of this vector are not BINFOs associated
56 : with a base type. Rather they are new copies of BINFOs
57 : (base BINFOs). Their virtual tables may differ from
58 : virtual table of the base type. Also BINFO_OFFSET specifies
59 : offset of the base within the type.
60 :
61 : In the case of single inheritance, the virtual table is shared
62 : and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 : inheritance the individual virtual tables are pointed to by
64 : BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of
65 : the binfo associated with the base type).
66 :
67 : BINFO lookup for a given base type and offset can be done by
68 : get_binfo_at_offset. It returns proper BINFO whose virtual table
69 : can be used for lookup of virtual methods associated with the
70 : base type.
71 :
72 : token
73 : This is an index of virtual method in virtual table associated
74 : to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 : or from DECL_VINDEX of a given virtual table.
76 :
77 : polymorphic (indirect) call
78 : This is callgraph representation of virtual method call. Every
79 : polymorphic call contains otr_type and otr_token taken from
80 : original OBJ_TYPE_REF at callgraph construction time.
81 :
82 : What we do here:
83 :
84 : build_type_inheritance_graph triggers a construction of the type inheritance
85 : graph.
86 :
87 : We reconstruct it based on types of methods we see in the unit.
88 : This means that the graph is not complete. Types with no methods are not
89 : inserted into the graph. Also types without virtual methods are not
90 : represented at all, though it may be easy to add this.
91 :
92 : The inheritance graph is represented as follows:
93 :
94 : Vertices are structures odr_type. Every odr_type may correspond
95 : to one or more tree type nodes that are equivalent by ODR rule.
96 : (the multiple type nodes appear only with linktime optimization)
97 :
98 : Edges are represented by odr_type->base and odr_type->derived_types.
99 : At the moment we do not track offsets of types for multiple inheritance.
100 : Adding this is easy.
101 :
102 : possible_polymorphic_call_targets returns, given the parameters found in an
103 : indirect polymorphic edge, all possible polymorphic call targets of the call.
104 :
105 : pass_ipa_devirt performs simple speculative devirtualization.
106 : */
107 :
108 : #include "config.h"
109 : #include "system.h"
110 : #include "coretypes.h"
111 : #include "backend.h"
112 : #include "rtl.h"
113 : #include "tree.h"
114 : #include "gimple.h"
115 : #include "alloc-pool.h"
116 : #include "tree-pass.h"
117 : #include "cgraph.h"
118 : #include "lto-streamer.h"
119 : #include "fold-const.h"
120 : #include "print-tree.h"
121 : #include "calls.h"
122 : #include "ipa-utils.h"
123 : #include "gimple-iterator.h"
124 : #include "gimple-fold.h"
125 : #include "symbol-summary.h"
126 : #include "tree-vrp.h"
127 : #include "sreal.h"
128 : #include "ipa-cp.h"
129 : #include "ipa-prop.h"
130 : #include "ipa-fnsummary.h"
131 : #include "demangle.h"
132 : #include "dbgcnt.h"
133 : #include "gimple-pretty-print.h"
134 : #include "intl.h"
135 : #include "stringpool.h"
136 : #include "attribs.h"
137 : #include "data-streamer.h"
138 : #include "lto-streamer.h"
139 : #include "streamer-hooks.h"
140 :
141 : /* Hash based set of pairs of types. */
struct type_pair
{
  /* First type of the pair; a NULL first member marks an empty hash slot
     (see default_hash_traits<type_pair>::is_empty below).  */
  tree first;
  tree second;
};
147 :
template <>
struct default_hash_traits <type_pair>
  : typed_noop_remove <type_pair>
{
  GTY((skip)) typedef type_pair value_type;
  GTY((skip)) typedef type_pair compare_type;
  /* Hash by XOR of the two TYPE_UIDs.  This is symmetric, so callers are
     expected to canonicalize the pair ordering by TYPE_UID (as
     odr_subtypes_equivalent_p does).  */
  static hashval_t
  hash (type_pair p)
  {
    return TYPE_UID (p.first) ^ TYPE_UID (p.second);
  }
  static const bool empty_zero_p = true;
  /* A pair whose first member is NULL marks an empty slot.  */
  static bool
  is_empty (type_pair p)
  {
    return p.first == NULL;
  }
  /* Deletion from the table is never performed.  */
  static bool
  is_deleted (type_pair p ATTRIBUTE_UNUSED)
  {
    return false;
  }
  static bool
  equal (const type_pair &a, const type_pair &b)
  {
    return a.first==b.first && a.second == b.second;
  }
  static void
  mark_empty (type_pair &e)
  {
    e.first = NULL;
  }
};
181 :
182 : /* HACK alert: this is used to communicate with ipa-inline-transform that
183 : thunk is being expanded and there is no need to clear the polymorphic
184 : call target cache. */
185 : bool thunk_expansion;
186 :
187 : static bool odr_types_equivalent_p (tree, tree, bool, bool *,
188 : hash_set<type_pair> *,
189 : location_t, location_t);
190 : static void warn_odr (tree t1, tree t2, tree st1, tree st2,
191 : bool warn, bool *warned, const char *reason);
192 :
193 : static bool odr_violation_reported = false;
194 :
195 :
196 : /* Pointer set of all call targets appearing in the cache. */
197 : static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
198 :
199 : /* The node of type inheritance graph. For each type unique in
200 : One Definition Rule (ODR) sense, we produce one node linking all
201 : main variants of types equivalent to it, bases and derived types. */
202 :
struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here? */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed over a table with it.  */
  bool rtti_broken;
  /* Set when the canonical type is determined using the type name.  */
  bool tbaa_enabled;
  /* Set when we determined there are no derived construction vtables.  */
  bool no_derived_construction_vtables;
};
233 :
234 : /* ODR types also stored into ODR_TYPE vector to allow consistent
235 : walking. Bases appear before derived types. Vector is garbage collected
236 : so we won't end up visiting empty types. */
237 :
238 : static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
239 : #define odr_types (*odr_types_ptr)
240 :
241 : /* Return TRUE if all derived types of T are known and thus
242 : we may consider the walk of derived type complete.
243 :
244 : This is typically true only for final anonymous namespace types and types
245 : defined within functions (that may be COMDAT and thus shared across units,
246 : but with the same set of derived types). */
247 :
bool
type_all_derivations_known_p (const_tree t)
{
  /* C++ 'final' guarantees no further derivation anywhere.  */
  if (TYPE_FINAL_P (t))
    return true;
  /* In an ltrans unit other partitions may contain derivations we
     cannot see.  */
  if (flag_ltrans)
    return false;
  /* Non-C++ types may have IDENTIFIER_NODE here, do not crash.  */
  if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
    return true;
  /* Anonymous namespace types cannot be derived from outside this unit.  */
  if (type_in_anonymous_namespace_p (t))
    return true;
  /* Function-local types cannot be derived from elsewhere either.  */
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}
262 :
263 : /* Return TRUE if type's constructors are all visible. */
264 :
265 : static bool
266 275162 : type_all_ctors_visible_p (tree t)
267 : {
268 275162 : return !flag_ltrans
269 275046 : && symtab->state >= CONSTRUCTION
270 : /* We cannot always use type_all_derivations_known_p.
271 : For function local types we must assume case where
272 : the function is COMDAT and shared in between units.
273 :
274 : TODO: These cases are quite easy to get, but we need
275 : to keep track of C++ privatizing via -Wno-weak
276 : as well as the IPA privatizing. */
277 550208 : && type_in_anonymous_namespace_p (t);
278 : }
279 :
280 : /* Return true if VTABLE is a virtual table of an anonymous namespace
281 : type and it is not the main virtual table for its type. */
282 :
static bool
anonymous_construction_vtable_p (tree vtable)
{
  if (!DECL_VIRTUAL_P (vtable)
      || !type_in_anonymous_namespace_p (DECL_CONTEXT (vtable)))
    return false;
  /* Compare VTABLE against the main vtable of its type; BINFO_VTABLE may
     be wrapped in a POINTER_PLUS_EXPR, so strip it to get the decl.  */
  tree vtable2 = BINFO_VTABLE (TYPE_BINFO (DECL_CONTEXT (vtable)));
  if (TREE_CODE (vtable2) == POINTER_PLUS_EXPR)
    vtable2 = TREE_OPERAND (TREE_OPERAND (vtable2, 0), 0);
  /* If it is not the main vtable, it is a construction vtable.  */
  return vtable2 != vtable;
}
294 :
295 : /* Set if construction vtables are computed. */
296 : static bool construction_vtables_detected = false;
297 :
298 : /* Mark all bases of T as having derived construction vtables. */
299 :
300 : static void
301 32 : mark_derived_construction_vtables (odr_type t)
302 : {
303 80 : for (odr_type b: t->bases)
304 : {
305 16 : b->no_derived_construction_vtables = false;
306 16 : mark_derived_construction_vtables (b);
307 : }
308 32 : }
309 :
310 : /* Watch removal of construction vtables so we recompute their
311 : existence. */
312 :
313 : void
314 94 : construction_vtable_hook (varpool_node *v, void *)
315 : {
316 94 : if (anonymous_construction_vtable_p (v->decl))
317 9 : construction_vtables_detected = false;
318 94 : }
319 :
320 : /* Return TRUE if type may have instance. */
321 :
static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  /* All constructors are visible; if the main vtable is emitted the type
     is instantiated.  Strip the POINTER_PLUS_EXPR wrapper to get the
     vtable decl itself.  */
  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  if (vnode && vnode->definition)
    return true;

  /* If T is derived, we may see only the construction vtable.
     To find them, we need to walk symbol table.  Cache the result
     and only recompute when some vtables are removed.  This only
     happens in unreachable node removal, which is only called
     constant number of times during computation.  */
  odr_type odr_t = get_odr_type (t);
  if (odr_t->derived_types.length () && !construction_vtables_detected)
    {
      static bool hook_registered = false;
      if (!hook_registered)
	{
	  symtab->add_varpool_removal_hook (construction_vtable_hook, NULL);
	  hook_registered = true;
	}
      /* Start optimistic, then clear the flag on all bases of each type
	 whose anonymous construction vtable is emitted.  */
      for (odr_type t: odr_types)
	if (t)
	  t->no_derived_construction_vtables = true;
      FOR_EACH_VARIABLE (vnode)
	if (vnode->definition
	    && anonymous_construction_vtable_p (vnode->decl))
	  mark_derived_construction_vtables
		 (get_odr_type (DECL_CONTEXT (vnode->decl)));
      construction_vtables_detected = true;
    }
  return !odr_t->no_derived_construction_vtables;
}
365 :
366 : /* Return true if T or type derived from T may have instance. */
367 :
368 : static bool
369 9 : type_or_derived_type_possibly_instantiated_p (odr_type t)
370 : {
371 9 : if (type_possibly_instantiated_p (t->type))
372 : return true;
373 9 : for (auto derived : t->derived_types)
374 3 : if (type_or_derived_type_possibly_instantiated_p (derived))
375 : return true;
376 : return false;
377 : }
378 :
379 : /* Hash used to unify ODR types based on their mangled name and for anonymous
380 : namespace types. */
381 :
struct odr_name_hasher : pointer_hash <odr_type_d>
{
  /* Lookups are performed directly with a tree type node.  */
  typedef union tree_node *compare_type;
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
  static inline void remove (odr_type_d *);
};
389 :
/* Return true if T can be hashed by ODR name: outside of LTO all main
   variants are unique; within LTO only ODR types qualify.  */

static bool
can_be_name_hashed_p (tree t)
{
  return (!in_lto_p || odr_type_p (t));
}
395 :
396 : /* Hash type by its ODR name. */
397 :
static hashval_t
hash_odr_name (const_tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* Within LTO, ODR types are unified by mangled name, so hash that.  */
  gcc_checking_assert (TYPE_NAME (t)
		       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
  return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
}
416 :
417 : /* Return the computed hashcode for ODR_TYPE. */
418 :
inline hashval_t
odr_name_hasher::hash (const odr_type_d *odr_type)
{
  /* Delegate to the ODR-name hash of the leader type.  */
  return hash_odr_name (odr_type->type);
}
424 :
425 : /* For languages with One Definition Rule, work out if
426 : types are the same based on their name.
427 :
428 : This is non-trivial for LTO where minor differences in
429 : the type representation may have prevented type merging
430 : to merge two copies of otherwise equivalent type.
431 :
432 : Until we start streaming mangled type names, this function works
433 : only for polymorphic types.
434 : */
435 :
bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  if (type1 == type2)
    return true;

  /* Outside of LTO main variants are unique, so pointer identity above
     is the full answer.  */
  if (!in_lto_p)
    return false;

  /* Anonymous namespace types are never duplicated.  */
  if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
      || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
    return false;

  /* If both types have mangled names, check whether those are the same.
     Watch for anonymous types which are all mangled as "<anon>".  */
  if (!type_with_linkage_p (type1) || !type_with_linkage_p (type2))
    return false;
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
}
465 :
466 : /* Return true if we can decide on ODR equivalency.
467 :
468 : In non-LTO it can always be decided; in LTO however it depends on whether
469 : the type has ODR info attached. */
470 :
bool
types_odr_comparable (tree t1, tree t2)
{
  /* Outside of LTO everything is comparable; within LTO we need either
     pointer-identical main variants or ODR info on both types.  */
  return (!in_lto_p
	  || TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)
	  || (odr_type_p (TYPE_MAIN_VARIANT (t1))
	      && odr_type_p (TYPE_MAIN_VARIANT (t2))));
}
479 :
480 : /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
481 : known, be conservative and return false. */
482 :
483 : bool
484 996815 : types_must_be_same_for_odr (tree t1, tree t2)
485 : {
486 996815 : if (types_odr_comparable (t1, t2))
487 996815 : return types_same_for_odr (t1, t2);
488 : else
489 0 : return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
490 : }
491 :
492 : /* If T is compound type, return type it is based on. */
493 :
494 : static tree
495 6767 : compound_type_base (const_tree t)
496 : {
497 6767 : if (TREE_CODE (t) == ARRAY_TYPE
498 6629 : || POINTER_TYPE_P (t)
499 5072 : || TREE_CODE (t) == COMPLEX_TYPE
500 5063 : || VECTOR_TYPE_P (t))
501 1708 : return TREE_TYPE (t);
502 5059 : if (TREE_CODE (t) == METHOD_TYPE)
503 422 : return TYPE_METHOD_BASETYPE (t);
504 4637 : if (TREE_CODE (t) == OFFSET_TYPE)
505 0 : return TYPE_OFFSET_BASETYPE (t);
506 : return NULL_TREE;
507 : }
508 :
509 : /* Return true if T is either ODR type or compound type based from it.
510 : If the function return true, we know that T is a type originating from C++
511 : source even at link-time. */
512 :
bool
odr_or_derived_type_p (const_tree t)
{
  do
    {
      if (odr_type_p (TYPE_MAIN_VARIANT (t)))
	return true;
      /* Function type is a tricky one.  Basically we can consider it
	 ODR derived if return type or any of the parameters is.
	 We need to check all parameters because LTO streaming merges
	 common types (such as void) and they are not considered ODR then.  */
      if (TREE_CODE (t) == FUNCTION_TYPE)
	{
	  if (TYPE_METHOD_BASETYPE (t))
	    t = TYPE_METHOD_BASETYPE (t);
	  else
	    {
	      if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
		return true;
	      for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
		if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t))))
		  return true;
	      return false;
	    }
	}
      else
	/* Strip one level of pointer/array/etc. and retry; NULL when T is
	   not a compound type, ending the loop.  */
	t = compound_type_base (t);
    }
  while (t);
  /* T is NULL_TREE here, i.e. false.  */
  return t;
}
544 :
545 : /* Compare types T1 and T2 and return true if they are
546 : equivalent. */
547 :
inline bool
odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
  gcc_checking_assert (TYPE_MAIN_VARIANT (t1) == t1);
  if (t1 == t2)
    return true;
  /* Outside of LTO main variants are unique; pointer inequality above
     settles it.  */
  if (!in_lto_p)
    return false;
  /* Check for anonymous namespaces.  Those never compare equal by name.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
  /* Otherwise the types are ODR-equal iff their mangled names match.  */
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
}
568 :
569 : /* Free ODR type V. */
570 :
571 : inline void
572 1300043 : odr_name_hasher::remove (odr_type_d *v)
573 : {
574 1300043 : v->bases.release ();
575 1300043 : v->derived_types.release ();
576 1300043 : if (v->types_set)
577 0 : delete v->types_set;
578 1300043 : ggc_free (v);
579 1300043 : }
580 :
581 : /* ODR type hash used to look up ODR type based on tree type node. */
582 :
583 : typedef hash_table<odr_name_hasher> odr_hash_type;
584 : static odr_hash_type *odr_hash;
585 :
586 : /* All enums defined and accessible for the unit. */
587 : static GTY(()) vec <tree, va_gc> *odr_enums;
588 :
589 : /* Information we hold about value defined by an enum type. */
struct odr_enum_val
{
  /* Name of the enumerator.  */
  const char *name;
  /* Its value.  */
  wide_int val;
  /* Location of its definition.  */
  location_t locus;
};
596 :
597 : /* Information about enum values. */
struct odr_enum
{
  /* Location of the enum definition.  */
  location_t locus;
  /* All values of the enum.  */
  auto_vec<odr_enum_val, 0> vals;
  /* Set once a mismatch for this enum was reported.  */
  bool warned;
};
604 :
605 : /* A table of all ODR enum definitions. */
606 : static hash_map <nofree_string_hash, odr_enum> *odr_enum_map = NULL;
607 : static struct obstack odr_enum_obstack;
608 :
609 : /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
610 : void
611 0 : set_type_binfo (tree type, tree binfo)
612 : {
613 0 : for (; type; type = TYPE_NEXT_VARIANT (type))
614 0 : if (COMPLETE_TYPE_P (type))
615 0 : TYPE_BINFO (type) = binfo;
616 : else
617 0 : gcc_assert (!TYPE_BINFO (type));
618 0 : }
619 :
620 : /* Return true if type variants match.
621 : This assumes that we already verified that T1 and T2 are variants of the
622 : same type. */
623 :
624 : static bool
625 7924 : type_variants_equivalent_p (tree t1, tree t2)
626 : {
627 7924 : if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
628 : return false;
629 :
630 7917 : if (comp_type_attributes (t1, t2) != 1)
631 : return false;
632 :
633 15832 : if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
634 8242 : && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
635 : return false;
636 :
637 : return true;
638 : }
639 :
640 : /* Compare T1 and T2 based on name or structure. */
641 :
static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair> *visited,
			   location_t loc1, location_t loc2)
{

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
	  && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    return false;

  /* For ODR types be sure to compare their names.
     To support -Wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2))
    {
      if (t1 != t2
	  && odr_type_p (TYPE_MAIN_VARIANT (t1))
	  && get_odr_type (TYPE_MAIN_VARIANT (t1), true)->odr_violated)
	return false;
      if (!types_same_for_odr (t1, t2))
	return false;
      if (!type_variants_equivalent_p (t1, t2))
	return false;
      /* Limit recursion: If subtypes are ODR types and we know
	 that they are same, be happy.  */
      if (odr_type_p (TYPE_MAIN_VARIANT (t1)))
	return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if (AGGREGATE_TYPE_P (t1)
      && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  /* Canonicalize the pair by TYPE_UID; the hash (XOR of UIDs) is
     symmetric, so the set lookup must be order-independent.  */
  type_pair pair={TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
  if (TYPE_UID (TYPE_MAIN_VARIANT (t1)) > TYPE_UID (TYPE_MAIN_VARIANT (t2)))
    {
      pair.first = TYPE_MAIN_VARIANT (t2);
      pair.second = TYPE_MAIN_VARIANT (t1);
    }
  /* A pair already being visited is assumed equivalent; this breaks the
     recursion on mutually referencing types.  */
  if (visited->add (pair))
    return true;
  if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
			       false, NULL, visited, loc1, loc2))
    return false;
  if (!type_variants_equivalent_p (t1, t2))
    return false;
  return true;
}
703 :
704 : /* Return true if DECL1 and DECL2 are identical methods. Consider
705 : name equivalent to name.localalias.xyz. */
706 :
707 : static bool
708 120 : methods_equal_p (tree decl1, tree decl2)
709 : {
710 120 : if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
711 : return true;
712 0 : const char sep = symbol_table::symbol_suffix_separator ();
713 :
714 0 : const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
715 0 : const char *ptr1 = strchr (name1, sep);
716 0 : int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
717 :
718 0 : const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
719 0 : const char *ptr2 = strchr (name2, sep);
720 0 : int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
721 :
722 0 : if (len1 != len2)
723 : return false;
724 0 : return !strncmp (name1, name2, len1);
725 : }
726 :
727 : /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
728 : violation warnings. */
729 :
730 : void
731 63 : compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
732 : {
733 63 : int n1, n2;
734 :
735 63 : if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
736 : {
737 0 : odr_violation_reported = true;
738 0 : if (DECL_VIRTUAL_P (prevailing->decl))
739 : {
740 0 : varpool_node *tmp = prevailing;
741 0 : prevailing = vtable;
742 0 : vtable = tmp;
743 : }
744 0 : auto_diagnostic_group d;
745 0 : if (warning_at (DECL_SOURCE_LOCATION
746 : (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
747 0 : OPT_Wodr,
748 : "virtual table of type %qD violates one definition rule",
749 0 : DECL_CONTEXT (vtable->decl)))
750 0 : inform (DECL_SOURCE_LOCATION (prevailing->decl),
751 : "variable of same assembler name as the virtual table is "
752 : "defined in another translation unit");
753 0 : return;
754 0 : }
755 63 : if (!prevailing->definition || !vtable->definition)
756 : return;
757 :
758 : /* If we do not stream ODR type info, do not bother to do useful compare. */
759 16 : if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
760 16 : || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
761 : return;
762 :
763 16 : odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
764 :
765 16 : if (class_type->odr_violated)
766 : return;
767 :
768 40 : for (n1 = 0, n2 = 0; true; n1++, n2++)
769 : {
770 56 : struct ipa_ref *ref1, *ref2;
771 56 : bool end1, end2;
772 :
773 56 : end1 = !prevailing->iterate_reference (n1, ref1);
774 56 : end2 = !vtable->iterate_reference (n2, ref2);
775 :
776 : /* !DECL_VIRTUAL_P means RTTI entry;
777 : We warn when RTTI is lost because non-RTTI prevails; we silently
778 : accept the other case. */
779 56 : while (!end2
780 40 : && (end1
781 40 : || (methods_equal_p (ref1->referred->decl,
782 40 : ref2->referred->decl)
783 40 : && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
784 80 : && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
785 : {
786 0 : if (!class_type->rtti_broken)
787 : {
788 0 : auto_diagnostic_group d;
789 0 : if (warning_at (DECL_SOURCE_LOCATION
790 : (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
791 0 : OPT_Wodr,
792 : "virtual table of type %qD contains RTTI "
793 : "information",
794 0 : DECL_CONTEXT (vtable->decl)))
795 : {
796 0 : inform (DECL_SOURCE_LOCATION
797 : (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
798 : "but is prevailed by one without from other"
799 : " translation unit");
800 0 : inform (DECL_SOURCE_LOCATION
801 : (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
802 : "RTTI will not work on this type");
803 0 : class_type->rtti_broken = true;
804 : }
805 0 : }
806 0 : n2++;
807 0 : end2 = !vtable->iterate_reference (n2, ref2);
808 : }
809 : while (!end1
810 40 : && (end2
811 40 : || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
812 40 : && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
813 80 : && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
814 : {
815 0 : n1++;
816 0 : end1 = !prevailing->iterate_reference (n1, ref1);
817 : }
818 :
819 : /* Finished? */
820 56 : if (end1 && end2)
821 : {
822 : /* Extra paranoia; compare the sizes. We do not have information
823 : about virtual inheritance offsets, so just be sure that these
824 : match.
825 : Do this as very last check so the not very informative error
826 : is not output too often. */
827 16 : if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
828 : {
829 0 : class_type->odr_violated = true;
830 0 : auto_diagnostic_group d;
831 0 : tree ctx = TYPE_NAME (DECL_CONTEXT (vtable->decl));
832 0 : if (warning_at (DECL_SOURCE_LOCATION (ctx), OPT_Wodr,
833 : "virtual table of type %qD violates "
834 : "one definition rule",
835 0 : DECL_CONTEXT (vtable->decl)))
836 : {
837 0 : ctx = TYPE_NAME (DECL_CONTEXT (prevailing->decl));
838 0 : inform (DECL_SOURCE_LOCATION (ctx),
839 : "the conflicting type defined in another translation"
840 : " unit has virtual table of different size");
841 : }
842 0 : }
843 16 : return;
844 : }
845 :
846 40 : if (!end1 && !end2)
847 : {
848 40 : if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
849 40 : continue;
850 :
851 0 : class_type->odr_violated = true;
852 :
853 : /* If the loops above stopped on non-virtual pointer, we have
854 : mismatch in RTTI information mangling. */
855 0 : if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
856 0 : && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
857 : {
858 0 : auto_diagnostic_group d;
859 0 : if (warning_at (DECL_SOURCE_LOCATION
860 : (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
861 0 : OPT_Wodr,
862 : "virtual table of type %qD violates "
863 : "one definition rule",
864 0 : DECL_CONTEXT (vtable->decl)))
865 : {
866 0 : inform (DECL_SOURCE_LOCATION
867 : (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
868 : "the conflicting type defined in another translation "
869 : "unit with different RTTI information");
870 : }
871 0 : return;
872 0 : }
873 : /* At this point both REF1 and REF2 points either to virtual table
874 : or virtual method. If one points to virtual table and other to
875 : method we can complain the same way as if one table was shorter
876 : than other pointing out the extra method. */
877 0 : if (TREE_CODE (ref1->referred->decl)
878 0 : != TREE_CODE (ref2->referred->decl))
879 : {
880 0 : if (VAR_P (ref1->referred->decl))
881 : end1 = true;
882 0 : else if (VAR_P (ref2->referred->decl))
883 0 : end2 = true;
884 : }
885 : }
886 :
887 0 : class_type->odr_violated = true;
888 :
889 : /* Complain about size mismatch. Either we have too many virtual
890 : functions or too many virtual table pointers. */
891 0 : if (end1 || end2)
892 : {
893 0 : if (end1)
894 : {
895 0 : varpool_node *tmp = prevailing;
896 0 : prevailing = vtable;
897 0 : vtable = tmp;
898 0 : ref1 = ref2;
899 : }
900 0 : auto_diagnostic_group d;
901 0 : if (warning_at (DECL_SOURCE_LOCATION
902 : (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
903 0 : OPT_Wodr,
904 : "virtual table of type %qD violates "
905 : "one definition rule",
906 0 : DECL_CONTEXT (vtable->decl)))
907 : {
908 0 : if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
909 : {
910 0 : inform (DECL_SOURCE_LOCATION
911 : (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
912 : "the conflicting type defined in another translation "
913 : "unit");
914 0 : inform (DECL_SOURCE_LOCATION
915 : (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
916 : "contains additional virtual method %qD",
917 0 : ref1->referred->decl);
918 : }
919 : else
920 : {
921 0 : inform (DECL_SOURCE_LOCATION
922 : (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
923 : "the conflicting type defined in another translation "
924 : "unit has virtual table with more entries");
925 : }
926 : }
927 0 : return;
928 0 : }
929 :
930 : /* And in the last case we have either mismatch in between two virtual
931 : methods or two virtual table pointers. */
932 0 : auto_diagnostic_group d;
933 0 : if (warning_at (DECL_SOURCE_LOCATION
934 0 : (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
935 : "virtual table of type %qD violates "
936 : "one definition rule",
937 0 : DECL_CONTEXT (vtable->decl)))
938 : {
939 0 : if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
940 : {
941 0 : inform (DECL_SOURCE_LOCATION
942 : (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
943 : "the conflicting type defined in another translation "
944 : "unit");
945 0 : gcc_assert (TREE_CODE (ref2->referred->decl)
946 : == FUNCTION_DECL);
947 0 : inform (DECL_SOURCE_LOCATION
948 : (ref1->referred->ultimate_alias_target ()->decl),
949 : "virtual method %qD",
950 0 : ref1->referred->ultimate_alias_target ()->decl);
951 0 : inform (DECL_SOURCE_LOCATION
952 : (ref2->referred->ultimate_alias_target ()->decl),
953 : "ought to match virtual method %qD but does not",
954 0 : ref2->referred->ultimate_alias_target ()->decl);
955 : }
956 : else
957 0 : inform (DECL_SOURCE_LOCATION
958 : (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
959 : "the conflicting type defined in another translation "
960 : "unit has virtual table with different contents");
961 0 : return;
962 : }
963 40 : }
964 : }
965 :
966 : /* Output ODR violation warning about T1 and T2 with REASON.
967 : Display location of ST1 and ST2 if REASON speaks about field or
968 : method of the type.
969 : If WARN is false, do nothing. Set WARNED if warning was indeed
970 : output. */
971 :
static void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  /* DECL2 anchors the "defined in another translation unit" note; it starts
     at T2's name and may later be redirected to the mismatching
     sub-declaration ST2.  */
  tree decl2 = TYPE_NAME (TYPE_MAIN_VARIANT (t2));
  if (warned)
    *warned = false;

  /* Nothing to report when warnings are suppressed or T1 has no name to
     anchor the diagnostic to.  */
  if (!warn || !TYPE_NAME(TYPE_MAIN_VARIANT (t1)))
    return;

  /* ODR warnings are output during LTO streaming; we must apply location
     cache for potential warnings to be output correctly.  */
  if (lto_location_cache::current_cache)
    lto_location_cache::current_cache->apply_location_cache ();

  auto_diagnostic_group d;
  /* If T1 is a named variant (typedef) distinct from its main variant,
     mention both names in the warning.  */
  if (t1 != TYPE_MAIN_VARIANT (t1)
      && TYPE_NAME (t1) != TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
    {
      /* warning_at returns false when the warning was suppressed; in that
	 case skip the follow-up notes too.  */
      if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
		       OPT_Wodr, "type %qT (typedef of %qT) violates the "
		       "C++ One Definition Rule",
		       t1, TYPE_MAIN_VARIANT (t1)))
	return;
    }
  else
    {
      if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
		       OPT_Wodr, "type %qT violates the C++ One Definition Rule",
		       t1))
	return;
    }
  /* No sub-declarations to point at; only the REASON note is emitted.  */
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Canonicalize so ST1 is the non-NULL field.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* Point the final REASON note at the conflicting field if known.  */
      if (st2)
        decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    /* Unexpected sub-declaration kind; emit nothing further and do not
       set *WARNED.  */
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
1041 :
1042 : /* Return true if T1 and T2 are incompatible and we want to recursively
1043 : dive into them from warn_type_mismatch to give sensible answer. */
1044 :
1045 : static bool
1046 6 : type_mismatch_p (tree t1, tree t2)
1047 : {
1048 12 : if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
1049 12 : && !odr_types_equivalent_p (t1, t2))
1050 : return true;
1051 0 : return !types_compatible_p (t1, t2);
1052 : }
1053 :
1054 :
/* Types T1 and T2 were found to be incompatible in a context where they
   cannot be (they were either used to declare a symbol of the same assembler
   name or were unified by the ODR rule).  We have already output a warning
   about this, but if possible, output extra information on how the types
   mismatch.

   This is hard to do in general.  We basically handle the common cases.

   If LOC1 and LOC2 are meaningful locations, use them in the case the types
   themselves do not have one.  */
1064 :
1065 : void
1066 40 : warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
1067 : {
1068 : /* Location of type is known only if it has TYPE_NAME and the name is
1069 : TYPE_DECL. */
1070 85 : location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1071 78 : ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1072 46 : : UNKNOWN_LOCATION;
1073 79 : location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1074 71 : ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1075 71 : : UNKNOWN_LOCATION;
1076 25 : bool loc_t2_useful = false;
1077 :
1078 : /* With LTO it is a common case that the location of both types match.
1079 : See if T2 has a location that is different from T1. If so, we will
1080 : inform user about the location.
1081 : Do not consider the location passed to us in LOC1/LOC2 as those are
1082 : already output. */
1083 25 : if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1084 : {
1085 19 : if (loc_t1 <= BUILTINS_LOCATION)
1086 : loc_t2_useful = true;
1087 : else
1088 : {
1089 19 : expanded_location xloc1 = expand_location (loc_t1);
1090 19 : expanded_location xloc2 = expand_location (loc_t2);
1091 :
1092 19 : if (strcmp (xloc1.file, xloc2.file)
1093 0 : || xloc1.line != xloc2.line
1094 0 : || xloc1.column != xloc2.column)
1095 19 : loc_t2_useful = true;
1096 : }
1097 : }
1098 :
1099 46 : if (loc_t1 <= BUILTINS_LOCATION)
1100 : loc_t1 = loc1;
1101 46 : if (loc_t2 <= BUILTINS_LOCATION)
1102 27 : loc_t2 = loc2;
1103 :
1104 46 : location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1105 :
1106 : /* It is a quite common bug to reference anonymous namespace type in
1107 : non-anonymous namespace class. */
1108 46 : tree mt1 = TYPE_MAIN_VARIANT (t1);
1109 46 : tree mt2 = TYPE_MAIN_VARIANT (t2);
1110 46 : if ((type_with_linkage_p (mt1)
1111 31 : && type_in_anonymous_namespace_p (mt1))
1112 71 : || (type_with_linkage_p (mt2)
1113 26 : && type_in_anonymous_namespace_p (mt2)))
1114 : {
1115 13 : if (!type_with_linkage_p (mt1)
1116 13 : || !type_in_anonymous_namespace_p (mt1))
1117 : {
1118 : std::swap (t1, t2);
1119 : std::swap (mt1, mt2);
1120 : std::swap (loc_t1, loc_t2);
1121 : }
1122 13 : gcc_assert (TYPE_NAME (mt1)
1123 : && TREE_CODE (TYPE_NAME (mt1)) == TYPE_DECL);
1124 13 : tree n1 = TYPE_NAME (mt1);
1125 13 : tree n2 = TYPE_NAME (mt2) ? TYPE_NAME (mt2) : NULL;
1126 :
1127 13 : if (TREE_CODE (n1) == TYPE_DECL)
1128 13 : n1 = DECL_NAME (n1);
1129 13 : if (n2 && TREE_CODE (n2) == TYPE_DECL)
1130 7 : n2 = DECL_NAME (n2);
1131 : /* Most of the time, the type names will match, do not be unnecessarily
1132 : verbose. */
1133 13 : if (n1 != n2)
1134 1 : inform (loc_t1,
1135 : "type %qT defined in anonymous namespace cannot match "
1136 : "type %qT across the translation unit boundary",
1137 : t1, t2);
1138 : else
1139 12 : inform (loc_t1,
1140 : "type %qT defined in anonymous namespace cannot match "
1141 : "across the translation unit boundary",
1142 : t1);
1143 13 : if (loc_t2_useful)
1144 6 : inform (loc_t2,
1145 : "the incompatible type defined in another translation unit");
1146 13 : return;
1147 : }
1148 : /* If types have mangled ODR names and they are different, it is most
1149 : informative to output those.
1150 : This also covers types defined in different namespaces. */
1151 33 : const char *odr1 = get_odr_name_for_type (mt1);
1152 33 : const char *odr2 = get_odr_name_for_type (mt2);
1153 33 : if (odr1 != NULL && odr2 != NULL && odr1 != odr2)
1154 : {
1155 6 : const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
1156 6 : char *name1 = xstrdup (cplus_demangle (odr1, opts));
1157 6 : char *name2 = cplus_demangle (odr2, opts);
1158 6 : if (name1 && name2 && strcmp (name1, name2))
1159 : {
1160 6 : inform (loc_t1,
1161 : "type name %qs should match type name %qs",
1162 : name1, name2);
1163 6 : if (loc_t2_useful)
1164 6 : inform (loc_t2,
1165 : "the incompatible type is defined here");
1166 6 : free (name1);
1167 6 : return;
1168 : }
1169 0 : free (name1);
1170 : }
1171 : /* A tricky case are compound types. Often they appear the same in source
1172 : code and the mismatch is dragged in by type they are build from.
1173 : Look for those differences in subtypes and try to be informative. In other
1174 : cases just output nothing because the source code is probably different
1175 : and in this case we already output a all necessary info. */
1176 27 : if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1177 : {
1178 7 : if (TREE_CODE (t1) == TREE_CODE (t2))
1179 : {
1180 7 : if (TREE_CODE (t1) == ARRAY_TYPE
1181 7 : && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1182 : {
1183 1 : tree i1 = TYPE_DOMAIN (t1);
1184 1 : tree i2 = TYPE_DOMAIN (t2);
1185 :
1186 1 : if (i1 && i2
1187 1 : && TYPE_MAX_VALUE (i1)
1188 1 : && TYPE_MAX_VALUE (i2)
1189 2 : && !operand_equal_p (TYPE_MAX_VALUE (i1),
1190 1 : TYPE_MAX_VALUE (i2), 0))
1191 : {
1192 1 : inform (loc,
1193 : "array types have different bounds");
1194 1 : return;
1195 : }
1196 : }
1197 0 : if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1198 6 : && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1199 6 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1200 0 : else if (TREE_CODE (t1) == METHOD_TYPE
1201 0 : || TREE_CODE (t1) == FUNCTION_TYPE)
1202 : {
1203 0 : tree parms1 = NULL, parms2 = NULL;
1204 0 : int count = 1;
1205 :
1206 0 : if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1207 : {
1208 0 : inform (loc, "return value type mismatch");
1209 0 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1210 : loc_t2);
1211 0 : return;
1212 : }
1213 0 : if (prototype_p (t1) && prototype_p (t2))
1214 0 : for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1215 0 : parms1 && parms2;
1216 0 : parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1217 : count++)
1218 : {
1219 0 : if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1220 : {
1221 0 : if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1222 0 : inform (loc,
1223 : "implicit this pointer type mismatch");
1224 : else
1225 0 : inform (loc,
1226 : "type mismatch in parameter %i",
1227 0 : count - (TREE_CODE (t1) == METHOD_TYPE));
1228 0 : warn_types_mismatch (TREE_VALUE (parms1),
1229 0 : TREE_VALUE (parms2),
1230 : loc_t1, loc_t2);
1231 0 : return;
1232 : }
1233 : }
1234 0 : if (parms1 || parms2)
1235 : {
1236 0 : inform (loc,
1237 : "types have different parameter counts");
1238 0 : return;
1239 : }
1240 : }
1241 : }
1242 0 : return;
1243 : }
1244 :
1245 20 : if (types_odr_comparable (t1, t2)
1246 : /* We make assign integers mangled names to be able to handle
1247 : signed/unsigned chars. Accepting them here would however lead to
1248 : confusing message like
1249 : "type ‘const int’ itself violates the C++ One Definition Rule" */
1250 19 : && TREE_CODE (t1) != INTEGER_TYPE
1251 33 : && types_same_for_odr (t1, t2))
1252 13 : inform (loc_t1,
1253 : "type %qT itself violates the C++ One Definition Rule", t1);
1254 : /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1255 7 : else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1256 7 : && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1257 : return;
1258 : else
1259 6 : inform (loc_t1, "type %qT should match type %qT",
1260 : t1, t2);
1261 19 : if (loc_t2_useful)
1262 7 : inform (loc_t2, "the incompatible type is defined here");
1263 : }
1264 :
1265 : /* Return true if T should be ignored in TYPE_FIELDS for ODR comparison. */
1266 :
1267 : static bool
1268 690 : skip_in_fields_list_p (tree t)
1269 : {
1270 690 : if (TREE_CODE (t) != FIELD_DECL)
1271 : return true;
1272 : /* C++ FE introduces zero sized fields depending on -std setting, see
1273 : PR89358. */
1274 690 : if (DECL_SIZE (t)
1275 690 : && integer_zerop (DECL_SIZE (t))
1276 2 : && DECL_ARTIFICIAL (t)
1277 2 : && DECL_IGNORED_P (t)
1278 692 : && !DECL_NAME (t))
1279 : return true;
1280 : return false;
1281 : }
1282 :
/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a
   warning about the type if the type itself does not have a location.  */
1289 :
static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
			hash_set<type_pair> *visited,
			location_t loc1, location_t loc2)
{
  /* If we are asked to warn, we need warned to keep track if warning was
     output.  */
  gcc_assert (!warn || warned);
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
	        G_("a different type is defined in another translation unit"));
      return false;
    }

  /* Anonymous-namespace types are TU-local; two distinct trees can never
     be the same type.  */
  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
	  && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    {
      /* We cannot trip this when comparing ODR types, only when trying to
	 match different ODR derivations from different declarations.
	 So WARN should be always false.  */
      gcc_assert (!warn);
      return false;
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || VECTOR_TYPE_P (t1)
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (!VECTOR_TYPE_P (t1) && TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different precision is defined "
		       "in another translation unit"));
	  return false;
	}
      if (VECTOR_TYPE_P (t1)
	  && maybe_ne (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2)))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a vector type with different number of elements "
		       "is defined in another translation unit"));
	  return false;
	}
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different signedness is defined "
		       "in another translation unit"));
	  return false;
	}

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	{
	  /* char WRT uint_8?  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined in another "
		       "translation unit"));
	  return false;
	}

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer in different address "
			   "space in another translation unit"));
	      return false;
	    }

	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					  visited, loc1, loc2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer to different type "
			   "in another translation unit"));
	      if (warn && *warned)
	        warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
				     loc1, loc2);
	      return false;
	    }
	}

      if ((VECTOR_TYPE_P (t1) || TREE_CODE (t1) == COMPLEX_TYPE)
	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					 visited, loc1, loc2))
	{
	  /* Probably specific enough.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined "
		       "in another translation unit"));
	  if (warn && *warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}
    }
  /* Do type-specific comparisons.  */
  else switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
	/* Array types are the same if the element types are the same and
	   the number of elements are the same.  */
	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					visited, loc1, loc2))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("a different type is defined in another "
			 "translation unit"));
	    if (warn && *warned)
	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  }
	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
		    == TYPE_NONALIASED_COMPONENT (t2));

	tree i1 = TYPE_DOMAIN (t1);
	tree i2 = TYPE_DOMAIN (t2);

	/* For an incomplete external array, the type domain can be
	   NULL_TREE.  Check this condition also.  */
	if (i1 == NULL_TREE || i2 == NULL_TREE)
	  return type_variants_equivalent_p (t1, t2);

	tree min1 = TYPE_MIN_VALUE (i1);
	tree min2 = TYPE_MIN_VALUE (i2);
	tree max1 = TYPE_MAX_VALUE (i1);
	tree max2 = TYPE_MAX_VALUE (i2);

	/* In C++, minimums should be always 0.  */
	gcc_assert (min1 == min2);
	if (!operand_equal_p (max1, max2, 0))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("an array of different size is defined "
			 "in another translation unit"));
	    return false;
	  }
      }
      break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
				      visited, loc1, loc2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("has different return value "
		       "in another translation unit"));
	  if (warn && *warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}

      /* Unprototyped lists (or shared argument chains) compare equal.  */
      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
	  || !prototype_p (t1) || !prototype_p (t2))
	return type_variants_equivalent_p (t1, t2);
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!odr_subtypes_equivalent_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      visited, loc1, loc2))
		{
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("has different parameters in another "
			       "translation unit"));
		  if (warn && *warned)
		    warn_types_mismatch (TREE_VALUE (parms1),
					 TREE_VALUE (parms2), loc1, loc2);
		  return false;
		}
	    }

	  /* One list ended before the other: differing parameter counts.  */
	  if (parms1 || parms2)
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("has different parameters "
			   "in another translation unit"));
	      return false;
	    }

	  return type_variants_equivalent_p (t1, t2);
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
	  {
	    /* One definition polymorphic, the other not, is a violation on
	       its own.  */
	    if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
		&& polymorphic_type_binfo_p (TYPE_BINFO (t1))
		   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
	      {
		if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is not polymorphic"));
		else
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is polymorphic"));
		return false;
	      }
	    /* Walk both field chains in lockstep, skipping entries that do
	       not participate in the ABI (see skip_in_fields_list_p).  */
	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
		 f1 || f2;
		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	      {
		/* Skip non-fields.  */
		while (f1 && skip_in_fields_list_p (f1))
		  f1 = TREE_CHAIN (f1);
		while (f2 && skip_in_fields_list_p (f2))
		  f2 = TREE_CHAIN (f2);
		if (!f1 || !f2)
		  break;
		if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different virtual table pointers"
			         " is defined in another translation unit"));
		    return false;
		  }
		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different bases is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_NAME (f1) != DECL_NAME (f2)
		    && !DECL_ARTIFICIAL (f1))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field with different name is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
						TREE_TYPE (f2),
						visited, loc1, loc2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;

		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field of same name but different type "
				 "is defined in another translation unit"));
		    if (warn && *warned)
		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
		    return false;
		  }
		if (!gimple_compare_field_offset (f1, f2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("fields have different layout "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("one field is a bitfield while the other "
				 "is not"));
		    return false;
		  }
		else
		  gcc_assert (DECL_NONADDRESSABLE_P (f1)
			      == DECL_NONADDRESSABLE_P (f2));
	      }

	    /* If one aggregate has more fields than the other, they
	       are not the same.  */
	    if (f1 || f2)
	      {
		if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different virtual table pointers"
			       " is defined in another translation unit"));
		else if ((f1 && DECL_ARTIFICIAL (f1))
			 || (f2 && DECL_ARTIFICIAL (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different bases is defined "
			       "in another translation unit"));
		else
		  warn_odr (t1, t2, f1, f2, warn, warned,
			    G_("a type with different number of fields "
			       "is defined in another translation unit"));

		return false;
	      }
	  }
	break;
      }
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case NULLPTR_TYPE:
      break;

    default:
      debug_tree (t1);
      gcc_unreachable ();
    }

  /* Those are better to come last as they are utterly uninformative.  */
  if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
      && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different size "
		   "is defined in another translation unit"));
      return false;
    }

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("one type needs to be constructed while the other does not"));
      gcc_checking_assert (RECORD_OR_UNION_TYPE_P (t1));
      return false;
    }
  /* There is no really good user facing warning for this.
     Either the original reason for modes being different is lost during
     streaming or we should catch earlier warnings.  We however must detect
     the mismatch to avoid type verifier from complaining on mismatched
     types between type and canonical type.  See PR91576.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("memory layout mismatch"));
      return false;
    }

  gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
	      || operand_equal_p (TYPE_SIZE_UNIT (t1),
				  TYPE_SIZE_UNIT (t2), 0));
  return type_variants_equivalent_p (t1, t2);
}
1665 :
1666 : /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1667 :
1668 : bool
1669 192 : odr_types_equivalent_p (tree type1, tree type2)
1670 : {
1671 192 : gcc_checking_assert (odr_or_derived_type_p (type1)
1672 : && odr_or_derived_type_p (type2));
1673 :
1674 192 : hash_set<type_pair> visited;
1675 192 : return odr_types_equivalent_p (type1, type2, false, NULL,
1676 192 : &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1677 192 : }
1678 :
1679 : /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1680 : from VAL->type. This may happen in LTO where tree merging did not merge
1681 : all variants of the same type or due to ODR violation.
1682 :
1683 : Analyze and report ODR violations and add type to duplicate list.
1684 : If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1685 : this is first time we see definition of a class return true so the
1686 : base types are analyzed. */
1687 :
1688 : static bool
1689 7821 : add_type_duplicate (odr_type val, tree type)
1690 : {
1691 7821 : bool build_bases = false;
1692 7821 : bool prevail = false;
1693 7821 : bool odr_must_violate = false;
1694 :
1695 7821 : if (!val->types_set)
1696 7678 : val->types_set = new hash_set<tree>;
1697 :
1698 : /* Chose polymorphic type as leader (this happens only in case of ODR
1699 : violations. */
1700 7678 : if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1701 229 : && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1702 8050 : && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1703 93 : || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1704 : {
1705 : prevail = true;
1706 : build_bases = true;
1707 : }
1708 : /* Always prefer complete type to be the leader. */
1709 7685 : else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1710 : {
1711 1226 : prevail = true;
1712 1226 : if (TREE_CODE (type) == RECORD_TYPE)
1713 1205 : build_bases = TYPE_BINFO (type);
1714 : }
1715 6459 : else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1716 : ;
1717 225 : else if (TREE_CODE (val->type) == RECORD_TYPE
1718 150 : && TREE_CODE (type) == RECORD_TYPE
1719 374 : && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1720 : {
1721 0 : gcc_assert (!val->bases.length ());
1722 : build_bases = true;
1723 : prevail = true;
1724 : }
1725 :
1726 1205 : if (prevail)
1727 1362 : std::swap (val->type, type);
1728 :
1729 7821 : val->types_set->add (type);
1730 :
1731 7821 : if (!odr_hash)
1732 : return false;
1733 :
1734 23463 : gcc_checking_assert (can_be_name_hashed_p (type)
1735 : && can_be_name_hashed_p (val->type));
1736 :
1737 7821 : bool merge = true;
1738 7821 : bool base_mismatch = false;
1739 7821 : unsigned int i;
1740 7821 : bool warned = false;
1741 7821 : hash_set<type_pair> visited;
1742 :
1743 7821 : gcc_assert (in_lto_p);
1744 7821 : vec_safe_push (val->types, type);
1745 :
1746 : /* If both are class types, compare the bases. */
1747 8046 : if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1748 225 : && TREE_CODE (val->type) == RECORD_TYPE
1749 150 : && TREE_CODE (type) == RECORD_TYPE
1750 7970 : && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1751 : {
1752 186 : if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1753 93 : != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1754 : {
1755 0 : if (!flag_ltrans && !warned && !val->odr_violated)
1756 : {
1757 0 : tree extra_base;
1758 0 : warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1759 : "a type with the same name but different "
1760 : "number of polymorphic bases is "
1761 : "defined in another translation unit");
1762 0 : if (warned)
1763 : {
1764 0 : if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1765 0 : > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1766 0 : extra_base = BINFO_BASE_BINFO
1767 : (TYPE_BINFO (type),
1768 : BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1769 : else
1770 0 : extra_base = BINFO_BASE_BINFO
1771 : (TYPE_BINFO (val->type),
1772 : BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1773 0 : tree extra_base_type = BINFO_TYPE (extra_base);
1774 0 : inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1775 : "the extra base is defined here");
1776 : }
1777 : }
1778 : base_mismatch = true;
1779 : }
1780 : else
1781 145 : for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1782 : {
1783 54 : tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1784 54 : tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1785 54 : tree type1 = BINFO_TYPE (base1);
1786 54 : tree type2 = BINFO_TYPE (base2);
1787 :
1788 54 : if (types_odr_comparable (type1, type2))
1789 : {
1790 54 : if (!types_same_for_odr (type1, type2))
1791 : base_mismatch = true;
1792 : }
1793 : else
1794 0 : if (!odr_types_equivalent_p (type1, type2))
1795 : base_mismatch = true;
1796 54 : if (base_mismatch)
1797 : {
1798 0 : if (!warned && !val->odr_violated)
1799 : {
1800 0 : warn_odr (type, val->type, NULL, NULL,
1801 : !warned, &warned,
1802 : "a type with the same name but different base "
1803 : "type is defined in another translation unit");
1804 0 : if (warned)
1805 0 : warn_types_mismatch (type1, type2,
1806 : UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1807 : }
1808 : break;
1809 : }
1810 54 : if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1811 : {
1812 2 : base_mismatch = true;
1813 2 : if (!warned && !val->odr_violated)
1814 2 : warn_odr (type, val->type, NULL, NULL,
1815 : !warned, &warned,
1816 : "a type with the same name but different base "
1817 : "layout is defined in another translation unit");
1818 : break;
1819 : }
1820 : /* One of bases is not of complete type. */
1821 52 : if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1822 : {
1823 : /* If we have a polymorphic type info specified for TYPE1
1824 : but not for TYPE2 we possibly missed a base when recording
1825 : VAL->type earlier.
1826 : Be sure this does not happen. */
1827 0 : if (TYPE_BINFO (type1)
1828 0 : && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1829 0 : && !build_bases)
1830 : odr_must_violate = true;
1831 : break;
1832 : }
1833 : /* One base is polymorphic and the other not.
1834 : This ought to be diagnosed earlier, but do not ICE in the
1835 : checking below. */
1836 52 : else if (TYPE_BINFO (type1)
1837 104 : && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1838 52 : != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1839 : {
1840 0 : if (!warned && !val->odr_violated)
1841 0 : warn_odr (type, val->type, NULL, NULL,
1842 : !warned, &warned,
1843 : "a base of the type is polymorphic only in one "
1844 : "translation unit");
1845 : base_mismatch = true;
1846 : break;
1847 : }
1848 : }
1849 93 : if (base_mismatch)
1850 : {
1851 2 : merge = false;
1852 2 : odr_violation_reported = true;
1853 2 : val->odr_violated = true;
1854 :
1855 2 : if (symtab->dump_file)
1856 : {
1857 0 : fprintf (symtab->dump_file, "ODR base violation\n");
1858 :
1859 0 : print_node (symtab->dump_file, "", val->type, 0);
1860 0 : putc ('\n',symtab->dump_file);
1861 0 : print_node (symtab->dump_file, "", type, 0);
1862 0 : putc ('\n',symtab->dump_file);
1863 : }
1864 : }
1865 : }
1866 :
1867 : /* Next compare memory layout.
1868 : The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
1869 : We must apply the location cache to ensure that they are valid
1870 : before we can pass them to odr_types_equivalent_p (PR lto/83121). */
1871 7821 : if (lto_location_cache::current_cache)
1872 7821 : lto_location_cache::current_cache->apply_location_cache ();
1873 : /* As a special case we stream mangled names of integer types so we can see
1874 : if they are believed to be same even though they have different
1875 : representation. Avoid bogus warning on mismatches in these. */
1876 7821 : if (TREE_CODE (type) != INTEGER_TYPE
1877 7780 : && TREE_CODE (val->type) != INTEGER_TYPE
1878 15601 : && !odr_types_equivalent_p (val->type, type,
1879 7780 : !flag_ltrans && !val->odr_violated && !warned,
1880 : &warned, &visited,
1881 7780 : DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1882 7780 : DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1883 : {
1884 84 : merge = false;
1885 84 : odr_violation_reported = true;
1886 84 : val->odr_violated = true;
1887 : }
1888 7821 : gcc_assert (val->odr_violated || !odr_must_violate);
1889 : /* Sanity check that all bases will be build same way again. */
1890 7821 : if (flag_checking
1891 7821 : && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1892 225 : && TREE_CODE (val->type) == RECORD_TYPE
1893 150 : && TREE_CODE (type) == RECORD_TYPE
1894 149 : && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1895 93 : && !val->odr_violated
1896 7910 : && !base_mismatch && val->bases.length ())
1897 : {
1898 : unsigned int num_poly_bases = 0;
1899 : unsigned int j;
1900 :
1901 95 : for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1902 52 : if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1903 : (TYPE_BINFO (type), i)))
1904 52 : num_poly_bases++;
1905 43 : gcc_assert (num_poly_bases == val->bases.length ());
1906 95 : for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1907 : i++)
1908 52 : if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1909 : (TYPE_BINFO (type), i)))
1910 : {
1911 52 : odr_type base = get_odr_type
1912 52 : (BINFO_TYPE
1913 : (BINFO_BASE_BINFO (TYPE_BINFO (type),
1914 : i)),
1915 : true);
1916 52 : gcc_assert (val->bases[j] == base);
1917 52 : j++;
1918 : }
1919 : }
1920 :
1921 :
1922 : /* Regularize things a little. During LTO same types may come with
1923 : different BINFOs. Either because their virtual table was
1924 : not merged by tree merging and only later at decl merging or
1925 : because one type comes with external vtable, while other
1926 : with internal. We want to merge equivalent binfos to conserve
1927 : memory and streaming overhead.
1928 :
1929 : The external vtables are more harmful: they contain references
1930 : to external declarations of methods that may be defined in the
1931 : merged LTO unit. For this reason we absolutely need to remove
1932 : them and replace by internal variants. Not doing so will lead
1933 : to incomplete answers from possible_polymorphic_call_targets.
1934 :
1935 : FIXME: disable for now; because ODR types are now build during
1936 : streaming in, the variants do not need to be linked to the type,
1937 : yet. We need to do the merging in cleanup pass to be implemented
1938 : soon. */
1939 7821 : if (!flag_ltrans && merge
1940 : && 0
1941 : && TREE_CODE (val->type) == RECORD_TYPE
1942 : && TREE_CODE (type) == RECORD_TYPE
1943 : && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1944 : && TYPE_MAIN_VARIANT (type) == type
1945 : && TYPE_MAIN_VARIANT (val->type) == val->type
1946 : && BINFO_VTABLE (TYPE_BINFO (val->type))
1947 : && BINFO_VTABLE (TYPE_BINFO (type)))
1948 : {
1949 : tree master_binfo = TYPE_BINFO (val->type);
1950 : tree v1 = BINFO_VTABLE (master_binfo);
1951 : tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1952 :
1953 : if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1954 : {
1955 : gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1956 : && operand_equal_p (TREE_OPERAND (v1, 1),
1957 : TREE_OPERAND (v2, 1), 0));
1958 : v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1959 : v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1960 : }
1961 : gcc_assert (DECL_ASSEMBLER_NAME (v1)
1962 : == DECL_ASSEMBLER_NAME (v2));
1963 :
1964 : if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1965 : {
1966 : unsigned int i;
1967 :
1968 : set_type_binfo (val->type, TYPE_BINFO (type));
1969 : for (i = 0; i < val->types->length (); i++)
1970 : {
1971 : if (TYPE_BINFO ((*val->types)[i])
1972 : == master_binfo)
1973 : set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1974 : }
1975 : BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1976 : }
1977 : else
1978 : set_type_binfo (type, master_binfo);
1979 : }
1980 7821 : return build_bases;
1981 7821 : }
1982 :
1983 : /* REF is OBJ_TYPE_REF, return the class the ref corresponds to.
1984 : FOR_DUMP_P is true when being called from the dump routines. */
1985 :
1986 : tree
1987 3597119 : obj_type_ref_class (const_tree ref, bool for_dump_p)
1988 : {
1989 3597119 : gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
1990 3597119 : ref = TREE_TYPE (ref);
1991 3597119 : gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1992 3597119 : ref = TREE_TYPE (ref);
1993 : /* We look for type THIS points to. ObjC also builds
1994 : OBJ_TYPE_REF with non-method calls, Their first parameter
1995 : ID however also corresponds to class type. */
1996 3597119 : gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
1997 : || TREE_CODE (ref) == FUNCTION_TYPE);
1998 3597119 : ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
1999 3597119 : gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
2000 3597119 : tree ret = TREE_TYPE (ref);
2001 3597119 : if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (ret))
2002 3561352 : ret = TYPE_CANONICAL (ret);
2003 35767 : else if (odr_type ot = get_odr_type (ret, !for_dump_p))
2004 35767 : ret = ot->type;
2005 : else
2006 0 : gcc_assert (for_dump_p);
2007 3597119 : return ret;
2008 : }
2009 :
2010 : /* Get ODR type hash entry for TYPE. If INSERT is true, create
2011 : possibly new entry. */
2012 :
odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  /* ODR types are hashed by main variant; outside of LTO, structurally
     equal types have already been merged into TYPE_CANONICAL.  */
  type = TYPE_MAIN_VARIANT (type);
  if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (type))
    type = TYPE_CANONICAL (type);

  gcc_checking_assert (can_be_name_hashed_p (type));

  hash = hash_odr_name (type);
  slot = odr_hash->find_slot_with_hash (type, hash,
					insert ? INSERT : NO_INSERT);

  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO several distinct TREE nodes may represent the same ODR
	 type; the first time we see such a duplicate, merge its info into
	 the prevailing entry.  */
      if (val->type != type && insert
	  && (!val->types_set || !val->types_set->add (type)))
	build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->no_derived_construction_vtables = false;
      if (type_with_linkage_p (type))
	val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      /* Bases can only be recorded once the type is complete.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      *slot = val;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types; others are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the largest base id so the invariant "type appears
	       after its bases in odr_types" can be re-established below.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base was recorded after this type; move the type to the end of
	 the array to restore topological order.  */
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2104 :
2105 : /* Return type that in ODR type hash prevailed TYPE. Be careful and punt
2106 : on ODR violations. */
2107 :
2108 : tree
2109 10898 : prevailing_odr_type (tree type)
2110 : {
2111 10898 : odr_type t = get_odr_type (type, false);
2112 10898 : if (!t || t->odr_violated)
2113 : return type;
2114 10893 : return t->type;
2115 : }
2116 :
2117 : /* Set tbaa_enabled flag for TYPE. */
2118 :
2119 : void
2120 10602 : enable_odr_based_tbaa (tree type)
2121 : {
2122 10602 : odr_type t = get_odr_type (type, true);
2123 10602 : t->tbaa_enabled = true;
2124 10602 : }
2125 :
2126 : /* True if canonical type of TYPE is determined using ODR name. */
2127 :
2128 : bool
2129 7983 : odr_based_tbaa_p (const_tree type)
2130 : {
2131 7983 : if (!RECORD_OR_UNION_TYPE_P (type))
2132 : return false;
2133 6535 : if (!odr_hash)
2134 : return false;
2135 6532 : odr_type t = get_odr_type (const_cast <tree> (type), false);
2136 6532 : if (!t || !t->tbaa_enabled)
2137 : return false;
2138 : return true;
2139 : }
2140 :
2141 : /* Set TYPE_CANONICAL of type and all its variants and duplicates
2142 : to CANONICAL. */
2143 :
2144 : void
2145 10626 : set_type_canonical_for_odr_type (tree type, tree canonical)
2146 : {
2147 10626 : odr_type t = get_odr_type (type, false);
2148 10626 : unsigned int i;
2149 10626 : tree tt;
2150 :
2151 27077 : for (tree t2 = t->type; t2; t2 = TYPE_NEXT_VARIANT (t2))
2152 16451 : TYPE_CANONICAL (t2) = canonical;
2153 10626 : if (t->types)
2154 15193 : FOR_EACH_VEC_ELT (*t->types, i, tt)
2155 19222 : for (tree t2 = tt; t2; t2 = TYPE_NEXT_VARIANT (t2))
2156 11578 : TYPE_CANONICAL (t2) = canonical;
2157 10626 : }
2158 :
2159 : /* Return true if we reported some ODR violation on TYPE. */
2160 :
2161 : bool
2162 10761 : odr_type_violation_reported_p (tree type)
2163 : {
2164 10761 : return get_odr_type (type, false)->odr_violated;
2165 : }
2166 :
2167 : /* Add TYPE of ODR type hash. */
2168 :
void
register_odr_type (tree type)
{
  if (!odr_hash)
    odr_hash = new odr_hash_type (23);
  /* Only main variants are recorded; variants share the entry.  */
  if (type == TYPE_MAIN_VARIANT (type))
    {
      /* To get ODR warnings right, first register all sub-types.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && COMPLETE_TYPE_P (type))
	{
	  /* Limit recursion on types which are already registered.  */
	  odr_type ot = get_odr_type (type, false);
	  if (ot
	      && (ot->type == type
		  || (ot->types_set
		      && ot->types_set->contains (type))))
	    return;
	  /* Recurse into field types, looking through arrays, so their
	     ODR entries exist before the enclosing type's.  */
	  for (tree f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	    if (TREE_CODE (f) == FIELD_DECL)
	      {
		tree subtype = TREE_TYPE (f);

		while (TREE_CODE (subtype) == ARRAY_TYPE)
		  subtype = TREE_TYPE (subtype);
		if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype)))
		  register_odr_type (TYPE_MAIN_VARIANT (subtype));
	      }
	  /* Likewise recurse into all base classes.  */
	  if (TYPE_BINFO (type))
	    for (unsigned int i = 0;
		 i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	      register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
					     (TYPE_BINFO (type), i)));
	}
      get_odr_type (type, true);
    }
}
2206 :
2207 : /* Return true if type is known to have no derivations. */
2208 :
2209 : bool
2210 1661994 : type_known_to_have_no_derivations_p (tree t)
2211 : {
2212 1661994 : return (type_all_derivations_known_p (t)
2213 1661994 : && (TYPE_FINAL_P (t)
2214 3327 : || (odr_hash
2215 3327 : && !get_odr_type (t, true)->derived_types.length())));
2216 : }
2217 :
2218 : /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2219 : recursive printing. */
2220 :
2221 : static void
2222 64 : dump_odr_type (FILE *f, odr_type t, int indent=0)
2223 : {
2224 64 : unsigned int i;
2225 64 : fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2226 64 : print_generic_expr (f, t->type, TDF_SLIM);
2227 128 : fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)" : "");
2228 128 : fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)" : "");
2229 64 : if (TYPE_NAME (t->type))
2230 : {
2231 64 : if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2232 14 : fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2233 7 : IDENTIFIER_POINTER
2234 : (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2235 : }
2236 64 : if (t->bases.length ())
2237 : {
2238 32 : fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2239 96 : for (i = 0; i < t->bases.length (); i++)
2240 32 : fprintf (f, " %i", t->bases[i]->id);
2241 32 : fprintf (f, "\n");
2242 : }
2243 64 : if (t->derived_types.length ())
2244 : {
2245 29 : fprintf (f, "%*s derived types:\n", indent * 2, "");
2246 90 : for (i = 0; i < t->derived_types.length (); i++)
2247 32 : dump_odr_type (f, t->derived_types[i], indent + 1);
2248 : }
2249 64 : fprintf (f, "\n");
2250 64 : }
2251 :
2252 : /* Dump the type inheritance graph. */
2253 :
static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  unsigned int num_all_types = 0, num_types = 0, num_duplicates = 0;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  /* Dump the forest starting from root types (those without bases);
     dump_odr_type recurses into derived types.  */
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  /* Second pass: report statistics about duplicated (unmerged) trees.  */
  for (i = 0; i < odr_types.length (); i++)
    {
      if (!odr_types[i])
	continue;

      num_all_types++;
      if (!odr_types[i]->types || !odr_types[i]->types->length ())
	continue;

      /* To aid ODR warnings we also mangle integer constants but do
	 not consider duplicates there.  */
      if (TREE_CODE (odr_types[i]->type) == INTEGER_TYPE)
	continue;

      /* It is normal to have one duplicate and one normal variant.  */
      if (odr_types[i]->types->length () == 1
	  && COMPLETE_TYPE_P (odr_types[i]->type)
	  && !COMPLETE_TYPE_P ((*odr_types[i]->types)[0]))
	continue;

      num_types ++;

      unsigned int j;
      fprintf (f, "Duplicate tree types for odr type %i\n", i);
      print_node (f, "", odr_types[i]->type, 0);
      print_node (f, "", TYPE_NAME (odr_types[i]->type), 0);
      putc ('\n',f);
      for (j = 0; j < odr_types[i]->types->length (); j++)
	{
	  tree t;
	  num_duplicates ++;
	  fprintf (f, "duplicate #%i\n", j);
	  print_node (f, "", (*odr_types[i]->types)[j], 0);
	  t = (*odr_types[i]->types)[j];
	  /* Also dump the chain of enclosing contexts to help identify
	     where the duplicate originates.  */
	  while (TYPE_P (t) && TYPE_CONTEXT (t))
	    {
	      t = TYPE_CONTEXT (t);
	      print_node (f, "", t, 0);
	    }
	  print_node (f, "", TYPE_NAME ((*odr_types[i]->types)[j]), 0);
	  putc ('\n',f);
	}
    }
  fprintf (f, "Out of %i types there are %i types with duplicates; "
	   "%i duplicates overall\n", num_all_types, num_types, num_duplicates);
}
2313 :
2314 : /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2315 : ODR warnings.
2316 : We make TYPE_DECLs to not point back
2317 : to the type (which is needed to keep them in the same SCC and preserve
2318 : location information to output warnings) and subsequently we make all
2319 : TYPE_DECLS of same assembler name equivalent. */
2320 :
2321 : static void
2322 1989229 : free_odr_warning_data ()
2323 : {
2324 1989229 : static bool odr_data_freed = false;
2325 :
2326 1989229 : if (odr_data_freed || !flag_wpa || !odr_types_ptr)
2327 : return;
2328 :
2329 706 : odr_data_freed = true;
2330 :
2331 6351 : for (unsigned int i = 0; i < odr_types.length (); i++)
2332 5645 : if (odr_types[i])
2333 : {
2334 5614 : tree t = odr_types[i]->type;
2335 :
2336 5614 : TREE_TYPE (TYPE_NAME (t)) = void_type_node;
2337 :
2338 5614 : if (odr_types[i]->types)
2339 6346 : for (unsigned int j = 0; j < odr_types[i]->types->length (); j++)
2340 : {
2341 3203 : tree td = (*odr_types[i]->types)[j];
2342 :
2343 3203 : TYPE_NAME (td) = TYPE_NAME (t);
2344 : }
2345 : }
2346 706 : odr_data_freed = true;
2347 : }
2348 :
2349 : /* Initialize IPA devirt and build inheritance tree graph. */
2350 :
void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  dump_flags_t flags;

  /* The graph is built at most once; later calls only get a chance to
     free the ODR warning data.  */
  if (odr_hash)
    {
      free_odr_warning_data ();
      return;
    }
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  free_odr_warning_data ();
  timevar_pop (TV_IPA_INHERITANCE);
}
2408 :
2409 : /* Return true if N has reference from live virtual table
2410 : (and thus can be a destination of polymorphic call).
2411 : Be conservatively correct when callgraph is not built or
2412 : if the method may be referred externally. */
2413 :
static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  /* Symbols visible outside this unit or partition may be referenced
     from vtables we cannot see; conservatively answer true.  */
  if (node->externally_visible
      || DECL_EXTERNAL (node->decl)
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (symtab->state <= CONSTRUCTION)
    return true;

  /* Look for an address reference from a virtual table; for aliases,
     recurse to check the alias itself.  */
  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
	|| (ref->use == IPA_REF_ADDR
	    && VAR_P (ref->referring->decl)
	    && DECL_VIRTUAL_P (ref->referring->decl)))
      {
	found = true;
	break;
      }
  return found;
}
2449 :
2450 : /* Return if TARGET is cxa_pure_virtual. */
2451 :
2452 : static bool
2453 364692 : is_cxa_pure_virtual_p (tree target)
2454 : {
2455 364495 : return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2456 55575 : && DECL_NAME (target)
2457 420267 : && id_equal (DECL_NAME (target),
2458 364692 : "__cxa_pure_virtual");
2459 : }
2460 :
2461 : /* If TARGET has associated node, record it in the NODES array.
2462 : CAN_REFER specify if program can refer to the target directly.
2463 : if TARGET is unknown (NULL) or it cannot be inserted (for example because
2464 : its body was already removed and there is no way to refer to it), clear
2465 : COMPLETEP. */
2466 :
static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;
  bool pure_virtual = is_cxa_pure_virtual_p (target);

  /* __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && !pure_virtual
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* When sanitizing, do not assume that __cxa_pure_virtual is not called
	 by valid program.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	;
      /* Only add pure virtual if it is the only possible target.  This way
	 we will preserve the diagnostics about pure virtual called in many
	 cases without disabling optimization in other.  */
      else if (pure_virtual)
	{
	  if (nodes.length ())
	    return;
	}
      /* If we found a real target, take away cxa_pure_virtual.  */
      else if (!pure_virtual && nodes.length () == 1
	       && is_cxa_pure_virtual_p (nodes[0]->decl))
	nodes.pop ();
      if (pure_virtual && nodes.length ())
	return;
      /* Record the node unless it was already inserted.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (!completep)
    ;
  /* We have definition of __cxa_pure_virtual that is not accessible (it is
     optimized out or partitioned to other unit) so we cannot add it.  When
     not sanitizing, there is nothing to do.
     Otherwise declare the list incomplete.  */
  else if (pure_virtual)
    {
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	*completep = false;
    }
  else if (flag_ltrans
	   || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
    *completep = false;
}
2576 :
2577 : /* See if BINFO's type matches OUTER_TYPE. If so, look up
2578 : BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2579 : method in vtable and insert method to NODES array
2580 : or BASES_TO_CONSIDER if this array is non-NULL.
2581 : Otherwise recurse to base BINFOs.
2582 : This matches what get_binfo_at_offset does, but with offset
2583 : being unknown.
2584 :
2585 : TYPE_BINFOS is a stack of BINFOS of types with defined
2586 : virtual table seen on way from class type to BINFO.
2587 :
2588 : MATCHED_VTABLES tracks virtual tables we already did lookup
2589 : for virtual function in. INSERTED tracks nodes we already
2590 : inserted.
2591 :
2592 : ANONYMOUS is true if BINFO is part of anonymous namespace.
2593 :
2594 : Clear COMPLETEP when we hit unreferable target.
2595 : */
2596 :
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs with vtables on the path from the
     outermost class; every push is matched by a pop below.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  /* Strip the POINTER_PLUS_EXPR wrapper to reach the vtable decl.  */
	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Avoid re-processing a vtable already looked at; when collecting
	 BASES_TO_CONSIDER only test membership without recording.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2687 :
2688 : /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2689 : of TYPE, insert them to NODES, recurse into derived nodes.
2690 : INSERTED is used to avoid duplicate insertions of methods into NODES.
2691 : MATCHED_VTABLES are used to avoid duplicate walking vtables.
2692 : Clear COMPLETEP if unreferable target is found.
2693 :
2694 : If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2695 : all cases where BASE_SKIPPED is true (because the base is abstract
2696 : class). */
2697 :
static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Scratch stack used by record_target_from_binfo to track the BINFOs
     with virtual tables seen on the way down the inheritance tree.  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      /* Pass BASES_TO_CONSIDER only in the deferred case: the type has no
	 instances but all its derivations are known, so the targets are
	 recorded instead of being added to NODES immediately.  */
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into every derived type; INSERTED and MATCHED_VTABLES keep the
     accumulated NODES free of duplicates across the whole walk.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
2740 :
2741 : /* Cache of queries for polymorphic call targets.
2742 :
2743 : Enumerating all call targets may get expensive when there are many
2744 : polymorphic calls in the program, so we memoize all the previous
2745 : queries and avoid duplicated work. */
2746 :
class polymorphic_call_target_d
{
public:
  /* Vtable slot (token) of the virtual call being answered.  */
  HOST_WIDE_INT otr_token;
  /* Polymorphic call context the query was made with.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call (corresponding to OTR_TYPE).  */
  odr_type type;
  /* Cached list of possible call targets.  */
  vec <cgraph_node *> targets;
  /* Method decl accounted for -Wsuggest-final-methods, or NULL.  */
  tree decl_warning;
  /* 1-based ODR type id accounted for -Wsuggest-final-types; 0 if none.  */
  int type_warning;
  /* Number of ODR types known when the entry was computed.  Adding a new
     type may change the answer, so this participates in cache matching.  */
  unsigned int n_odr_types;
  /* True if TARGETS is a complete list.  */
  bool complete;
  /* True if TARGETS excludes targets that are not speculatively taken.  */
  bool speculative;
};
2760 :
2761 : /* Polymorphic call target cache helpers. */
2762 :
/* Hash traits for entries of the polymorphic call target cache.  Entries
   are stored by pointer but compared by their query fields, so find_slot
   with a stack-allocated key locates the matching heap entry.  */

struct polymorphic_call_target_hasher
  : pointer_hash <polymorphic_call_target_d>
{
  static inline hashval_t hash (const polymorphic_call_target_d *);
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  static inline void remove (polymorphic_call_target_d *);
};
2771 :
2772 : /* Return the computed hashcode for ODR_QUERY. */
2773 :
2774 : inline hashval_t
2775 6306354 : polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2776 : {
2777 6306354 : inchash::hash hstate (odr_query->otr_token);
2778 :
2779 6306354 : hstate.add_hwi (odr_query->type->id);
2780 6306354 : hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2781 6306354 : hstate.add_hwi (odr_query->context.offset);
2782 6306354 : hstate.add_hwi (odr_query->n_odr_types);
2783 :
2784 6306354 : if (odr_query->context.speculative_outer_type)
2785 : {
2786 28462 : hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2787 28462 : hstate.add_hwi (odr_query->context.speculative_offset);
2788 : }
2789 6306354 : hstate.add_flag (odr_query->speculative);
2790 6306354 : hstate.add_flag (odr_query->context.maybe_in_construction);
2791 6306354 : hstate.add_flag (odr_query->context.maybe_derived_type);
2792 6306354 : hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2793 6306354 : hstate.commit_flag ();
2794 6306354 : return hstate.end ();
2795 : }
2796 :
2797 : /* Compare cache entries T1 and T2. */
2798 :
2799 : inline bool
2800 6190272 : polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2801 : const polymorphic_call_target_d *t2)
2802 : {
2803 3102047 : return (t1->type == t2->type && t1->otr_token == t2->otr_token
2804 2062198 : && t1->speculative == t2->speculative
2805 1784900 : && t1->context.offset == t2->context.offset
2806 1784177 : && t1->context.speculative_offset == t2->context.speculative_offset
2807 1784177 : && t1->context.outer_type == t2->context.outer_type
2808 1687709 : && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2809 1686963 : && t1->context.maybe_in_construction
2810 1686963 : == t2->context.maybe_in_construction
2811 1321107 : && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2812 1316785 : && (t1->context.speculative_maybe_derived_type
2813 1316785 : == t2->context.speculative_maybe_derived_type)
2814 : /* Adding new type may affect outcome of target search. */
2815 7507011 : && t1->n_odr_types == t2->n_odr_types);
2816 : }
2817 :
2818 : /* Remove entry in polymorphic call target cache hash. */
2819 :
inline void
polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
{
  /* Entries own their target vector; release it before freeing the
     XCNEW-allocated entry itself.  */
  v->targets.release ();
  free (v);
}
2826 :
2827 : /* Polymorphic call target query cache. */
2828 :
typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
/* Lazily allocated query cache; created on the first query and destroyed
   by free_polymorphic_call_targets_hash when symbols it may reference are
   removed or the type hierarchy is rebuilt.  */
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2832 :
2833 : /* Destroy polymorphic call target query cache. */
2834 :
2835 : static void
2836 755175 : free_polymorphic_call_targets_hash ()
2837 : {
2838 755175 : if (cached_polymorphic_call_targets)
2839 : {
2840 10117 : delete polymorphic_call_target_hash;
2841 10117 : polymorphic_call_target_hash = NULL;
2842 20234 : delete cached_polymorphic_call_targets;
2843 10117 : cached_polymorphic_call_targets = NULL;
2844 : }
2845 755175 : }
2846 :
2847 : /* Force rebuilding type inheritance graph from scratch.
2848 : This is use to make sure that we do not keep references to types
2849 : which was not visible to free_lang_data. */
2850 :
2851 : void
2852 232444 : rebuild_type_inheritance_graph ()
2853 : {
2854 232444 : if (!odr_hash)
2855 : return;
2856 232444 : delete odr_hash;
2857 232444 : odr_hash = NULL;
2858 232444 : odr_types_ptr = NULL;
2859 232444 : free_polymorphic_call_targets_hash ();
2860 : }
2861 :
2862 : /* When virtual function is removed, we may need to flush the cache. */
2863 :
2864 : static void
2865 16332314 : devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2866 : {
2867 16332314 : if (cached_polymorphic_call_targets
2868 2303463 : && !thunk_expansion
2869 18635245 : && cached_polymorphic_call_targets->contains (n))
2870 6023 : free_polymorphic_call_targets_hash ();
2871 16332314 : }
2872 :
2873 : /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2874 :
2875 : tree
2876 21689 : subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2877 : tree vtable)
2878 : {
2879 21689 : tree v = BINFO_VTABLE (binfo);
2880 21689 : int i;
2881 21689 : tree base_binfo;
2882 21689 : unsigned HOST_WIDE_INT this_offset;
2883 :
2884 21689 : if (v)
2885 : {
2886 17854 : if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2887 0 : gcc_unreachable ();
2888 :
2889 17854 : if (offset == this_offset
2890 17854 : && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2891 : return binfo;
2892 : }
2893 :
2894 16172 : for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2895 11263 : if (polymorphic_type_binfo_p (base_binfo))
2896 : {
2897 7580 : base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2898 7580 : if (base_binfo)
2899 : return base_binfo;
2900 : }
2901 : return NULL;
2902 : }
2903 :
2904 : /* T is known constant value of virtual table pointer.
2905 : Store virtual table to V and its offset to OFFSET.
2906 : Return false if T does not look like virtual table reference. */
2907 :
bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* The vtable VAR_DECL is the base of the MEM_REF; the INTEGER_CST
	 operand of the MEM_REF is the byte offset into it.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* After stripping the optional POINTER_PLUS_EXPR, only a direct address
     of the vtable decl is recognized.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2951 :
2952 : /* T is known constant value of virtual table pointer. Return BINFO of the
2953 : instance type. */
2954 :
2955 : tree
2956 0 : vtable_pointer_value_to_binfo (const_tree t)
2957 : {
2958 0 : tree vtable;
2959 0 : unsigned HOST_WIDE_INT offset;
2960 :
2961 0 : if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2962 : return NULL_TREE;
2963 :
2964 : /* FIXME: for stores of construction vtables we return NULL,
2965 : because we do not have BINFO for those. Eventually we should fix
2966 : our representation to allow this case to be handled, too.
2967 : In the case we see store of BINFO we however may assume
2968 : that standard folding will be able to cope with it. */
2969 0 : return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2970 0 : offset, vtable);
2971 : }
2972 :
2973 : /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2974 : Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2975 : and insert them in NODES.
2976 :
2977 : MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2978 :
static void
record_targets_from_bases (tree otr_type,
			   HOST_WIDE_INT otr_token,
			   tree outer_type,
			   HOST_WIDE_INT offset,
			   vec <cgraph_node *> &nodes,
			   hash_set<tree> *inserted,
			   hash_set<tree> *matched_vtables,
			   bool *completep)
{
  /* Iteratively peel the base field of OUTER_TYPE that contains OFFSET,
     recording the virtual method of each intermediate base, until
     OUTER_TYPE itself is OTR_TYPE.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree base_binfo;
      tree fld;

      if (types_same_for_odr (outer_type, otr_type))
	return;

      /* Find the (artificial) base field whose bit range covers OFFSET.  */
      for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_shwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset
	      /* Do not get confused by zero sized bases.  */
	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
	    break;
	}
      /* Within a class type we should always find corresponding fields.  */
      gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);

      /* Nonbase types should have been stripped by outer_class_type.  */
      gcc_assert (DECL_ARTIFICIAL (fld));

      /* Descend into the base: rebase OFFSET relative to its start.  */
      outer_type = TREE_TYPE (fld);
      offset -= pos;

      base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
					offset, otr_type);
      if (!base_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* NOTE(review): redundant after the early return just above.  */
      gcc_assert (base_binfo);
      /* hash_set::add returns true when the key was already present, so a
	 false result means this vtable is seen for the first time.  */
      if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  base_binfo,
							  &can_refer);
	  /* Destructors are never called via construction vtables, so do
	     not record them.  */
	  if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* NOTE(review): the vtable was already added above; this second
	     add is a no-op.  */
	  matched_vtables->add (BINFO_VTABLE (base_binfo));
	}
    }
}
3039 :
3040 : /* When virtual table is removed, we may need to flush the cache. */
3041 :
3042 : static void
3043 7184140 : devirt_variable_node_removal_hook (varpool_node *n,
3044 : void *d ATTRIBUTE_UNUSED)
3045 : {
3046 7184140 : if (cached_polymorphic_call_targets
3047 12179 : && DECL_VIRTUAL_P (n->decl)
3048 7187450 : && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3049 18 : free_polymorphic_call_targets_hash ();
3050 7184140 : }
3051 :
3052 : /* Record about how many calls would benefit from given type to be final. */
3053 :
struct odr_type_warn_count
{
  /* The type in question.  */
  tree type;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Their combined dynamic (profile) count.  */
  profile_count dyn_count;
};
3060 :
3061 : /* Record about how many calls would benefit from given method to be final. */
3062 :
struct decl_warn_count
{
  /* The method in question.  */
  tree decl;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Their combined dynamic (profile) count.  */
  profile_count dyn_count;
};
3069 :
3070 : /* Information about type and decl warnings. */
3071 :
class final_warning_record
{
public:
  /* If needed grow type_warnings vector and initialize new decl_warn_count
     to have dyn_count set to profile_count::zero ().  */
  void grow_type_warnings (unsigned newlen);

  /* Dynamic count of the polymorphic call currently being analyzed.  */
  profile_count dyn_count;
  /* Counters for -Wsuggest-final-types, indexed by ODR type id.  */
  auto_vec<odr_type_warn_count> type_warnings;
  /* Counters for -Wsuggest-final-methods, keyed by method decl.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
3083 :
3084 : void
3085 19 : final_warning_record::grow_type_warnings (unsigned newlen)
3086 : {
3087 19 : unsigned len = type_warnings.length ();
3088 19 : if (newlen > len)
3089 : {
3090 10 : type_warnings.safe_grow_cleared (newlen, true);
3091 23 : for (unsigned i = len; i < newlen; i++)
3092 13 : type_warnings[i].dyn_count = profile_count::zero ();
3093 : }
3094 19 : }
3095 :
/* Global record gathering -Wsuggest-final-types/-Wsuggest-final-methods
   statistics; target queries only update it when it is non-NULL.  */
class final_warning_record *final_warning_records;
3097 :
3098 : /* Return vector containing possible targets of polymorphic call of type
3099 : OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
3100 : If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
3101 : OTR_TYPE and include their virtual method. This is useful for types
3102 : possibly in construction or destruction where the virtual table may
3103 : temporarily change to one of base types. INCLUDE_DERIVED_TYPES make
3104 : us to walk the inheritance graph for all derivations.
3105 :
3106 : If COMPLETEP is non-NULL, store true if the list is complete.
3107 : CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
3108 : in the target cache. If user needs to visit every target list
3109 : just once, it can memoize them.
3110 :
3111 : If SPECULATIVE is set, the list will not contain targets that
3112 : are not speculatively taken.
3113 :
3114 : Returned vector is placed into cache. It is NOT caller's responsibility
3115 : to free it. The vector can be freed on cgraph_remove_node call if
3116 : the particular node is a virtual function present in the cache. */
3117 :
vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when user do not asks for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
       	= new polymorphic_call_target_hash_type (23);
      /* The removal hooks flush the cache when a referenced symbol goes
	 away; they are registered only once per compilation.  */
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  /* In LTO canonicalize the outer types to the representatives recorded
     in the ODR type table, so equal queries hash equal.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
	context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
	context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  The key includes the current number of ODR
     types: entries computed before a new type was added will not match.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  key.n_odr_types = odr_types.length ();
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  /* CACHE_TOKEN is simply the address of the cache entry.  */
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      /* Replay the -Wsuggest-final-types accounting recorded when this
	 entry was first computed (type_warning is ODR type id + 1).  */
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  if (!final_warning_records->type_warnings
	       [(*slot)->type_warning - 1].dyn_count.initialized_p ())
	    final_warning_records->type_warnings
	       [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
	  if (final_warning_records->dyn_count > 0)
	    final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	      = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
		+ final_warning_records->dyn_count;
	}
      /* Likewise for -Wsuggest-final-methods accounting.  */
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  if (final_warning_records->dyn_count > 0)
	    c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;
      bool check_derived_types = false;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (check_derived_types
	  ? type_or_derived_type_possibly_instantiated_p
		 (speculative_outer_type)
	  : type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer,
			   &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  /* The non-speculative walk is skipped only when a speculative answer
     was requested and the speculative walk above already found targets.  */
  if (!speculative || !nodes.length ())
    {
      bool check_derived_types = false;
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	{
	  check_derived_types = true;
	  context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (check_derived_types
	  ? type_or_derived_type_possibly_instantiated_p (outer_type)
	  : type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  /* With unknown derivations the list cannot be complete; this is
	     also the point where -Wsuggest-final-* statistics are
	     gathered for single-target calls.  */
	  if (!outer_type->all_derivations_known)
	    {
	      if (!speculative && final_warning_records
		  && nodes.length () == 1
		  && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
		{
		  if (complete
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      final_warning_records->grow_type_warnings
			(outer_type->id);
		      final_warning_records->type_warnings[outer_type->id].count++;
		      if (!final_warning_records->type_warnings
				[outer_type->id].dyn_count.initialized_p ())
			final_warning_records->type_warnings
			   [outer_type->id].dyn_count = profile_count::zero ();
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      /* Stored biased by one so 0 can mean "none".  */
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      /* Targets deferred by the walk of possibly-uninstantiated
		 bases are added last.  */
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the result into the freshly allocated cache entry.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  (*slot)->n_odr_types = odr_types.length ();
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3452 :
/* Collect a pointer to VALUE into VEC; KEY is unused.  The signature
   matches a hash_map traverse callback — presumably used to gather all
   decl_warn_count records for reporting (TODO: confirm at the caller).
   Always returns true so traversal continues.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3460 :
3461 : /* Dump target list TARGETS into FILE. */
3462 :
3463 : static void
3464 88 : dump_targets (FILE *f, vec <cgraph_node *> targets, bool verbose)
3465 : {
3466 88 : unsigned int i;
3467 :
3468 203 : for (i = 0; i < targets.length (); i++)
3469 : {
3470 115 : char *name = NULL;
3471 115 : if (in_lto_p)
3472 3 : name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3473 115 : fprintf (f, " %s", name ? name : targets[i]->dump_name ());
3474 115 : if (in_lto_p)
3475 3 : free (name);
3476 115 : if (!targets[i]->definition)
3477 43 : fprintf (f, " (no definition%s)",
3478 43 : DECL_DECLARED_INLINE_P (targets[i]->decl)
3479 : ? " inline" : "");
3480 : /* With many targets for every call polymorphic dumps are going to
3481 : be quadratic in size. */
3482 115 : if (i > 10 && !verbose)
3483 : {
3484 0 : fprintf (f, " ... and %i more targets\n", targets.length () - i);
3485 0 : return;
3486 : }
3487 : }
3488 88 : fprintf (f, "\n");
3489 : }
3490 :
3491 : /* Dump all possible targets of a polymorphic call. */
3492 :
void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx,
					bool verbose)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  /* Nothing to dump for a type that was never entered into the ODR table.  */
  if (!type)
    return;
  /* First query without speculation to get the full target list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, "    %s%s%s%s\n      ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets, verbose);

  /* Second query with speculation; print it only when it narrows the
     list compared to the non-speculative one.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  if (targets.length () != len)
    {
      fprintf (f, "    Speculative targets:");
      dump_targets (f, targets, verbose);
    }
  /* Ugly: during callgraph construction the target cache may get populated
     before all targets are found.  While this is harmless (because all local
     types are discovered and only in those case we devirtualize fully and we
     don't do speculative devirtualization before IPA stage) it triggers
     assert here when dumping at that stage also populates the case with
     speculative targets.  Quietly ignore this.  */
  gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
  fprintf (f, "\n");
}
3542 :
3543 :
3544 : /* Return true if N can be possibly target of a polymorphic call of
3545 : OTR_TYPE/OTR_TOKEN. */
3546 :
bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  bool final;

  /* __builtin_unreachable/__builtin_trap are inserted as placeholder
     targets (e.g. for impossible slots), so accept them always.  */
  if (fndecl_built_in_p (n->decl, BUILT_IN_NORMAL)
      && (DECL_FUNCTION_CODE (n->decl) == BUILT_IN_UNREACHABLE
	  || DECL_FUNCTION_CODE (n->decl) == BUILT_IN_TRAP
	  || DECL_FUNCTION_CODE (n->decl) == BUILT_IN_UNREACHABLE_TRAP))
    return true;

  /* __cxa_pure_virtual may appear in any vtable slot.  */
  if (is_cxa_pure_virtual_p (n->decl))
    return true;

  /* Without the ODR type hash we cannot prove anything; be conservative.  */
  if (!odr_hash)
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (n->semantically_equivalent_p (targets[i]))
      return true;

  /* At a moment we allow middle end to dig out new external declarations
     as a targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
3579 :
3580 :
3581 :
3582 : /* Return true if N can be possibly target of a polymorphic call of
3583 : OBJ_TYPE_REF expression REF in STMT. */
3584 :
3585 : bool
3586 6 : possible_polymorphic_call_target_p (tree ref,
3587 : gimple *stmt,
3588 : struct cgraph_node *n)
3589 : {
3590 6 : ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3591 6 : tree call_fn = gimple_call_fn (stmt);
3592 :
3593 12 : return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3594 : tree_to_uhwi
3595 6 : (OBJ_TYPE_REF_TOKEN (call_fn)),
3596 : context,
3597 6 : n);
3598 : }
3599 :
3600 :
3601 : /* After callgraph construction new external nodes may appear.
3602 : Add them into the graph. */
3603 :
void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  /* If the ODR hash was never built there is no graph to update.  */
  if (!odr_hash)
    return;
  /* The cached polymorphic target lists may be stale once new nodes
     appear; drop them so they get recomputed.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
3622 :
3623 :
3624 : /* Return true if N looks like likely target of a polymorphic call.
3625 : Rule out cxa_pure_virtual, noreturns, function declared cold and
3626 : other obvious cases. */
3627 :
3628 : bool
3629 156889 : likely_target_p (struct cgraph_node *n)
3630 : {
3631 156889 : int flags;
3632 : /* cxa_pure_virtual and similar things are not likely. */
3633 156889 : if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3634 : return false;
3635 155823 : flags = flags_from_decl_or_type (n->decl);
3636 155823 : if (flags & ECF_NORETURN)
3637 : return false;
3638 155208 : if (lookup_attribute ("cold",
3639 155208 : DECL_ATTRIBUTES (n->decl)))
3640 : return false;
3641 155208 : if (n->frequency < NODE_FREQUENCY_NORMAL)
3642 : return false;
3643 : /* If there are no live virtual tables referring the target,
3644 : the only way the target can be called is an instance coming from other
3645 : compilation unit; speculative devirtualization is built around an
3646 : assumption that won't happen. */
3647 155208 : if (!referenced_from_vtable_p (n))
3648 : return false;
3649 : return true;
3650 : }
3651 :
3652 : /* Compare type warning records P1 and P2 and choose one with larger count;
3653 : helper for qsort. */
3654 :
3655 : static int
3656 0 : type_warning_cmp (const void *p1, const void *p2)
3657 : {
3658 0 : const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3659 0 : const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3660 :
3661 0 : if (t1->dyn_count < t2->dyn_count)
3662 : return 1;
3663 0 : if (t1->dyn_count > t2->dyn_count)
3664 : return -1;
3665 0 : return t2->count - t1->count;
3666 : }
3667 :
3668 : /* Compare decl warning records P1 and P2 and choose one with larger count;
3669 : helper for qsort. */
3670 :
3671 : static int
3672 9 : decl_warning_cmp (const void *p1, const void *p2)
3673 : {
3674 9 : const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3675 9 : const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3676 :
3677 9 : if (t1->dyn_count < t2->dyn_count)
3678 : return 1;
3679 9 : if (t1->dyn_count > t2->dyn_count)
3680 : return -1;
3681 9 : return t2->count - t1->count;
3682 : }
3683 :
3684 :
3685 : /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3686 : context CTX. */
3687 :
struct cgraph_node *
try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
				  ipa_polymorphic_call_context ctx)
{
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (otr_type, otr_token, ctx, NULL, NULL, true);
  unsigned int i;
  struct cgraph_node *likely_target = NULL;

  /* Look for a unique likely target; give up as soon as we find
     a second one.  */
  for (i = 0; i < targets.length (); i++)
    if (likely_target_p (targets[i]))
      {
	if (likely_target)
	  return NULL;
	likely_target = targets[i];
      }
  /* Only defined, non-external symbols make useful speculation targets.  */
  if (!likely_target
      ||!likely_target->definition
      || DECL_EXTERNAL (likely_target->decl))
    return NULL;

  /* Don't use an implicitly-declared destructor (c++/58678).  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    return NULL;
  /* Interposable symbols that may be discarded can change at link time;
     do not speculate on them.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    return NULL;
  return likely_target;
}
3720 :
3721 : /* Various statistics counters collected during devirtualization. */
3722 :
struct devirt_stats
{
  /* Polymorphic calls seen; calls already speculated; calls converted to
     speculative; calls skipped as cold.  */
  int npolymorphic, nspeculated, nconverted, ncold;
  /* Calls with too many targets; overwritable targets; fully devirtualized
     calls; targets without a definition.  */
  int nmultiple, noverwritable, ndevirtualized, nnotdefined;
  /* Speculations we disagree/agree with; external targets; artificial
     (compiler-generated) targets.  */
  int nwrong, nok, nexternal, nartificial;
  /* Polymorphic call infos dropped as unusable by ipa-prop.  */
  int ndropped;
};
3730 :
3731 : /* Check LIKELY_TARGET and return true if it a suitable target for
3732 : devirtualization or speculative devirtualization. Increase the respective
3733 : counter in STATS if any check fails. */
3734 :
static bool
devirt_target_ok_p (cgraph_node *likely_target, struct devirt_stats *stats)
{
  /* A target without a body cannot be called directly.  */
  if (!likely_target->definition)
    {
      if (dump_file)
	fprintf (dump_file, "Target is not a definition\n\n");
      stats->nnotdefined++;
      return false;
    }
  /* Do not introduce new references to external symbols.  While we
     can handle these just well, it is common for programs to be
     incorrectly built against headers declaring methods that are not
     actually provided by the objects they are linked with.  */
  if (DECL_EXTERNAL (likely_target->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Target is external\n\n");
      stats->nexternal++;
      return false;
    }
  /* Don't use an implicitly-declared destructor (c++/58678).  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Target is artificial\n\n");
      stats->nartificial++;
      return false;
    }
  /* Interposable symbols that may be discarded can be replaced at link
     time, invalidating the speculation.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Target is overwritable\n\n");
      stats->noverwritable++;
      return false;
    }
  return true;
}
3776 :
3777 : /* The ipa-devirt pass.
3778 : When polymorphic call has only one likely target in the unit,
3779 : turn it into a speculative call. */
3780 :
static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  /* Cache tokens of target lists already proven useless, so repeated
     queries of the same list are skipped quickly.  */
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;
  struct devirt_stats stats;
  memset (&stats, 0, sizeof (stats));

  if (dump_file)
    {
      dump_type_inheritance_graph (dump_file);
      ipa_dump_noted_record_fnptrs (dump_file);
    }

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear cache in this case to trigger recomputation of all
     entries.  */
  if (odr_types_ptr && (warn_suggest_final_methods || warn_suggest_final_types))
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->dyn_count = profile_count::zero ();
      final_warning_records->grow_type_warnings (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  /* Walk every defined function and inspect its indirect call sites.  */
  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (!opt_for_fn (n->decl, flag_devirtualize))
	continue;
      /* NOTE(review): "Procesing" is misspelled in this dump string.  */
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s\n",
		 n->dump_name ());
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (!e->maybe_hot_p ())
	  {
	    if (dump_file)
	      fprintf (dump_file, "Call is cold\n\n");
	    stats.ncold++;
	    continue;
	  }
	/* Polymorphic (virtual) indirect call.  */
	else if (cgraph_polymorphic_indirect_info *pii
		   = dyn_cast <cgraph_polymorphic_indirect_info *> (e->indirect_info))
	  {
	    if (!pii->usable_p () || !odr_types_ptr)
	      continue;

	    void *cache_token;
	    bool final;

	    if (final_warning_records)
	      final_warning_records->dyn_count = e->count.ipa ();

	    vec <cgraph_node *>targets
	       = possible_polymorphic_call_targets
		  (e, &final, &cache_token, true);
	    unsigned int i;

	    /* Trigger warnings by calculating non-speculative targets.  */
	    if (warn_suggest_final_methods || warn_suggest_final_types)
	      possible_polymorphic_call_targets (e);

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e, (dump_flags & TDF_DETAILS));

	    stats.npolymorphic++;

	    /* See if the call can be devirtualized by means of ipa-prop's
	       polymorphic call context propagation.  If not, we can just
	       forget about this call being polymorphic and avoid some heavy
	       lifting in remove_unreachable_nodes that will otherwise try to
	       keep all possible targets alive until inlining and in the inliner
	       itself.

	       This may need to be revisited once we add further ways to use
	       the may edges, but it is a reasonable thing to do right now.  */

	    if ((pii->param_index == -1
		 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
		     && pii->vptr_changed))
		&& !flag_ltrans_devirtualize)
	      {
		pii->mark_unusable ();
		stats.ndropped++;
		if (dump_file)
		  fprintf (dump_file, "Dropping polymorphic call info;"
			   " it cannot be used by ipa-prop\n");
	      }

	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
	      continue;

	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		stats.nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (bad_call_targets.contains (cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		stats.nmultiple++;
		continue;
	      }
	    /* Gather up to param_max_devirt_targets likely targets; bail out
	       (and count as nmultiple) when the limit is exceeded.  */
	    auto_vec <cgraph_node *, 20> likely_targets;
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if ((int)likely_targets.length () >= param_max_devirt_targets)
		    {
		      likely_targets.truncate (0);
		      if (dump_file)
			fprintf (dump_file, "More than %i likely targets\n\n",
				 param_max_devirt_targets);
		      stats.nmultiple++;
		      break;
		    }
		  likely_targets.safe_push (targets[i]);
		}
	    if (!likely_targets.length ())
	      {
		bad_call_targets.add (cache_token);
		continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		for (cgraph_node * likely_target: likely_targets)
		  if (e->speculative_call_for_target (likely_target))
		    {
		      fprintf (dump_file,
			       "We agree with speculation on target %s\n\n",
			       likely_target->dump_name ());
		      stats.nok++;
		    }
		  else
		    {
		      fprintf (dump_file,
			       "We disagree with speculation on target %s\n\n",
			       likely_target->dump_name ());
		      stats.nwrong++;
		    }
		continue;
	      }
	    /* Turn the call into (possibly multi-target) speculative calls,
	       splitting 80% of the original count evenly among targets.  */
	    bool first = true;
	    unsigned speculative_id = e->get_next_speculative_id ();
	    for (cgraph_node * likely_target: likely_targets)
	      {
		if (!devirt_target_ok_p (likely_target, &stats))
		  continue;
		else if (dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
					 "speculatively devirtualizing call "
					 "in %s to %s\n",
					 n->dump_name (),
					 likely_target->dump_name ());
		      }
		    /* Prefer a non-interposable alias so the reference cannot
		       be redirected at link time.  */
		    if (!likely_target->can_be_discarded_p ())
		      {
			cgraph_node *alias;
			alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
			if (alias)
			  likely_target = alias;
		      }
		    if (first)
		      stats.nconverted++;
		    first = false;
		    update = true;
		    e->make_speculative
		      (likely_target,
		       e->count.apply_scale (8, 10 * likely_targets.length ()),
		       speculative_id++);
		  }
	      }
	    if (speculative_id > 1 && dump_enabled_p ())
	      {
		dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
				 "devirtualized call in %s to %i targets\n",
				 n->dump_name (),
				 speculative_id);
	      }
	  }
	/* Plain indirect call through a function pointer that was loaded
	   from a record field.  */
	else if (cgraph_simple_indirect_info *sii
		   = dyn_cast <cgraph_simple_indirect_info *> (e->indirect_info))
	  {
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		stats.nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (!sii->fnptr_loaded_from_record
		|| !opt_for_fn (n->decl,
				flag_speculatively_call_stored_functions))
	      continue;

	    /* If exactly one function was ever noted as stored into this
	       record field, speculate that this is the callee.  */
	    tree rec_type = sii->rec_type;
	    unsigned fld_off = sii->fld_offset;
	    tree likely_tgt_decl = ipa_single_noted_fnptr_in_record (rec_type,
								     fld_off);
	    cgraph_node *likely_tgt_node;
	    if (likely_tgt_decl
		&& (likely_tgt_node = cgraph_node::get (likely_tgt_decl))
		&& devirt_target_ok_p (likely_tgt_node, &stats))
	      {
		if (!likely_tgt_node->can_be_discarded_p ())
		  {
		    cgraph_node *alias;
		    alias = dyn_cast<cgraph_node *> (likely_tgt_node
						     ->noninterposable_alias ());
		    if (alias)
		      likely_tgt_node = alias;
		  }
		if (e->speculative)
		  {
		    if (e->speculative_call_for_target (likely_tgt_node))
		      {
			fprintf (dump_file, "Simple call agree with speculation\n\n");
			stats.nok++;
		      }
		    else
		      {
			fprintf (dump_file, "Simple call disagree with speculation\n\n");
			stats.nwrong++;
		      }
		    continue;
		  }

		if (dump_enabled_p ())
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
				   "speculatively turning an indirect call "
				   "in %s to a direct one to %s\n",
				   n->dump_name (),
				   likely_tgt_node->dump_name ());

		update = true;
		e->make_speculative (likely_tgt_node,
				     e->count.apply_scale (8, 10),
				     e->get_next_speculative_id ());
	      }
	  }
      if (update)
	ipa_update_overall_fn_summary (n);
    }
  ipa_free_noted_fnptr_calls ();
  /* Emit the -Wsuggest-final-types/-Wsuggest-final-methods warnings
     accumulated above, most frequently executed cases first.  */
  if (odr_types_ptr && (warn_suggest_final_methods || warn_suggest_final_types))
    {
      if (warn_suggest_final_types)
	{
	  final_warning_records->type_warnings.qsort (type_warning_cmp);
	  for (unsigned int i = 0;
	       i < final_warning_records->type_warnings.length (); i++)
	    if (final_warning_records->type_warnings[i].count)
	      {
		tree type = final_warning_records->type_warnings[i].type;
		int count = final_warning_records->type_warnings[i].count;
		profile_count dyn_count
		  = final_warning_records->type_warnings[i].dyn_count;

		if (!(dyn_count > 0))
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls",
			     type,
			     count);
		else
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call "
			     "executed %lli times",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls "
			     "executed %lli times",
			     type,
			     count,
			     (long long) dyn_count.to_gcov_type ());
	      }
	}

      if (warn_suggest_final_methods)
	{
	  auto_vec<const decl_warn_count*> decl_warnings_vec;

	  final_warning_records->decl_warnings.traverse
	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
	  decl_warnings_vec.qsort (decl_warning_cmp);
	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
	    {
	      tree decl = decl_warnings_vec[i]->decl;
	      int count = decl_warnings_vec[i]->count;
	      profile_count dyn_count
		= decl_warnings_vec[i]->dyn_count;

	      if (!(dyn_count > 0))
		if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i calls",
			     DECL_CONTEXT (decl), count);
		else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring method %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring method %qD final "
			     "would enable devirtualization of %i calls",
			     decl, count);
	      else if (DECL_CXX_DESTRUCTOR_P (decl))
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   DECL_CONTEXT (decl), count,
			   (long long)dyn_count.to_gcov_type ());
	      else
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring method %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring method %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   decl, count,
			   (long long)dyn_count.to_gcov_type ());
	    }
	}

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
	     stats.npolymorphic, stats.ndevirtualized, stats.nconverted,
	     stats.ncold, stats.nmultiple, stats.noverwritable,
	     stats.nspeculated, stats.nok, stats.nwrong,
	     stats.nexternal, stats.nnotdefined, stats.nartificial,
	     stats.ndropped);
  /* NOTE(review): stats.ndevirtualized is never incremented within this
     function, so the return value here effectively depends on
     stats.ndropped only — confirm against the rest of the file.  */
  return stats.ndevirtualized || stats.ndropped ? TODO_remove_functions : 0;
}
4155 :
4156 : namespace {
4157 :
/* Pass descriptor for the IPA devirtualization pass; the gate and execute
   hooks are supplied by pass_ipa_devirt.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
4170 :
/* IPA pass wrapper around ipa_devirt; see the pass description above.  */
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      /* In LTO, always run the IPA passes and decide on function basis if the
	 pass is enabled.  */
      if (in_lto_p)
	return true;
      /* Otherwise run only when some work can possibly be done: speculative
	 devirtualization, the -Wsuggest-final-* warnings, or speculative
	 calls to functions stored into records.  */
      return (optimize
	      && ((flag_devirtualize
		   && (flag_devirtualize_speculatively
		       || (warn_suggest_final_methods
			   || warn_suggest_final_types)))
		  || flag_speculatively_call_stored_functions));
    }

  unsigned int execute (function *) final override { return ipa_devirt (); }

}; // class pass_ipa_devirt
4205 :
4206 : } // anon namespace
4207 :
/* Create a new instance of the ipa-devirt pass for pass manager
   context CTXT.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
4213 :
4214 : /* Print ODR name of a TYPE if available.
4215 : Use demangler when option DEMANGLE is used. */
4216 :
4217 : DEBUG_FUNCTION void
4218 0 : debug_tree_odr_name (tree type, bool demangle)
4219 : {
4220 0 : const char *odr = get_odr_name_for_type (type);
4221 0 : if (demangle)
4222 : {
4223 0 : const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
4224 0 : odr = cplus_demangle (odr, opts);
4225 : }
4226 :
4227 0 : fprintf (stderr, "%s\n", odr);
4228 0 : }
4229 :
4230 : /* Register ODR enum so we later stream record about its values. */
4231 :
void
register_odr_enum (tree t)
{
  /* The collected enums are only consumed by the LTO streaming code
     (ipa_odr_summary_write), so record them only when -flto is on.  */
  if (flag_lto)
    vec_safe_push (odr_enums, t);
}
4238 :
4239 : /* Write ODR enums to LTO stream file. */
4240 :
4241 : static void
4242 23316 : ipa_odr_summary_write (void)
4243 : {
4244 23316 : if (!odr_enums && !odr_enum_map)
4245 23316 : return;
4246 176 : struct output_block *ob = create_output_block (LTO_section_odr_types);
4247 176 : unsigned int i;
4248 176 : tree t;
4249 :
4250 176 : if (odr_enums)
4251 : {
4252 173 : streamer_write_uhwi (ob, odr_enums->length ());
4253 :
4254 : /* For every ODR enum stream out
4255 : - its ODR name
4256 : - number of values,
4257 : - value names and constant their represent
4258 : - bitpack of locations so we can do good diagnostics. */
4259 514 : FOR_EACH_VEC_ELT (*odr_enums, i, t)
4260 : {
4261 682 : streamer_write_string (ob, ob->main_stream,
4262 341 : IDENTIFIER_POINTER
4263 : (DECL_ASSEMBLER_NAME (TYPE_NAME (t))),
4264 : true);
4265 :
4266 341 : int n = 0;
4267 2334 : for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
4268 1993 : n++;
4269 341 : streamer_write_uhwi (ob, n);
4270 2334 : for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
4271 : {
4272 1993 : streamer_write_string (ob, ob->main_stream,
4273 1993 : IDENTIFIER_POINTER (TREE_PURPOSE (e)),
4274 : true);
4275 3986 : streamer_write_wide_int (ob,
4276 3986 : wi::to_wide (DECL_INITIAL
4277 : (TREE_VALUE (e))));
4278 : }
4279 :
4280 341 : bitpack_d bp = bitpack_create (ob->main_stream);
4281 341 : lto_output_location (ob, &bp, DECL_SOURCE_LOCATION (TYPE_NAME (t)));
4282 2334 : for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
4283 3986 : lto_output_location (ob, &bp,
4284 1993 : DECL_SOURCE_LOCATION (TREE_VALUE (e)));
4285 341 : streamer_write_bitpack (&bp);
4286 : }
4287 173 : vec_free (odr_enums);
4288 173 : odr_enums = NULL;
4289 : }
4290 : /* During LTO incremental linking we already have streamed in types. */
4291 3 : else if (odr_enum_map)
4292 : {
4293 3 : gcc_checking_assert (!odr_enums);
4294 3 : streamer_write_uhwi (ob, odr_enum_map->elements ());
4295 :
4296 3 : hash_map<nofree_string_hash, odr_enum>::iterator iter
4297 3 : = odr_enum_map->begin ();
4298 6 : for (; iter != odr_enum_map->end (); ++iter)
4299 : {
4300 3 : odr_enum &this_enum = (*iter).second;
4301 3 : streamer_write_string (ob, ob->main_stream, (*iter).first, true);
4302 :
4303 3 : streamer_write_uhwi (ob, this_enum.vals.length ());
4304 11 : for (unsigned j = 0; j < this_enum.vals.length (); j++)
4305 : {
4306 16 : streamer_write_string (ob, ob->main_stream,
4307 8 : this_enum.vals[j].name, true);
4308 8 : streamer_write_wide_int (ob, this_enum.vals[j].val);
4309 : }
4310 :
4311 3 : bitpack_d bp = bitpack_create (ob->main_stream);
4312 3 : lto_output_location (ob, &bp, this_enum.locus);
4313 11 : for (unsigned j = 0; j < this_enum.vals.length (); j++)
4314 8 : lto_output_location (ob, &bp, this_enum.vals[j].locus);
4315 3 : streamer_write_bitpack (&bp);
4316 : }
4317 :
4318 6 : delete odr_enum_map;
4319 3 : obstack_free (&odr_enum_obstack, NULL);
4320 3 : odr_enum_map = NULL;
4321 : }
4322 :
4323 176 : produce_asm (ob);
4324 176 : destroy_output_block (ob);
4325 : }
4326 :
4327 : /* Write ODR enums from LTO stream file and warn on mismatches. */
4328 :
4329 : static void
4330 96 : ipa_odr_read_section (struct lto_file_decl_data *file_data, const char *data,
4331 : size_t len)
4332 : {
4333 96 : const struct lto_function_header *header
4334 : = (const struct lto_function_header *) data;
4335 96 : const int cfg_offset = sizeof (struct lto_function_header);
4336 96 : const int main_offset = cfg_offset + header->cfg_size;
4337 96 : const int string_offset = main_offset + header->main_size;
4338 96 : class data_in *data_in;
4339 :
4340 96 : lto_input_block ib ((const char *) data + main_offset, header->main_size,
4341 96 : file_data);
4342 :
4343 96 : data_in
4344 192 : = lto_data_in_create (file_data, (const char *) data + string_offset,
4345 96 : header->string_size, vNULL);
4346 96 : unsigned int n = streamer_read_uhwi (&ib);
4347 :
4348 96 : if (!odr_enum_map)
4349 : {
4350 69 : gcc_obstack_init (&odr_enum_obstack);
4351 69 : odr_enum_map = new (hash_map <nofree_string_hash, odr_enum>);
4352 : }
4353 :
4354 329 : for (unsigned i = 0; i < n; i++)
4355 : {
4356 233 : const char *rname = streamer_read_string (data_in, &ib);
4357 233 : unsigned int nvals = streamer_read_uhwi (&ib);
4358 233 : char *name;
4359 :
4360 233 : obstack_grow (&odr_enum_obstack, rname, strlen (rname) + 1);
4361 233 : name = XOBFINISH (&odr_enum_obstack, char *);
4362 :
4363 233 : bool existed_p;
4364 233 : class odr_enum &this_enum
4365 233 : = odr_enum_map->get_or_insert (xstrdup (name), &existed_p);
4366 :
4367 : /* If this is first time we see the enum, remember its definition. */
4368 233 : if (!existed_p)
4369 : {
4370 206 : this_enum.vals.safe_grow_cleared (nvals, true);
4371 206 : this_enum.warned = false;
4372 206 : if (dump_file)
4373 0 : fprintf (dump_file, "enum %s\n{\n", name);
4374 1739 : for (unsigned j = 0; j < nvals; j++)
4375 : {
4376 1533 : const char *val_name = streamer_read_string (data_in, &ib);
4377 1533 : obstack_grow (&odr_enum_obstack, val_name, strlen (val_name) + 1);
4378 1533 : this_enum.vals[j].name = XOBFINISH (&odr_enum_obstack, char *);
4379 1533 : this_enum.vals[j].val = streamer_read_wide_int (&ib);
4380 1533 : if (dump_file)
4381 0 : fprintf (dump_file, " %s = " HOST_WIDE_INT_PRINT_DEC ",\n",
4382 0 : val_name, wi::fits_shwi_p (this_enum.vals[j].val)
4383 0 : ? this_enum.vals[j].val.to_shwi () : -1);
4384 : }
4385 206 : bitpack_d bp = streamer_read_bitpack (&ib);
4386 206 : stream_input_location (&this_enum.locus, &bp, data_in);
4387 1739 : for (unsigned j = 0; j < nvals; j++)
4388 1533 : stream_input_location (&this_enum.vals[j].locus, &bp, data_in);
4389 206 : data_in->location_cache.apply_location_cache ();
4390 206 : if (dump_file)
4391 0 : fprintf (dump_file, "}\n");
4392 : }
4393 : /* If we already have definition, compare it with new one and output
4394 : warnings if they differs. */
4395 : else
4396 : {
4397 27 : int do_warning = -1;
4398 27 : char *warn_name = NULL;
4399 27 : wide_int warn_value = wi::zero (1);
4400 :
4401 27 : if (dump_file)
4402 0 : fprintf (dump_file, "Comparing enum %s\n", name);
4403 :
4404 : /* Look for differences which we will warn about later once locations
4405 : are streamed. */
4406 107 : for (unsigned j = 0; j < nvals; j++)
4407 : {
4408 80 : const char *id = streamer_read_string (data_in, &ib);
4409 80 : wide_int val = streamer_read_wide_int (&ib);
4410 :
4411 80 : if (do_warning != -1 || j >= this_enum.vals.length ())
4412 20 : continue;
4413 60 : if (strcmp (id, this_enum.vals[j].name)
4414 53 : || (val.get_precision() !=
4415 53 : this_enum.vals[j].val.get_precision())
4416 107 : || val != this_enum.vals[j].val)
4417 : {
4418 13 : warn_name = xstrdup (id);
4419 13 : warn_value = val;
4420 13 : do_warning = j;
4421 13 : if (dump_file)
4422 0 : fprintf (dump_file, " Different on entry %i\n", j);
4423 : }
4424 80 : }
4425 :
4426 : /* Stream in locations, but do not apply them unless we are going
4427 : to warn. */
4428 27 : bitpack_d bp = streamer_read_bitpack (&ib);
4429 27 : location_t locus;
4430 :
4431 27 : stream_input_location (&locus, &bp, data_in);
4432 :
4433 : /* Did we find a difference? */
4434 41 : if (do_warning != -1 || nvals != this_enum.vals.length ())
4435 : {
4436 13 : data_in->location_cache.apply_location_cache ();
4437 :
4438 13 : const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
4439 13 : char *dmgname = cplus_demangle (name, opts);
4440 13 : if (this_enum.warned
4441 26 : || !warning_at (this_enum.locus,
4442 13 : OPT_Wodr, "type %qs violates the "
4443 : "C++ One Definition Rule",
4444 : dmgname))
4445 : do_warning = -1;
4446 : else
4447 : {
4448 13 : this_enum.warned = true;
4449 13 : if (do_warning == -1)
4450 0 : inform (locus,
4451 : "an enum with different number of values is defined"
4452 : " in another translation unit");
4453 13 : else if (warn_name)
4454 13 : inform (locus,
4455 : "an enum with different value name"
4456 : " is defined in another translation unit");
4457 : else
4458 0 : inform (locus,
4459 : "an enum with different values"
4460 : " is defined in another translation unit");
4461 : }
4462 : }
4463 : else
4464 14 : data_in->location_cache.revert_location_cache ();
4465 :
4466 : /* Finally look up for location of the actual value that diverged. */
4467 107 : for (unsigned j = 0; j < nvals; j++)
4468 : {
4469 80 : location_t id_locus;
4470 :
4471 80 : data_in->location_cache.revert_location_cache ();
4472 80 : stream_input_location (&id_locus, &bp, data_in);
4473 :
4474 80 : if ((int) j == do_warning)
4475 : {
4476 13 : data_in->location_cache.apply_location_cache ();
4477 :
4478 13 : if (strcmp (warn_name, this_enum.vals[j].name))
4479 7 : inform (this_enum.vals[j].locus,
4480 : "name %qs differs from name %qs defined"
4481 : " in another translation unit",
4482 7 : this_enum.vals[j].name, warn_name);
4483 6 : else if (this_enum.vals[j].val.get_precision() !=
4484 6 : warn_value.get_precision())
4485 6 : inform (this_enum.vals[j].locus,
4486 : "name %qs is defined as %u-bit while another "
4487 : "translation unit defines it as %u-bit",
4488 6 : warn_name, this_enum.vals[j].val.get_precision(),
4489 : warn_value.get_precision());
4490 : /* FIXME: In case there is easy way to print wide_ints,
4491 : perhaps we could do it here instead of overflow check. */
4492 0 : else if (wi::fits_shwi_p (this_enum.vals[j].val)
4493 0 : && wi::fits_shwi_p (warn_value))
4494 0 : inform (this_enum.vals[j].locus,
4495 : "name %qs is defined to %wd while another "
4496 : "translation unit defines it as %wd",
4497 0 : warn_name, this_enum.vals[j].val.to_shwi (),
4498 : warn_value.to_shwi ());
4499 : else
4500 0 : inform (this_enum.vals[j].locus,
4501 : "name %qs is defined to different value "
4502 : "in another translation unit",
4503 : warn_name);
4504 :
4505 13 : inform (id_locus,
4506 : "mismatching definition");
4507 : }
4508 : else
4509 67 : data_in->location_cache.revert_location_cache ();
4510 : }
4511 27 : if (warn_name)
4512 13 : free (warn_name);
4513 27 : obstack_free (&odr_enum_obstack, name);
4514 27 : }
4515 : }
4516 96 : lto_free_section_data (file_data, LTO_section_ipa_fn_summary, NULL, data,
4517 : len);
4518 96 : lto_data_in_delete (data_in);
4519 96 : }
4520 :
4521 : /* Read all ODR type sections. */
4522 :
4523 : static void
4524 12399 : ipa_odr_summary_read (void)
4525 : {
4526 12399 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4527 12399 : struct lto_file_decl_data *file_data;
4528 12399 : unsigned int j = 0;
4529 :
4530 38252 : while ((file_data = file_data_vec[j++]))
4531 : {
4532 13454 : size_t len;
4533 13454 : const char *data
4534 13454 : = lto_get_summary_section_data (file_data, LTO_section_odr_types,
4535 : &len);
4536 13454 : if (data)
4537 96 : ipa_odr_read_section (file_data, data, len);
4538 : }
4539 : /* Enum info is used only to produce warnings. Only case we will need it
4540 : again is streaming for incremental LTO. */
4541 12399 : if (flag_incremental_link != INCREMENTAL_LINK_LTO)
4542 : {
4543 12432 : delete odr_enum_map;
4544 12366 : obstack_free (&odr_enum_obstack, NULL);
4545 12366 : odr_enum_map = NULL;
4546 : }
4547 12399 : }
4548 :
namespace {

/* Metadata for the "odr" IPA pass.  The pass performs no IPA
   transformation of its own (see execute below); it exists so that the
   write_summary/read_summary hooks wired up in the constructor stream
   ODR information between compile- and link-time.  */
const pass_data pass_data_ipa_odr =
{
  IPA_PASS, /* type */
  "odr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_ODR, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_odr : public ipa_opt_pass_d
{
public:
  pass_ipa_odr (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_odr, ctxt,
		      NULL, /* generate_summary */
		      ipa_odr_summary_write, /* write_summary */
		      ipa_odr_summary_read, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  /* Active only when producing LTO bytecode (-flto) or reading it back
     in (in_lto_p).  */
  bool gate (function *) final override
  {
    return (in_lto_p || flag_lto);
  }

  /* All the work happens in the summary read/write hooks; there is
     nothing to do at execute time.  */
  unsigned int execute (function *) final override
  {
    return 0;
  }

}; // class pass_ipa_odr

} // anon namespace
4594 :
4595 : ipa_opt_pass_d *
4596 288775 : make_pass_ipa_odr (gcc::context *ctxt)
4597 : {
4598 288775 : return new pass_ipa_odr (ctxt);
4599 : }
4600 :
4601 :
4602 : #include "gt-ipa-devirt.h"
|