Line data Source code
1 : /* Basic IPA utilities for type inheritance graph construction and
2 : devirtualization.
3 : Copyright (C) 2013-2026 Free Software Foundation, Inc.
4 : Contributed by Jan Hubicka
5 :
6 : This file is part of GCC.
7 :
8 : GCC is free software; you can redistribute it and/or modify it under
9 : the terms of the GNU General Public License as published by the Free
10 : Software Foundation; either version 3, or (at your option) any later
11 : version.
12 :
13 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 : for more details.
17 :
18 : You should have received a copy of the GNU General Public License
19 : along with GCC; see the file COPYING3. If not see
20 : <http://www.gnu.org/licenses/>. */
21 :
22 : /* Brief vocabulary:
23 : ODR = One Definition Rule
24 : In short, the ODR states that:
25 : 1 In any translation unit, a template, type, function, or object can
26 : have no more than one definition. Some of these can have any number
27 : of declarations. A definition provides an instance.
28 : 2 In the entire program, an object or non-inline function cannot have
29 : more than one definition; if an object or function is used, it must
30 : have exactly one definition. You can declare an object or function
31 : that is never used, in which case you don't have to provide
32 : a definition. In no event can there be more than one definition.
33 : 3 Some things, like types, templates, and extern inline functions, can
34 : be defined in more than one translation unit. For a given entity,
35 : each definition must be the same. Non-extern objects and functions
36 : in different translation units are different entities, even if their
37 : names and types are the same.
38 :
39 : OTR = OBJ_TYPE_REF
40 : This is the Gimple representation of type information of a polymorphic call.
41 : It contains two parameters:
42 : otr_type is a type of class whose method is called.
43 : otr_token is the index into virtual table where address is taken.
44 :
45 : BINFO
46 : This is the type inheritance information attached to each tree
47 : RECORD_TYPE by the C++ frontend. It provides information about base
48 : types and virtual tables.
49 :
50 : BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 : BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 : BINFO_VTABLE.
53 :
54 : Base types of a given type are enumerated by BINFO_BASE_BINFO
     vector.  Members of this vector are not the BINFOs associated
56 : with a base type. Rather they are new copies of BINFOs
57 : (base BINFOs). Their virtual tables may differ from
58 : virtual table of the base type. Also BINFO_OFFSET specifies
59 : offset of the base within the type.
60 :
61 : In the case of single inheritance, the virtual table is shared
62 : and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
     inheritance the individual virtual tables are pointed to by
     BINFO_VTABLE of the base binfos (which differs from the BINFO_VTABLE
     of the binfo associated with the base type).
66 :
67 : BINFO lookup for a given base type and offset can be done by
68 : get_binfo_at_offset. It returns proper BINFO whose virtual table
69 : can be used for lookup of virtual methods associated with the
70 : base type.
71 :
72 : token
73 : This is an index of virtual method in virtual table associated
74 : to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 : or from DECL_VINDEX of a given virtual table.
76 :
77 : polymorphic (indirect) call
78 : This is callgraph representation of virtual method call. Every
79 : polymorphic call contains otr_type and otr_token taken from
80 : original OBJ_TYPE_REF at callgraph construction time.
81 :
82 : What we do here:
83 :
84 : build_type_inheritance_graph triggers a construction of the type inheritance
85 : graph.
86 :
87 : We reconstruct it based on types of methods we see in the unit.
88 : This means that the graph is not complete. Types with no methods are not
89 : inserted into the graph. Also types without virtual methods are not
90 : represented at all, though it may be easy to add this.
91 :
92 : The inheritance graph is represented as follows:
93 :
94 : Vertices are structures odr_type. Every odr_type may correspond
95 : to one or more tree type nodes that are equivalent by ODR rule.
96 : (the multiple type nodes appear only with linktime optimization)
97 :
98 : Edges are represented by odr_type->base and odr_type->derived_types.
99 : At the moment we do not track offsets of types for multiple inheritance.
100 : Adding this is easy.
101 :
   possible_polymorphic_call_targets returns, given the parameters found in
   an indirect polymorphic edge, all possible polymorphic call targets of
   the call.
104 :
105 : pass_ipa_devirt performs simple speculative devirtualization.
106 : */
107 :
108 : #include "config.h"
109 : #include "system.h"
110 : #include "coretypes.h"
111 : #include "backend.h"
112 : #include "rtl.h"
113 : #include "tree.h"
114 : #include "gimple.h"
115 : #include "alloc-pool.h"
116 : #include "tree-pass.h"
117 : #include "cgraph.h"
118 : #include "lto-streamer.h"
119 : #include "fold-const.h"
120 : #include "print-tree.h"
121 : #include "calls.h"
122 : #include "ipa-utils.h"
123 : #include "gimple-iterator.h"
124 : #include "gimple-fold.h"
125 : #include "symbol-summary.h"
126 : #include "tree-vrp.h"
127 : #include "sreal.h"
128 : #include "ipa-cp.h"
129 : #include "ipa-prop.h"
130 : #include "ipa-fnsummary.h"
131 : #include "demangle.h"
132 : #include "dbgcnt.h"
133 : #include "gimple-pretty-print.h"
134 : #include "intl.h"
135 : #include "stringpool.h"
136 : #include "attribs.h"
137 : #include "data-streamer.h"
138 : #include "lto-streamer.h"
139 : #include "streamer-hooks.h"
140 :
/* Hash based set of pairs of types.  Used as the visited set while
   recursively comparing two types for ODR equivalence; pairs are stored
   with the smaller TYPE_UID first (see odr_subtypes_equivalent_p).  */
struct type_pair
{
  /* First type of the pair; NULL marks an empty hash slot.  */
  tree first;
  /* Second type of the pair.  */
  tree second;
};
147 :
148 : template <>
149 : struct default_hash_traits <type_pair>
150 : : typed_noop_remove <type_pair>
151 : {
152 : GTY((skip)) typedef type_pair value_type;
153 : GTY((skip)) typedef type_pair compare_type;
154 : static hashval_t
155 13 : hash (type_pair p)
156 : {
157 13 : return TYPE_UID (p.first) ^ TYPE_UID (p.second);
158 : }
159 : static const bool empty_zero_p = true;
160 : static bool
161 98540 : is_empty (type_pair p)
162 : {
163 98527 : return p.first == NULL;
164 : }
165 : static bool
166 : is_deleted (type_pair p ATTRIBUTE_UNUSED)
167 : {
168 : return false;
169 : }
170 : static bool
171 0 : equal (const type_pair &a, const type_pair &b)
172 : {
173 0 : return a.first==b.first && a.second == b.second;
174 : }
175 : static void
176 0 : mark_empty (type_pair &e)
177 : {
178 0 : e.first = NULL;
179 : }
180 : };
181 :
/* HACK alert: this is used to communicate with ipa-inline-transform that
   thunk is being expanded and there is no need to clear the polymorphic
   call target cache.  */
bool thunk_expansion;

/* Forward declarations for the recursive structural comparison of ODR
   types and for ODR violation reporting, both defined later in this
   file.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair> *,
				    location_t, location_t);
static void warn_odr (tree t1, tree t2, tree st1, tree st2,
		      bool warn, bool *warned, const char *reason);

/* Set once any ODR violation has been diagnosed.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
198 :
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type: the representative tree node of this ODR
     equivalence class.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed over one with it.  */
  bool rtti_broken;
  /* Set when the canonical type is determined using the type name.  */
  bool tbaa_enabled;
};
231 :
232 : /* Return TRUE if all derived types of T are known and thus
233 : we may consider the walk of derived type complete.
234 :
235 : This is typically true only for final anonymous namespace types and types
236 : defined within functions (that may be COMDAT and thus shared across units,
237 : but with the same set of derived types). */
238 :
239 : bool
240 3294970 : type_all_derivations_known_p (const_tree t)
241 : {
242 3294970 : if (TYPE_FINAL_P (t))
243 : return true;
244 3200377 : if (flag_ltrans)
245 : return false;
246 : /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
247 3195086 : if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
248 : return true;
249 3195086 : if (type_in_anonymous_namespace_p (t))
250 : return true;
251 3191899 : return (decl_function_context (TYPE_NAME (t)) != NULL);
252 : }
253 :
254 : /* Return TRUE if type's constructors are all visible. */
255 :
256 : static bool
257 308259 : type_all_ctors_visible_p (tree t)
258 : {
259 308259 : return !flag_ltrans
260 308150 : && symtab->state >= CONSTRUCTION
261 : /* We cannot always use type_all_derivations_known_p.
262 : For function local types we must assume case where
263 : the function is COMDAT and shared in between units.
264 :
265 : TODO: These cases are quite easy to get, but we need
266 : to keep track of C++ privatizing via -Wno-weak
267 : as well as the IPA privatizing. */
268 616409 : && type_in_anonymous_namespace_p (t);
269 : }
270 :
271 : /* Return TRUE if type may have instance. */
272 :
273 : static bool
274 308259 : type_possibly_instantiated_p (tree t)
275 : {
276 308259 : tree vtable;
277 308259 : varpool_node *vnode;
278 :
279 : /* TODO: Add abstract types here. */
280 308259 : if (!type_all_ctors_visible_p (t))
281 : return true;
282 :
283 7526 : vtable = BINFO_VTABLE (TYPE_BINFO (t));
284 7526 : if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
285 7526 : vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
286 7526 : vnode = varpool_node::get (vtable);
287 7526 : return vnode && vnode->definition;
288 : }
289 :
290 : /* Return true if T or type derived from T may have instance. */
291 :
292 : static bool
293 9 : type_or_derived_type_possibly_instantiated_p (odr_type t)
294 : {
295 9 : if (type_possibly_instantiated_p (t->type))
296 : return true;
297 9 : for (auto derived : t->derived_types)
298 3 : if (type_or_derived_type_possibly_instantiated_p (derived))
299 : return true;
300 : return false;
301 : }
302 :
/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  */

struct odr_name_hasher : pointer_hash <odr_type_d>
{
  /* Lookups compare a stored odr_type_d against a tree type node.  */
  typedef union tree_node *compare_type;
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
  static inline void remove (odr_type_d *);
};
313 :
314 : static bool
315 7320562 : can_be_name_hashed_p (tree t)
316 : {
317 158231 : return (!in_lto_p || odr_type_p (t));
318 : }
319 :
320 : /* Hash type by its ODR name. */
321 :
322 : static hashval_t
323 42289228 : hash_odr_name (const_tree t)
324 : {
325 42289228 : gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
326 :
327 : /* If not in LTO, all main variants are unique, so we can do
328 : pointer hash. */
329 42289228 : if (!in_lto_p)
330 41724333 : return htab_hash_pointer (t);
331 :
332 : /* Anonymous types are unique. */
333 564895 : if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
334 14933 : return htab_hash_pointer (t);
335 :
336 549962 : gcc_checking_assert (TYPE_NAME (t)
337 : && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
338 549962 : return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
339 : }
340 :
341 : /* Return the computed hashcode for ODR_TYPE. */
342 :
343 : inline hashval_t
344 34983416 : odr_name_hasher::hash (const odr_type_d *odr_type)
345 : {
346 34983416 : return hash_odr_name (odr_type->type);
347 : }
348 :
349 : /* For languages with One Definition Rule, work out if
350 : types are the same based on their name.
351 :
352 : This is non-trivial for LTO where minor differences in
353 : the type representation may have prevented type merging
354 : to merge two copies of otherwise equivalent type.
355 :
356 : Until we start streaming mangled type names, this function works
357 : only for polymorphic types.
358 : */
359 :
360 : bool
361 4813770 : types_same_for_odr (const_tree type1, const_tree type2)
362 : {
363 4813770 : gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
364 :
365 4813770 : type1 = TYPE_MAIN_VARIANT (type1);
366 4813770 : type2 = TYPE_MAIN_VARIANT (type2);
367 :
368 4813770 : if (type1 == type2)
369 : return true;
370 :
371 1528115 : if (!in_lto_p)
372 : return false;
373 :
374 : /* Anonymous namespace types are never duplicated. */
375 3032 : if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
376 3032 : || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
377 0 : return false;
378 :
379 : /* If both type has mangled defined check if they are same.
380 : Watch for anonymous types which are all mangled as "<anon">. */
381 1516 : if (!type_with_linkage_p (type1) || !type_with_linkage_p (type2))
382 : return false;
383 1516 : if (type_in_anonymous_namespace_p (type1)
384 1516 : || type_in_anonymous_namespace_p (type2))
385 0 : return false;
386 1516 : return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
387 1516 : == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
388 : }
389 :
390 : /* Return true if we can decide on ODR equivalency.
391 :
392 : In non-LTO it is always decide, in LTO however it depends in the type has
393 : ODR info attached. */
394 :
395 : bool
396 3751191 : types_odr_comparable (tree t1, tree t2)
397 : {
398 3751191 : return (!in_lto_p
399 19439 : || TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)
400 3752451 : || (odr_type_p (TYPE_MAIN_VARIANT (t1))
401 1246 : && odr_type_p (TYPE_MAIN_VARIANT (t2))));
402 : }
403 :
404 : /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
405 : known, be conservative and return false. */
406 :
407 : bool
408 1090989 : types_must_be_same_for_odr (tree t1, tree t2)
409 : {
410 1090989 : if (types_odr_comparable (t1, t2))
411 1090989 : return types_same_for_odr (t1, t2);
412 : else
413 0 : return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
414 : }
415 :
416 : /* If T is compound type, return type it is based on. */
417 :
418 : static tree
419 6734 : compound_type_base (const_tree t)
420 : {
421 6734 : if (TREE_CODE (t) == ARRAY_TYPE
422 6596 : || POINTER_TYPE_P (t)
423 5051 : || TREE_CODE (t) == COMPLEX_TYPE
424 5042 : || VECTOR_TYPE_P (t))
425 1696 : return TREE_TYPE (t);
426 5038 : if (TREE_CODE (t) == METHOD_TYPE)
427 422 : return TYPE_METHOD_BASETYPE (t);
428 4616 : if (TREE_CODE (t) == OFFSET_TYPE)
429 0 : return TYPE_OFFSET_BASETYPE (t);
430 : return NULL_TREE;
431 : }
432 :
/* Return true if T is either ODR type or compound type based from it.
   If the function return true, we know that T is a type originating from C++
   source even at link-time.  */

bool
odr_or_derived_type_p (const_tree t)
{
  /* Walk down through compound types (pointers, arrays, ...) until we
     reach an ODR type or run out of compound layers.  */
  do
    {
      if (odr_type_p (TYPE_MAIN_VARIANT (t)))
	return true;
      /* Function type is a tricky one. Basically we can consider it
	 ODR derived if return type or any of the parameters is.
	 We need to check all parameters because LTO streaming merges
	 common types (such as void) and they are not considered ODR then.  */
      if (TREE_CODE (t) == FUNCTION_TYPE)
	{
	  if (TYPE_METHOD_BASETYPE (t))
	    /* Continue the outer walk from the method's class type.  */
	    t = TYPE_METHOD_BASETYPE (t);
	  else
	    {
	      /* Recurse into the return type and every argument type.  */
	      if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
		return true;
	      for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
		if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t))))
		  return true;
	      return false;
	    }
	}
      else
	/* Strip one compound layer; NULL terminates the walk.  */
	t = compound_type_base (t);
    }
  while (t);
  /* T is NULL_TREE here; it implicitly converts to false.  */
  return t;
}
468 :
469 : /* Compare types T1 and T2 and return true if they are
470 : equivalent. */
471 :
472 : inline bool
473 41803625 : odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
474 : {
475 41803625 : tree t1 = o1->type;
476 :
477 41803625 : gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
478 41803625 : gcc_checking_assert (TYPE_MAIN_VARIANT (t1) == t1);
479 41803625 : if (t1 == t2)
480 : return true;
481 36054333 : if (!in_lto_p)
482 : return false;
483 : /* Check for anonymous namespaces. */
484 949718 : if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
485 937757 : || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
486 19756 : return false;
487 455103 : gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
488 455103 : gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
489 455103 : return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
490 455103 : == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
491 : }
492 :
493 : /* Free ODR type V. */
494 :
495 : inline void
496 1380672 : odr_name_hasher::remove (odr_type_d *v)
497 : {
498 1380672 : v->bases.release ();
499 1380672 : v->derived_types.release ();
500 1380672 : if (v->types_set)
501 0 : delete v->types_set;
502 1380672 : ggc_free (v);
503 1380672 : }
504 :
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* All enums defined and accessible for the unit.  */
static GTY(()) vec <tree, va_gc> *odr_enums;

/* Information we hold about value defined by an enum type.  */
struct odr_enum_val
{
  /* Name of the enumerator.  */
  const char *name;
  /* Its constant value.  */
  wide_int val;
  /* Source location of the enumerator definition.  */
  location_t locus;
};

/* Information about enum values.  */
struct odr_enum
{
  /* Source location of the enum definition.  */
  location_t locus;
  /* The enumerators of this enum.  */
  auto_vec<odr_enum_val, 0> vals;
  /* NOTE(review): appears to mark enums already warned about --
     confirm at use sites outside this chunk.  */
  bool warned;
};

/* A table of all ODR enum definitions, keyed by name.  */
static hash_map <nofree_string_hash, odr_enum> *odr_enum_map = NULL;
/* Obstack backing the strings stored in ODR_ENUM_MAP.  */
static struct obstack odr_enum_obstack;
539 :
540 : /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
541 : void
542 0 : set_type_binfo (tree type, tree binfo)
543 : {
544 0 : for (; type; type = TYPE_NEXT_VARIANT (type))
545 0 : if (COMPLETE_TYPE_P (type))
546 0 : TYPE_BINFO (type) = binfo;
547 : else
548 0 : gcc_assert (!TYPE_BINFO (type));
549 0 : }
550 :
551 : /* Return true if type variants match.
552 : This assumes that we already verified that T1 and T2 are variants of the
553 : same type. */
554 :
555 : static bool
556 7469 : type_variants_equivalent_p (tree t1, tree t2)
557 : {
558 7469 : if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
559 : return false;
560 :
561 7462 : if (comp_type_attributes (t1, t2) != 1)
562 : return false;
563 :
564 14922 : if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
565 7778 : && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
566 : return false;
567 :
568 : return true;
569 : }
570 :
/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair> *visited,
			   location_t loc1, location_t loc2)
{

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
	  && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    return false;

  /* For ODR types be sure to compare their names.
     To support -Wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2))
    {
      /* Do not trust name equality for a type already known to
	 violate the ODR.  */
      if (t1 != t2
	  && odr_type_p (TYPE_MAIN_VARIANT (t1))
	  && get_odr_type (TYPE_MAIN_VARIANT (t1), true)->odr_violated)
	return false;
      if (!types_same_for_odr (t1, t2))
	return false;
      if (!type_variants_equivalent_p (t1, t2))
	return false;
      /* Limit recursion: If subtypes are ODR types and we know
	 that they are same, be happy.  */
      if (odr_type_p (TYPE_MAIN_VARIANT (t1)))
	return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if (AGGREGATE_TYPE_P (t1)
      && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  /* Canonicalize the pair so the smaller TYPE_UID comes first; this
     matches the symmetric hash in default_hash_traits<type_pair>.  */
  type_pair pair={TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
  if (TYPE_UID (TYPE_MAIN_VARIANT (t1)) > TYPE_UID (TYPE_MAIN_VARIANT (t2)))
    {
      pair.first = TYPE_MAIN_VARIANT (t2);
      pair.second = TYPE_MAIN_VARIANT (t1);
    }
  /* add returns true when the pair was already present; assume
     equivalence then to break recursion cycles.  */
  if (visited->add (pair))
    return true;
  if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
			       false, NULL, visited, loc1, loc2))
    return false;
  if (!type_variants_equivalent_p (t1, t2))
    return false;
  return true;
}
634 :
635 : /* Return true if DECL1 and DECL2 are identical methods. Consider
636 : name equivalent to name.localalias.xyz. */
637 :
638 : static bool
639 120 : methods_equal_p (tree decl1, tree decl2)
640 : {
641 120 : if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
642 : return true;
643 0 : const char sep = symbol_table::symbol_suffix_separator ();
644 :
645 0 : const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
646 0 : const char *ptr1 = strchr (name1, sep);
647 0 : int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
648 :
649 0 : const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
650 0 : const char *ptr2 = strchr (name2, sep);
651 0 : int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
652 :
653 0 : if (len1 != len2)
654 : return false;
655 0 : return !strncmp (name1, name2, len1);
656 : }
657 :
/* Compare two virtual tables, PREVAILING and VTABLE and output ODR
   violation warnings.  */

void
compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
{
  int n1, n2;

  /* If only one of the symbols is a virtual table, something non-vtable
     with the same assembler name prevailed; warn and bail out.  */
  if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
    {
      odr_violation_reported = true;
      /* Swap so that VTABLE is the one that is DECL_VIRTUAL_P and the
	 warning points at the type of the real virtual table.  */
      if (DECL_VIRTUAL_P (prevailing->decl))
	{
	  varpool_node *tmp = prevailing;
	  prevailing = vtable;
	  vtable = tmp;
	}
      auto_diagnostic_group d;
      if (warning_at (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
		      OPT_Wodr,
		      "virtual table of type %qD violates one definition rule",
		      DECL_CONTEXT (vtable->decl)))
	inform (DECL_SOURCE_LOCATION (prevailing->decl),
		"variable of same assembler name as the virtual table is "
		"defined in another translation unit");
      return;
    }
  if (!prevailing->definition || !vtable->definition)
    return;

  /* If we do not stream ODR type info, do not bother to do useful compare.  */
  if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
      || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
    return;

  odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);

  if (class_type->odr_violated)
    return;

  /* Walk the references of both vtables in parallel; N1/N2 index into
     PREVAILING's and VTABLE's reference lists respectively.  */
  for (n1 = 0, n2 = 0; true; n1++, n2++)
    {
      struct ipa_ref *ref1, *ref2;
      bool end1, end2;

      end1 = !prevailing->iterate_reference (n1, ref1);
      end2 = !vtable->iterate_reference (n2, ref2);

      /* !DECL_VIRTUAL_P means RTTI entry;
	 We warn when RTTI is lost because non-RTTI prevails; we silently
	 accept the other case.  */
      while (!end2
	     && (end1
		 || (methods_equal_p (ref1->referred->decl,
				      ref2->referred->decl)
		     && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	{
	  /* Warn about the lost RTTI only once per class.  */
	  if (!class_type->rtti_broken)
	    {
	      auto_diagnostic_group d;
	      if (warning_at (DECL_SOURCE_LOCATION
				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD contains RTTI "
			      "information",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "but is prevailed by one without from other"
			  " translation unit");
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "RTTI will not work on this type");
		  class_type->rtti_broken = true;
		}
	    }
	  n2++;
	  end2 = !vtable->iterate_reference (n2, ref2);
	}
      /* Symmetrically skip extra non-function (RTTI) entries in the
	 prevailing table; this direction is accepted silently.  */
      while (!end1
	     && (end2
		 || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
		     && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
	{
	  n1++;
	  end1 = !prevailing->iterate_reference (n1, ref1);
	}

      /* Finished?  */
      if (end1 && end2)
	{
	  /* Extra paranoia; compare the sizes.  We do not have information
	     about virtual inheritance offsets, so just be sure that these
	     match.
	     Do this as very last check so the not very informative error
	     is not output too often.  */
	  if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
	    {
	      class_type->odr_violated = true;
	      auto_diagnostic_group d;
	      tree ctx = TYPE_NAME (DECL_CONTEXT (vtable->decl));
	      if (warning_at (DECL_SOURCE_LOCATION (ctx), OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule",
			      DECL_CONTEXT (vtable->decl)))
		{
		  ctx = TYPE_NAME (DECL_CONTEXT (prevailing->decl));
		  inform (DECL_SOURCE_LOCATION (ctx),
			  "the conflicting type defined in another translation"
			  " unit has virtual table of different size");
		}
	    }
	  return;
	}

      if (!end1 && !end2)
	{
	  /* Matching entries: continue with the next pair.  */
	  if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
	    continue;

	  class_type->odr_violated = true;

	  /* If the loops above stopped on non-virtual pointer, we have
	     mismatch in RTTI information mangling.  */
	  if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
	      && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	    {
	      auto_diagnostic_group d;
	      if (warning_at (DECL_SOURCE_LOCATION
				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit with different RTTI information");
		}
	      return;
	    }
	  /* At this point both REF1 and REF2 points either to virtual table
	     or virtual method.  If one points to virtual table and other to
	     method we can complain the same way as if one table was shorter
	     than other pointing out the extra method.  */
	  if (TREE_CODE (ref1->referred->decl)
	      != TREE_CODE (ref2->referred->decl))
	    {
	      if (VAR_P (ref1->referred->decl))
		end1 = true;
	      else if (VAR_P (ref2->referred->decl))
		end2 = true;
	    }
	}

      class_type->odr_violated = true;

      /* Complain about size mismatch.  Either we have too many virtual
	 functions or too many virtual table pointers.  */
      if (end1 || end2)
	{
	  /* Swap so that VTABLE is the longer table, carrying the
	     additional entry REF1 points at.  */
	  if (end1)
	    {
	      varpool_node *tmp = prevailing;
	      prevailing = vtable;
	      vtable = tmp;
	      ref1 = ref2;
	    }
	  auto_diagnostic_group d;
	  if (warning_at (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			  OPT_Wodr,
			  "virtual table of type %qD violates "
			  "one definition rule",
			  DECL_CONTEXT (vtable->decl)))
	    {
	      if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit");
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
			  "contains additional virtual method %qD",
			  ref1->referred->decl);
		}
	      else
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit has virtual table with more entries");
		}
	    }
	  return;
	}

      /* And in the last case we have either mismatch in between two virtual
	 methods or two virtual table pointers.  */
      auto_diagnostic_group d;
      if (warning_at (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
		      "virtual table of type %qD violates "
		      "one definition rule",
		      DECL_CONTEXT (vtable->decl)))
	{
	  if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
	    {
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "the conflicting type defined in another translation "
		      "unit");
	      gcc_assert (TREE_CODE (ref2->referred->decl)
			  == FUNCTION_DECL);
	      inform (DECL_SOURCE_LOCATION
			(ref1->referred->ultimate_alias_target ()->decl),
		      "virtual method %qD",
		      ref1->referred->ultimate_alias_target ()->decl);
	      inform (DECL_SOURCE_LOCATION
			(ref2->referred->ultimate_alias_target ()->decl),
		      "ought to match virtual method %qD but does not",
		      ref2->referred->ultimate_alias_target ()->decl);
	    }
	  else
	    inform (DECL_SOURCE_LOCATION
		      (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		    "the conflicting type defined in another translation "
		    "unit has virtual table with different contents");
	  return;
	}
    }
}
895 : }
896 :
897 : /* Output an ODR violation warning about types T1 and T2 with REASON.
898 : Also display the locations of ST1 and ST2 if REASON speaks about a
899 : field or method of the type.
900 : If WARN is false, do nothing. Set *WARNED (when non-NULL) to true if
901 : a warning was indeed output. */
902 :
903 : static void
904 109 : warn_odr (tree t1, tree t2, tree st1, tree st2,
905 : bool warn, bool *warned, const char *reason)
906 : {
907 109 : tree decl2 = TYPE_NAME (TYPE_MAIN_VARIANT (t2));
908 109 : if (warned)
909 86 : *warned = false;
910 :
911 109 : if (!warn || !TYPE_NAME(TYPE_MAIN_VARIANT (t1)))
912 57 : return;
913 :
914 : /* ODR warnings are output during LTO streaming; we must apply location
915 : cache for potential warnings to be output correctly. */
916 55 : if (lto_location_cache::current_cache)
917 55 : lto_location_cache::current_cache->apply_location_cache ();
918 :
919 55 : auto_diagnostic_group d;
920 55 : if (t1 != TYPE_MAIN_VARIANT (t1)
921 55 : && TYPE_NAME (t1) != TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
922 : {
923 0 : if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
924 0 : OPT_Wodr, "type %qT (typedef of %qT) violates the "
925 : "C++ One Definition Rule",
926 0 : t1, TYPE_MAIN_VARIANT (t1)))
927 : return;
928 : }
929 : else
930 : {
931 55 : if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
932 55 : OPT_Wodr, "type %qT violates the C++ One Definition Rule",
933 : t1))
934 : return;
935 : }
936 52 : if (!st1 && !st2)
937 : ;
938 : /* For FIELD_DECL support also the case where one of the fields is
939 : NULL - this is used when the structures have a mismatching number of
940 : elements. */
941 34 : else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
942 : {
943 34 : inform (DECL_SOURCE_LOCATION (decl2),
944 : "a different type is defined in another translation unit");
945 34 : if (!st1)
946 : {
947 1 : st1 = st2;
948 1 : st2 = NULL;
949 : }
950 34 : inform (DECL_SOURCE_LOCATION (st1),
951 : "the first difference of corresponding definitions is field %qD",
952 : st1);
953 34 : if (st2)
954 52 : decl2 = st2;
955 : }
956 0 : else if (TREE_CODE (st1) == FUNCTION_DECL)
957 : {
958 0 : inform (DECL_SOURCE_LOCATION (decl2),
959 : "a different type is defined in another translation unit");
960 0 : inform (DECL_SOURCE_LOCATION (st1),
961 : "the first difference of corresponding definitions is method %qD",
962 : st1);
963 0 : decl2 = st2;
964 : }
965 : else
966 : return;
967 52 : inform (DECL_SOURCE_LOCATION (decl2), reason);
968 :
969 52 : if (warned)
970 52 : *warned = true;
971 55 : }
972 :
973 : /* Return true if T1 and T2 are incompatible and we want to recursively
974 : dive into them from warn_types_mismatch to give a sensible answer. */
975 :
976 : static bool
977 6 : type_mismatch_p (tree t1, tree t2)
978 : {
979 12 : if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
980 12 : && !odr_types_equivalent_p (t1, t2))
981 : return true;
982 0 : return !types_compatible_p (t1, t2);
983 : }
984 :
985 :
986 : /* Types T1 and T2 were found to be incompatible in a context they can't
987 : (either used to declare a symbol of the same assembler name or unified by
988 : the ODR rule). We already output a warning about this, but if possible,
989 : output extra information on how the types mismatch.
990 :
991 : This is hard to do in general. We basically handle the common cases.
992 :
993 : If LOC1 and LOC2 are meaningful locations, use them in the case the types
994 : themselves do not have one. */
995 :
996 : void
997 40 : warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
998 : {
999 : /* Location of type is known only if it has TYPE_NAME and the name is
1000 : TYPE_DECL. */
1001 85 : location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1002 78 : ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1003 46 : : UNKNOWN_LOCATION;
1004 79 : location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1005 71 : ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1006 71 : : UNKNOWN_LOCATION;
1007 25 : bool loc_t2_useful = false;
1008 :
1009 : /* With LTO it is a common case that the location of both types match.
1010 : See if T2 has a location that is different from T1. If so, we will
1011 : inform the user about that location.
1012 : Do not consider the location passed to us in LOC1/LOC2 as those are
1013 : already output. */
1014 25 : if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1015 : {
1016 19 : if (loc_t1 <= BUILTINS_LOCATION)
1017 : loc_t2_useful = true;
1018 : else
1019 : {
1020 19 : expanded_location xloc1 = expand_location (loc_t1);
1021 19 : expanded_location xloc2 = expand_location (loc_t2);
1022 :
1023 19 : if (strcmp (xloc1.file, xloc2.file)
1024 0 : || xloc1.line != xloc2.line
1025 0 : || xloc1.column != xloc2.column)
1026 19 : loc_t2_useful = true;
1027 : }
1028 : }
1029 :
1030 46 : if (loc_t1 <= BUILTINS_LOCATION)
1031 : loc_t1 = loc1;
1032 46 : if (loc_t2 <= BUILTINS_LOCATION)
1033 27 : loc_t2 = loc2;
1034 :
1035 46 : location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1036 :
1037 : /* It is a quite common bug to reference an anonymous namespace type from
1038 : a non-anonymous namespace class. */
1039 46 : tree mt1 = TYPE_MAIN_VARIANT (t1);
1040 46 : tree mt2 = TYPE_MAIN_VARIANT (t2);
1041 46 : if ((type_with_linkage_p (mt1)
1042 31 : && type_in_anonymous_namespace_p (mt1))
1043 71 : || (type_with_linkage_p (mt2)
1044 26 : && type_in_anonymous_namespace_p (mt2)))
1045 : {
1046 13 : if (!type_with_linkage_p (mt1)
1047 13 : || !type_in_anonymous_namespace_p (mt1))
1048 : {
1049 : std::swap (t1, t2);
1050 : std::swap (mt1, mt2);
1051 : std::swap (loc_t1, loc_t2);
1052 : }
1053 13 : gcc_assert (TYPE_NAME (mt1)
1054 : && TREE_CODE (TYPE_NAME (mt1)) == TYPE_DECL);
1055 13 : tree n1 = TYPE_NAME (mt1);
1056 13 : tree n2 = TYPE_NAME (mt2) ? TYPE_NAME (mt2) : NULL;
1057 :
1058 13 : if (TREE_CODE (n1) == TYPE_DECL)
1059 13 : n1 = DECL_NAME (n1);
1060 13 : if (n2 && TREE_CODE (n2) == TYPE_DECL)
1061 7 : n2 = DECL_NAME (n2);
1062 : /* Most of the time, the type names will match, do not be unnecessarily
1063 : verbose. */
1064 13 : if (n1 != n2)
1065 1 : inform (loc_t1,
1066 : "type %qT defined in anonymous namespace cannot match "
1067 : "type %qT across the translation unit boundary",
1068 : t1, t2);
1069 : else
1070 12 : inform (loc_t1,
1071 : "type %qT defined in anonymous namespace cannot match "
1072 : "across the translation unit boundary",
1073 : t1);
1074 13 : if (loc_t2_useful)
1075 6 : inform (loc_t2,
1076 : "the incompatible type defined in another translation unit");
1077 13 : return;
1078 : }
1079 : /* If types have mangled ODR names and they are different, it is most
1080 : informative to output those.
1081 : This also covers types defined in different namespaces. */
1082 33 : const char *odr1 = get_odr_name_for_type (mt1);
1083 33 : const char *odr2 = get_odr_name_for_type (mt2);
1084 33 : if (odr1 != NULL && odr2 != NULL && odr1 != odr2)
1085 : {
1086 6 : const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
1087 6 : char *name1 = xstrdup (cplus_demangle (odr1, opts));
1088 6 : char *name2 = cplus_demangle (odr2, opts);
1089 6 : if (name1 && name2 && strcmp (name1, name2))
1090 : {
1091 6 : inform (loc_t1,
1092 : "type name %qs should match type name %qs",
1093 : name1, name2);
1094 6 : if (loc_t2_useful)
1095 6 : inform (loc_t2,
1096 : "the incompatible type is defined here");
1097 6 : free (name1);
1098 6 : return;
1099 : }
1100 0 : free (name1);
1101 : }
1102 : /* A tricky case are compound types. Often they appear the same in the
1103 : source code and the mismatch is dragged in by the type they are built
1104 : from. Look for those differences in subtypes and try to be informative.
1105 : In other cases just output nothing because the source code is probably
1106 : different and in this case we already output all the necessary info. */
1107 27 : if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1108 : {
1109 7 : if (TREE_CODE (t1) == TREE_CODE (t2))
1110 : {
1111 7 : if (TREE_CODE (t1) == ARRAY_TYPE
1112 7 : && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1113 : {
1114 1 : tree i1 = TYPE_DOMAIN (t1);
1115 1 : tree i2 = TYPE_DOMAIN (t2);
1116 :
1117 1 : if (i1 && i2
1118 1 : && TYPE_MAX_VALUE (i1)
1119 1 : && TYPE_MAX_VALUE (i2)
1120 2 : && !operand_equal_p (TYPE_MAX_VALUE (i1),
1121 1 : TYPE_MAX_VALUE (i2), 0))
1122 : {
1123 1 : inform (loc,
1124 : "array types have different bounds");
1125 1 : return;
1126 : }
1127 : }
1128 0 : if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1129 6 : && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1130 6 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1131 0 : else if (TREE_CODE (t1) == METHOD_TYPE
1132 0 : || TREE_CODE (t1) == FUNCTION_TYPE)
1133 : {
1134 0 : tree parms1 = NULL, parms2 = NULL;
1135 0 : int count = 1;
1136 :
1137 0 : if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1138 : {
1139 0 : inform (loc, "return value type mismatch");
1140 0 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1141 : loc_t2);
1142 0 : return;
1143 : }
1144 0 : if (prototype_p (t1) && prototype_p (t2))
1145 0 : for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1146 0 : parms1 && parms2;
1147 0 : parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1148 : count++)
1149 : {
1150 0 : if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1151 : {
1152 0 : if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1153 0 : inform (loc,
1154 : "implicit this pointer type mismatch");
1155 : else
1156 0 : inform (loc,
1157 : "type mismatch in parameter %i",
1158 0 : count - (TREE_CODE (t1) == METHOD_TYPE));
1159 0 : warn_types_mismatch (TREE_VALUE (parms1),
1160 0 : TREE_VALUE (parms2),
1161 : loc_t1, loc_t2);
1162 0 : return;
1163 : }
1164 : }
1165 0 : if (parms1 || parms2)
1166 : {
1167 0 : inform (loc,
1168 : "types have different parameter counts");
1169 0 : return;
1170 : }
1171 : }
1172 : }
1173 0 : return;
1174 : }
1175 :
1176 20 : if (types_odr_comparable (t1, t2)
1177 : /* We assign mangled names to integer types to be able to handle
1178 : signed/unsigned chars. Accepting them here would however lead to
1179 : a confusing message like
1180 : "type ‘const int’ itself violates the C++ One Definition Rule" */
1181 19 : && TREE_CODE (t1) != INTEGER_TYPE
1182 33 : && types_same_for_odr (t1, t2))
1183 13 : inform (loc_t1,
1184 : "type %qT itself violates the C++ One Definition Rule", t1);
1185 : /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1186 7 : else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1187 7 : && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1188 : return;
1189 : else
1190 6 : inform (loc_t1, "type %qT should match type %qT",
1191 : t1, t2);
1192 19 : if (loc_t2_useful)
1193 7 : inform (loc_t2, "the incompatible type is defined here");
1194 : }
1195 :
1196 : /* Return true if T should be ignored in TYPE_FIELDS for ODR comparison. */
1197 :
1198 : static bool
1199 690 : skip_in_fields_list_p (tree t)
1200 : {
1201 690 : if (TREE_CODE (t) != FIELD_DECL)
1202 : return true;
1203 : /* The C++ FE introduces zero sized fields depending on the -std setting;
1204 : see PR89358. Skip such artificial, unnamed, ignored fields. */
1205 690 : if (DECL_SIZE (t)
1206 690 : && integer_zerop (DECL_SIZE (t))
1207 2 : && DECL_ARTIFICIAL (t)
1208 2 : && DECL_IGNORED_P (t)
1209 692 : && !DECL_NAME (t))
1210 : return true;
1211 : return false;
1212 : }
1213 :
1214 : /* Compare T1 and T2, report ODR violations if WARN is true and set
1215 : *WARNED to true if anything is reported. Return true if the types match.
1216 : If true is returned, the types are also compatible in the sense of
1217 : gimple_canonical_types_compatible_p.
1218 : If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a
1219 : warning about the type if the type itself does not have a location. */
1220 :
1221 : static bool
1222 7536 : odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1223 : hash_set<type_pair> *visited,
1224 : location_t loc1, location_t loc2)
1225 : {
1226 : /* If we are asked to warn, we need WARNED to keep track if a warning was
1227 : output. */
1228 7536 : gcc_assert (!warn || warned);
1229 : /* Check first for the obvious case of pointer identity. */
1230 7536 : if (t1 == t2)
1231 : return true;
1232 :
1233 : /* Can't be the same type if the types don't have the same code. */
1234 7536 : if (TREE_CODE (t1) != TREE_CODE (t2))
1235 : {
1236 13 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1237 : G_("a different type is defined in another translation unit"));
1238 13 : return false;
1239 : }
1240 :
1241 7523 : if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
1242 7423 : && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
1243 14946 : || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
1244 7423 : && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
1245 : {
1246 : /* We cannot trip this when comparing ODR types, only when trying to
1247 : match different ODR derivations from different declarations.
1248 : So WARN should be always false. */
1249 6 : gcc_assert (!warn);
1250 : return false;
1251 : }
1252 :
1253 : /* Non-aggregate types can be handled cheaply. */
1254 7517 : if (INTEGRAL_TYPE_P (t1)
1255 7517 : || SCALAR_FLOAT_TYPE_P (t1)
1256 7494 : || FIXED_POINT_TYPE_P (t1)
1257 7494 : || VECTOR_TYPE_P (t1)
1258 7494 : || TREE_CODE (t1) == COMPLEX_TYPE
1259 7494 : || TREE_CODE (t1) == OFFSET_TYPE
1260 7494 : || POINTER_TYPE_P (t1))
1261 : {
1262 37 : if (!VECTOR_TYPE_P (t1) && TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1263 : {
1264 7 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1265 : G_("a type with different precision is defined "
1266 : "in another translation unit"));
1267 7 : return false;
1268 : }
1269 30 : if (VECTOR_TYPE_P (t1)
1270 30 : && maybe_ne (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2)))
1271 : {
1272 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1273 : G_("a vector type with different number of elements "
1274 : "is defined in another translation unit"));
1275 0 : return false;
1276 : }
1277 30 : if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1278 : {
1279 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1280 : G_("a type with different signedness is defined "
1281 : "in another translation unit"));
1282 0 : return false;
1283 : }
1284 :
1285 30 : if (TREE_CODE (t1) == INTEGER_TYPE
1286 30 : && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1287 : {
1288 : /* char WRT uint_8? */
1289 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1290 : G_("a different type is defined in another "
1291 : "translation unit"));
1292 0 : return false;
1293 : }
1294 :
1295 : /* For canonical type comparisons we do not want to build SCCs
1296 : so we cannot compare pointed-to types. But we can, for now,
1297 : require the same pointed-to type kind and match what
1298 : useless_type_conversion_p would do. */
1299 30 : if (POINTER_TYPE_P (t1))
1300 : {
1301 14 : if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1302 14 : != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1303 : {
1304 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1305 : G_("it is defined as a pointer in different address "
1306 : "space in another translation unit"));
1307 0 : return false;
1308 : }
1309 :
1310 14 : if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1311 : visited, loc1, loc2))
1312 : {
1313 9 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1314 : G_("it is defined as a pointer to different type "
1315 : "in another translation unit"));
1316 9 : if (warn && *warned)
1317 0 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
1318 : loc1, loc2);
1319 9 : return false;
1320 : }
1321 : }
1322 :
1323 21 : if ((VECTOR_TYPE_P (t1) || TREE_CODE (t1) == COMPLEX_TYPE)
1324 21 : && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1325 : visited, loc1, loc2))
1326 : {
1327 : /* Probably specific enough. */
1328 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1329 : G_("a different type is defined "
1330 : "in another translation unit"));
1331 0 : if (warn && *warned)
1332 0 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1333 0 : return false;
1334 : }
1335 : }
1336 : /* Do type-specific comparisons. */
1337 7480 : else switch (TREE_CODE (t1))
1338 : {
1339 2 : case ARRAY_TYPE:
1340 2 : {
1341 : /* Array types are the same if the element types are the same and
1342 : the number of elements is the same. */
1343 2 : if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1344 : visited, loc1, loc2))
1345 : {
1346 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1347 : G_("a different type is defined in another "
1348 : "translation unit"));
1349 0 : if (warn && *warned)
1350 0 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1351 : }
1352 2 : gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1353 2 : gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1354 : == TYPE_NONALIASED_COMPONENT (t2));
1355 :
1356 2 : tree i1 = TYPE_DOMAIN (t1);
1357 2 : tree i2 = TYPE_DOMAIN (t2);
1358 :
1359 : /* For an incomplete external array, the type domain can be
1360 : NULL_TREE. Check this condition also. */
1361 2 : if (i1 == NULL_TREE || i2 == NULL_TREE)
1362 0 : return type_variants_equivalent_p (t1, t2);
1363 :
1364 2 : tree min1 = TYPE_MIN_VALUE (i1);
1365 2 : tree min2 = TYPE_MIN_VALUE (i2);
1366 2 : tree max1 = TYPE_MAX_VALUE (i1);
1367 2 : tree max2 = TYPE_MAX_VALUE (i2);
1368 :
1369 : /* In C++, minimums should be always 0. */
1370 2 : gcc_assert (min1 == min2);
1371 2 : if (!operand_equal_p (max1, max2, 0))
1372 : {
1373 2 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1374 : G_("an array of different size is defined "
1375 : "in another translation unit"));
1376 2 : return false;
1377 : }
1378 : }
1379 : break;
1380 :
1381 84 : case METHOD_TYPE:
1382 84 : case FUNCTION_TYPE:
1383 : /* Function types are the same if the return type and argument types
1384 : are the same. */
1385 84 : if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1386 : visited, loc1, loc2))
1387 : {
1388 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1389 : G_("has different return value "
1390 : "in another translation unit"));
1391 0 : if (warn && *warned)
1392 0 : warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1393 0 : return false;
1394 : }
1395 :
1396 84 : if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
1397 84 : || !prototype_p (t1) || !prototype_p (t2))
1398 84 : return type_variants_equivalent_p (t1, t2);
1399 : else
1400 : {
1401 0 : tree parms1, parms2;
1402 :
1403 0 : for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1404 0 : parms1 && parms2;
1405 0 : parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1406 : {
1407 0 : if (!odr_subtypes_equivalent_p
1408 0 : (TREE_VALUE (parms1), TREE_VALUE (parms2),
1409 : visited, loc1, loc2))
1410 : {
1411 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1412 : G_("has different parameters in another "
1413 : "translation unit"));
1414 0 : if (warn && *warned)
1415 0 : warn_types_mismatch (TREE_VALUE (parms1),
1416 0 : TREE_VALUE (parms2), loc1, loc2);
1417 0 : return false;
1418 : }
1419 : }
1420 :
1421 0 : if (parms1 || parms2)
1422 : {
1423 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1424 : G_("has different parameters "
1425 : "in another translation unit"));
1426 0 : return false;
1427 : }
1428 :
1429 0 : return type_variants_equivalent_p (t1, t2);
1430 : }
1431 :
1432 7394 : case RECORD_TYPE:
1433 7394 : case UNION_TYPE:
1434 7394 : case QUAL_UNION_TYPE:
1435 7394 : {
1436 7394 : tree f1, f2;
1437 :
1438 : /* For aggregate types, all the fields must be the same. */
1439 7394 : if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1440 : {
1441 426 : if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1442 597 : && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1443 171 : != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1444 : {
1445 0 : if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1446 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1447 : G_("a type defined in another translation unit "
1448 : "is not polymorphic"));
1449 : else
1450 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1451 : G_("a type defined in another translation unit "
1452 : "is polymorphic"));
1453 0 : return false;
1454 : }
1455 255 : for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1456 540 : f1 || f2;
1457 285 : f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1458 : {
1459 : /* Skip non-fields. */
1460 354 : while (f1 && skip_in_fields_list_p (f1))
1461 2 : f1 = TREE_CHAIN (f1);
1462 352 : while (f2 && skip_in_fields_list_p (f2))
1463 0 : f2 = TREE_CHAIN (f2);
1464 352 : if (!f1 || !f2)
1465 : break;
1466 336 : if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1467 : {
1468 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1469 : G_("a type with different virtual table pointers"
1470 : " is defined in another translation unit"));
1471 0 : return false;
1472 : }
1473 336 : if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1474 : {
1475 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1476 : G_("a type with different bases is defined "
1477 : "in another translation unit"));
1478 0 : return false;
1479 : }
1480 336 : if (DECL_NAME (f1) != DECL_NAME (f2)
1481 336 : && !DECL_ARTIFICIAL (f1))
1482 : {
1483 0 : warn_odr (t1, t2, f1, f2, warn, warned,
1484 : G_("a field with different name is defined "
1485 : "in another translation unit"));
1486 0 : return false;
1487 : }
1488 336 : if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1489 336 : TREE_TYPE (f2),
1490 : visited, loc1, loc2))
1491 : {
1492 : /* Do not warn about artificial fields and just go into
1493 : the generic field mismatch warning. */
1494 51 : if (DECL_ARTIFICIAL (f1))
1495 : break;
1496 :
1497 51 : warn_odr (t1, t2, f1, f2, warn, warned,
1498 : G_("a field of same name but different type "
1499 : "is defined in another translation unit"));
1500 51 : if (warn && *warned)
1501 27 : warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
1502 51 : return false;
1503 : }
1504 285 : if (!gimple_compare_field_offset (f1, f2))
1505 : {
1506 : /* Do not warn about artificial fields and just go into
1507 : the generic field mismatch warning. */
1508 0 : if (DECL_ARTIFICIAL (f1))
1509 : break;
1510 0 : warn_odr (t1, t2, f1, f2, warn, warned,
1511 : G_("fields have different layout "
1512 : "in another translation unit"));
1513 0 : return false;
1514 : }
1515 285 : if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
1516 : {
1517 0 : warn_odr (t1, t2, f1, f2, warn, warned,
1518 : G_("one field is a bitfield while the other "
1519 : "is not"));
1520 0 : return false;
1521 : }
1522 : else
1523 285 : gcc_assert (DECL_NONADDRESSABLE_P (f1)
1524 : == DECL_NONADDRESSABLE_P (f2));
1525 : }
1526 :
1527 : /* If one aggregate has more fields than the other, they
1528 : are not the same. */
1529 204 : if (f1 || f2)
1530 : {
1531 16 : if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1532 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1533 : G_("a type with different virtual table pointers"
1534 : " is defined in another translation unit"));
1535 8 : else if ((f1 && DECL_ARTIFICIAL (f1))
1536 24 : || (f2 && DECL_ARTIFICIAL (f2)))
1537 2 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1538 : G_("a type with different bases is defined "
1539 : "in another translation unit"));
1540 : else
1541 14 : warn_odr (t1, t2, f1, f2, warn, warned,
1542 : G_("a type with different number of fields "
1543 : "is defined in another translation unit"));
1544 :
1545 16 : return false;
1546 : }
1547 : }
1548 : break;
1549 : }
1550 : case VOID_TYPE:
1551 : case OPAQUE_TYPE:
1552 : case NULLPTR_TYPE:
1553 : break;
1554 :
1555 0 : default:
1556 0 : debug_tree (t1);
1557 0 : gcc_unreachable ();
1558 : }
1559 :
1560 : /* Those are better to come last as they are utterly uninformative. */
1561 14696 : if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1562 7552 : && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1563 : {
1564 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1565 : G_("a type with different size "
1566 : "is defined in another translation unit"));
1567 0 : return false;
1568 : }
1569 :
1570 7348 : if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2)
1571 7348 : && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1572 : {
1573 9 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1574 : G_("one type needs to be constructed while the other does not"));
1575 9 : gcc_checking_assert (RECORD_OR_UNION_TYPE_P (t1));
1576 : return false;
1577 : }
1578 : /* There is no really good user facing warning for this.
1579 : Either the original reason for modes being different is lost during
1580 : streaming or we should catch earlier warnings. We however must detect
1581 : the mismatch to avoid the type verifier from complaining on mismatched
1582 : types between type and canonical type. See PR91576. */
1583 7339 : if (TYPE_MODE (t1) != TYPE_MODE (t2)
1584 7339 : && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1585 : {
1586 0 : warn_odr (t1, t2, NULL, NULL, warn, warned,
1587 : G_("memory layout mismatch"));
1588 0 : return false;
1589 : }
1590 :
1591 7339 : gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1592 : || operand_equal_p (TYPE_SIZE_UNIT (t1),
1593 : TYPE_SIZE_UNIT (t2), 0));
1594 7339 : return type_variants_equivalent_p (t1, t2);
1595 : }
1596 :
1597 : /* Return true if TYPE1 and TYPE2 are equivalent under the One Definition
1598 : Rule. Non-warning overload of the static worker above. */
1599 : bool
1600 189 : odr_types_equivalent_p (tree type1, tree type2)
1601 : {
1602 189 : gcc_checking_assert (odr_or_derived_type_p (type1)
1603 : && odr_or_derived_type_p (type2));
1604 :
1605 189 : hash_set<type_pair> visited;
1606 189 : return odr_types_equivalent_p (type1, type2, false, NULL,
1607 189 : &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1608 189 : }
1609 :
1610 : /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1611 : from VAL->type. This may happen in LTO where tree merging did not merge
1612 : all variants of the same type or due to ODR violation.
1613 :
1614 : Analyze and report ODR violations and add type to duplicate list.
1615 : If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1616 : this is first time we see definition of a class return true so the
1617 : base types are analyzed. */
1618 :
1619 : static bool
1620 7375 : add_type_duplicate (odr_type val, tree type)
1621 : {
1622 7375 : bool build_bases = false;
1623 7375 : bool prevail = false;
1624 7375 : bool odr_must_violate = false;
1625 :
1626 7375 : if (!val->types_set)
1627 7232 : val->types_set = new hash_set<tree>;
1628 :
1629 : /* Chose polymorphic type as leader (this happens only in case of ODR
1630 : violations. */
1631 7235 : if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1632 226 : && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1633 7601 : && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1634 93 : || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1635 : {
1636 : prevail = true;
1637 : build_bases = true;
1638 : }
1639 : /* Always prefer complete type to be the leader. */
1640 7242 : else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1641 : {
1642 1217 : prevail = true;
1643 1217 : if (TREE_CODE (type) == RECORD_TYPE)
1644 1196 : build_bases = TYPE_BINFO (type);
1645 : }
1646 6025 : else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1647 : ;
1648 225 : else if (TREE_CODE (val->type) == RECORD_TYPE
1649 150 : && TREE_CODE (type) == RECORD_TYPE
1650 374 : && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1651 : {
1652 0 : gcc_assert (!val->bases.length ());
1653 : build_bases = true;
1654 : prevail = true;
1655 : }
1656 :
1657 1196 : if (prevail)
1658 1350 : std::swap (val->type, type);
1659 :
1660 7375 : val->types_set->add (type);
1661 :
1662 7375 : if (!odr_hash)
1663 : return false;
1664 :
1665 22125 : gcc_checking_assert (can_be_name_hashed_p (type)
1666 : && can_be_name_hashed_p (val->type));
1667 :
1668 7375 : bool merge = true;
1669 7375 : bool base_mismatch = false;
1670 7375 : unsigned int i;
1671 7375 : bool warned = false;
1672 7375 : hash_set<type_pair> visited;
1673 :
1674 7375 : gcc_assert (in_lto_p);
1675 7375 : vec_safe_push (val->types, type);
1676 :
1677 : /* If both are class types, compare the bases. */
1678 7600 : if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1679 225 : && TREE_CODE (val->type) == RECORD_TYPE
1680 150 : && TREE_CODE (type) == RECORD_TYPE
1681 7524 : && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1682 : {
1683 186 : if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1684 93 : != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1685 : {
1686 0 : if (!flag_ltrans && !warned && !val->odr_violated)
1687 : {
1688 0 : tree extra_base;
1689 0 : warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1690 : "a type with the same name but different "
1691 : "number of polymorphic bases is "
1692 : "defined in another translation unit");
1693 0 : if (warned)
1694 : {
1695 0 : if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1696 0 : > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1697 0 : extra_base = BINFO_BASE_BINFO
1698 : (TYPE_BINFO (type),
1699 : BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1700 : else
1701 0 : extra_base = BINFO_BASE_BINFO
1702 : (TYPE_BINFO (val->type),
1703 : BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1704 0 : tree extra_base_type = BINFO_TYPE (extra_base);
1705 0 : inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1706 : "the extra base is defined here");
1707 : }
1708 : }
1709 : base_mismatch = true;
1710 : }
1711 : else
1712 145 : for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1713 : {
1714 54 : tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1715 54 : tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1716 54 : tree type1 = BINFO_TYPE (base1);
1717 54 : tree type2 = BINFO_TYPE (base2);
1718 :
1719 54 : if (types_odr_comparable (type1, type2))
1720 : {
1721 54 : if (!types_same_for_odr (type1, type2))
1722 : base_mismatch = true;
1723 : }
1724 : else
1725 0 : if (!odr_types_equivalent_p (type1, type2))
1726 : base_mismatch = true;
1727 54 : if (base_mismatch)
1728 : {
1729 0 : if (!warned && !val->odr_violated)
1730 : {
1731 0 : warn_odr (type, val->type, NULL, NULL,
1732 : !warned, &warned,
1733 : "a type with the same name but different base "
1734 : "type is defined in another translation unit");
1735 0 : if (warned)
1736 0 : warn_types_mismatch (type1, type2,
1737 : UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1738 : }
1739 : break;
1740 : }
1741 54 : if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1742 : {
1743 2 : base_mismatch = true;
1744 2 : if (!warned && !val->odr_violated)
1745 2 : warn_odr (type, val->type, NULL, NULL,
1746 : !warned, &warned,
1747 : "a type with the same name but different base "
1748 : "layout is defined in another translation unit");
1749 : break;
1750 : }
1751 : /* One of bases is not of complete type. */
1752 52 : if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1753 : {
1754 : /* If we have a polymorphic type info specified for TYPE1
1755 : but not for TYPE2 we possibly missed a base when recording
1756 : VAL->type earlier.
1757 : Be sure this does not happen. */
1758 0 : if (TYPE_BINFO (type1)
1759 0 : && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1760 0 : && !build_bases)
1761 : odr_must_violate = true;
1762 : break;
1763 : }
1764 : /* One base is polymorphic and the other not.
1765 : This ought to be diagnosed earlier, but do not ICE in the
1766 : checking below. */
1767 52 : else if (TYPE_BINFO (type1)
1768 104 : && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1769 52 : != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1770 : {
1771 0 : if (!warned && !val->odr_violated)
1772 0 : warn_odr (type, val->type, NULL, NULL,
1773 : !warned, &warned,
1774 : "a base of the type is polymorphic only in one "
1775 : "translation unit");
1776 : base_mismatch = true;
1777 : break;
1778 : }
1779 : }
1780 93 : if (base_mismatch)
1781 : {
1782 2 : merge = false;
1783 2 : odr_violation_reported = true;
1784 2 : val->odr_violated = true;
1785 :
1786 2 : if (symtab->dump_file)
1787 : {
1788 0 : fprintf (symtab->dump_file, "ODR base violation\n");
1789 :
1790 0 : print_node (symtab->dump_file, "", val->type, 0);
1791 0 : putc ('\n',symtab->dump_file);
1792 0 : print_node (symtab->dump_file, "", type, 0);
1793 0 : putc ('\n',symtab->dump_file);
1794 : }
1795 : }
1796 : }
1797 :
1798 : /* Next compare memory layout.
1799 : The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
1800 : We must apply the location cache to ensure that they are valid
1801 : before we can pass them to odr_types_equivalent_p (PR lto/83121). */
1802 7375 : if (lto_location_cache::current_cache)
1803 7375 : lto_location_cache::current_cache->apply_location_cache ();
1804 : /* As a special case we stream mangled names of integer types so we can see
1805 : if they are believed to be same even though they have different
1806 : representation. Avoid bogus warning on mismatches in these. */
1807 7375 : if (TREE_CODE (type) != INTEGER_TYPE
1808 7334 : && TREE_CODE (val->type) != INTEGER_TYPE
1809 14709 : && !odr_types_equivalent_p (val->type, type,
1810 7334 : !flag_ltrans && !val->odr_violated && !warned,
1811 : &warned, &visited,
1812 7334 : DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1813 7334 : DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1814 : {
1815 84 : merge = false;
1816 84 : odr_violation_reported = true;
1817 84 : val->odr_violated = true;
1818 : }
1819 7375 : gcc_assert (val->odr_violated || !odr_must_violate);
1820 : /* Sanity check that all bases will be build same way again. */
1821 7375 : if (flag_checking
1822 7375 : && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1823 225 : && TREE_CODE (val->type) == RECORD_TYPE
1824 150 : && TREE_CODE (type) == RECORD_TYPE
1825 149 : && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1826 93 : && !val->odr_violated
1827 7464 : && !base_mismatch && val->bases.length ())
1828 : {
1829 : unsigned int num_poly_bases = 0;
1830 : unsigned int j;
1831 :
1832 95 : for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1833 52 : if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1834 : (TYPE_BINFO (type), i)))
1835 52 : num_poly_bases++;
1836 43 : gcc_assert (num_poly_bases == val->bases.length ());
1837 95 : for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1838 : i++)
1839 52 : if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1840 : (TYPE_BINFO (type), i)))
1841 : {
1842 52 : odr_type base = get_odr_type
1843 52 : (BINFO_TYPE
1844 : (BINFO_BASE_BINFO (TYPE_BINFO (type),
1845 : i)),
1846 : true);
1847 52 : gcc_assert (val->bases[j] == base);
1848 52 : j++;
1849 : }
1850 : }
1851 :
1852 :
1853 : /* Regularize things a little. During LTO same types may come with
1854 : different BINFOs. Either because their virtual table was
1855 : not merged by tree merging and only later at decl merging or
1856 : because one type comes with external vtable, while other
1857 : with internal. We want to merge equivalent binfos to conserve
1858 : memory and streaming overhead.
1859 :
1860 : The external vtables are more harmful: they contain references
1861 : to external declarations of methods that may be defined in the
1862 : merged LTO unit. For this reason we absolutely need to remove
1863 : them and replace by internal variants. Not doing so will lead
1864 : to incomplete answers from possible_polymorphic_call_targets.
1865 :
1866 : FIXME: disable for now; because ODR types are now build during
1867 : streaming in, the variants do not need to be linked to the type,
1868 : yet. We need to do the merging in cleanup pass to be implemented
1869 : soon. */
1870 7375 : if (!flag_ltrans && merge
1871 : && 0
1872 : && TREE_CODE (val->type) == RECORD_TYPE
1873 : && TREE_CODE (type) == RECORD_TYPE
1874 : && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1875 : && TYPE_MAIN_VARIANT (type) == type
1876 : && TYPE_MAIN_VARIANT (val->type) == val->type
1877 : && BINFO_VTABLE (TYPE_BINFO (val->type))
1878 : && BINFO_VTABLE (TYPE_BINFO (type)))
1879 : {
1880 : tree master_binfo = TYPE_BINFO (val->type);
1881 : tree v1 = BINFO_VTABLE (master_binfo);
1882 : tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1883 :
1884 : if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1885 : {
1886 : gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1887 : && operand_equal_p (TREE_OPERAND (v1, 1),
1888 : TREE_OPERAND (v2, 1), 0));
1889 : v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1890 : v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1891 : }
1892 : gcc_assert (DECL_ASSEMBLER_NAME (v1)
1893 : == DECL_ASSEMBLER_NAME (v2));
1894 :
1895 : if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1896 : {
1897 : unsigned int i;
1898 :
1899 : set_type_binfo (val->type, TYPE_BINFO (type));
1900 : for (i = 0; i < val->types->length (); i++)
1901 : {
1902 : if (TYPE_BINFO ((*val->types)[i])
1903 : == master_binfo)
1904 : set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1905 : }
1906 : BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1907 : }
1908 : else
1909 : set_type_binfo (type, master_binfo);
1910 : }
1911 7375 : return build_bases;
1912 7375 : }
1913 :
1914 : /* REF is OBJ_TYPE_REF, return the class the ref corresponds to.
1915 : FOR_DUMP_P is true when being called from the dump routines. */
1916 :
tree
obj_type_ref_class (const_tree ref, bool for_dump_p)
{
  gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
  /* The expression's type is a pointer to the method/function type;
     strip the pointer.  */
  ref = TREE_TYPE (ref);
  gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
  ref = TREE_TYPE (ref);
  /* We look for type THIS points to.  ObjC also builds
     OBJ_TYPE_REF with non-method calls, their first parameter
     ID however also corresponds to class type.  */
  gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
		       || TREE_CODE (ref) == FUNCTION_TYPE);
  /* The first argument is the object pointer; its pointed-to type is
     the class we are after.  */
  ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
  gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
  tree ret = TREE_TYPE (ref);
  /* Outside of LTO, when the type participates in canonical-type
     merging, use TYPE_CANONICAL; otherwise map to the prevailing ODR
     type recorded in the hash.  */
  if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (ret))
    ret = TYPE_CANONICAL (ret);
  else if (odr_type ot = get_odr_type (ret, !for_dump_p))
    ret = ot->type;
  else
    /* The lookup (done without insertion) may fail only when we are
       merely dumping; assert that.  */
    gcc_assert (for_dump_p);
  return ret;
}
1940 :
1941 : /* Get ODR type hash entry for TYPE. If INSERT is true, create
1942 : possibly new entry. */
1943 :
odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  /* Normalize to the main variant; outside of LTO prefer the canonical
     type when canonical-type merging applies.  */
  type = TYPE_MAIN_VARIANT (type);
  if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (type))
    type = TYPE_CANONICAL (type);

  gcc_checking_assert (can_be_name_hashed_p (type));

  /* The hash is keyed by the mangled ODR name of the type.  */
  hash = hash_odr_name (type);
  slot = odr_hash->find_slot_with_hash (type, hash,
					insert ? INSERT : NO_INSERT);

  /* With NO_INSERT an unknown type yields a NULL slot.  */
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* TYPE is a new duplicate of the recorded prevailing type:
	 register it (types_set tracks already-seen duplicates) and
	 verify ODR compatibility.  add_type_duplicate tells us whether
	 bases still need to be recorded below.  */
      if (val->type != type && insert
	  && (!val->types_set || !val->types_set->add (type)))
	build_bases = add_type_duplicate (val, type);
    }
  else
    {
      /* First time we see this ODR name: create a fresh entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
	val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      /* Bases can only be walked once the type is complete.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      *slot = val;
    }

  /* Record the polymorphic bases and link the inheritance graph.  */
  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types. other are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the largest base id so we can keep the invariant
	       that a type appears in odr_types after all its bases.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base registered above received a larger id than VAL; move VAL
	 to the end of the array to restore the ordering.  */
      odr_types[val->id] = 0;
      /* Be sure we did not recorded any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2034 :
2035 : /* Return type that in ODR type hash prevailed TYPE. Be careful and punt
2036 : on ODR violations. */
2037 :
2038 : tree
2039 10590 : prevailing_odr_type (tree type)
2040 : {
2041 10590 : odr_type t = get_odr_type (type, false);
2042 10590 : if (!t || t->odr_violated)
2043 : return type;
2044 10585 : return t->type;
2045 : }
2046 :
2047 : /* Set tbaa_enabled flag for TYPE. */
2048 :
2049 : void
2050 10303 : enable_odr_based_tbaa (tree type)
2051 : {
2052 10303 : odr_type t = get_odr_type (type, true);
2053 10303 : t->tbaa_enabled = true;
2054 10303 : }
2055 :
2056 : /* True if canonical type of TYPE is determined using ODR name. */
2057 :
2058 : bool
2059 7735 : odr_based_tbaa_p (const_tree type)
2060 : {
2061 7735 : if (!RECORD_OR_UNION_TYPE_P (type))
2062 : return false;
2063 6279 : if (!odr_hash)
2064 : return false;
2065 6276 : odr_type t = get_odr_type (const_cast <tree> (type), false);
2066 6276 : if (!t || !t->tbaa_enabled)
2067 : return false;
2068 : return true;
2069 : }
2070 :
2071 : /* Set TYPE_CANONICAL of type and all its variants and duplicates
2072 : to CANONICAL. */
2073 :
2074 : void
2075 10327 : set_type_canonical_for_odr_type (tree type, tree canonical)
2076 : {
2077 10327 : odr_type t = get_odr_type (type, false);
2078 10327 : unsigned int i;
2079 10327 : tree tt;
2080 :
2081 26375 : for (tree t2 = t->type; t2; t2 = TYPE_NEXT_VARIANT (t2))
2082 16048 : TYPE_CANONICAL (t2) = canonical;
2083 10327 : if (t->types)
2084 14301 : FOR_EACH_VEC_ELT (*t->types, i, tt)
2085 18219 : for (tree t2 = tt; t2; t2 = TYPE_NEXT_VARIANT (t2))
2086 11021 : TYPE_CANONICAL (t2) = canonical;
2087 10327 : }
2088 :
2089 : /* Return true if we reported some ODR violation on TYPE. */
2090 :
2091 : bool
2092 10462 : odr_type_violation_reported_p (tree type)
2093 : {
2094 10462 : return get_odr_type (type, false)->odr_violated;
2095 : }
2096 :
2097 : /* Add TYPE of ODR type hash. */
2098 :
void
register_odr_type (tree type)
{
  /* Lazily create the hash on first registration.  */
  if (!odr_hash)
    odr_hash = new odr_hash_type (23);
  /* Only main variants are keyed in the ODR hash.  */
  if (type == TYPE_MAIN_VARIANT (type))
    {
      /* To get ODR warnings right, first register all sub-types.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && COMPLETE_TYPE_P (type))
	{
	  /* Limit recursion on types which are already registered.  */
	  odr_type ot = get_odr_type (type, false);
	  if (ot
	      && (ot->type == type
		  || (ot->types_set
		      && ot->types_set->contains (type))))
	    return;
	  /* Register the types of all fields, looking through arrays
	     to their element type.  */
	  for (tree f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	    if (TREE_CODE (f) == FIELD_DECL)
	      {
		tree subtype = TREE_TYPE (f);

		while (TREE_CODE (subtype) == ARRAY_TYPE)
		  subtype = TREE_TYPE (subtype);
		if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype)))
		  register_odr_type (TYPE_MAIN_VARIANT (subtype));
	      }
	  /* Also register all base types.  */
	  if (TYPE_BINFO (type))
	    for (unsigned int i = 0;
		 i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	      register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
					     (TYPE_BINFO (type), i)));
	}
      /* Finally insert TYPE itself.  */
      get_odr_type (type, true);
    }
}
2136 :
2137 : /* Return true if type is known to have no derivations. */
2138 :
2139 : bool
2140 1803745 : type_known_to_have_no_derivations_p (tree t)
2141 : {
2142 1803745 : return (type_all_derivations_known_p (t)
2143 1803745 : && (TYPE_FINAL_P (t)
2144 2129 : || (odr_hash
2145 2129 : && !get_odr_type (t, true)->derived_types.length())));
2146 : }
2147 :
2148 : /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2149 : recursive printing. */
2150 :
2151 : static void
2152 64 : dump_odr_type (FILE *f, odr_type t, int indent=0)
2153 : {
2154 64 : unsigned int i;
2155 64 : fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2156 64 : print_generic_expr (f, t->type, TDF_SLIM);
2157 128 : fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)" : "");
2158 128 : fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)" : "");
2159 64 : if (TYPE_NAME (t->type))
2160 : {
2161 64 : if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2162 14 : fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2163 7 : IDENTIFIER_POINTER
2164 : (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2165 : }
2166 64 : if (t->bases.length ())
2167 : {
2168 32 : fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2169 96 : for (i = 0; i < t->bases.length (); i++)
2170 32 : fprintf (f, " %i", t->bases[i]->id);
2171 32 : fprintf (f, "\n");
2172 : }
2173 64 : if (t->derived_types.length ())
2174 : {
2175 29 : fprintf (f, "%*s derived types:\n", indent * 2, "");
2176 90 : for (i = 0; i < t->derived_types.length (); i++)
2177 32 : dump_odr_type (f, t->derived_types[i], indent + 1);
2178 : }
2179 64 : fprintf (f, "\n");
2180 64 : }
2181 :
2182 : /* Dump the type inheritance graph. */
2183 :
static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  unsigned int num_all_types = 0, num_types = 0, num_duplicates = 0;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  /* Dump every root (type with no recorded bases); dump_odr_type
     recurses into derived types.  */
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  /* Second pass: report duplicate-tree statistics.  */
  for (i = 0; i < odr_types.length (); i++)
    {
      /* Slots may have been zeroed when a type was renumbered.  */
      if (!odr_types[i])
	continue;

      num_all_types++;
      if (!odr_types[i]->types || !odr_types[i]->types->length ())
	continue;

      /* To aid ODR warnings we also mangle integer constants but do
	 not consider duplicates there.  */
      if (TREE_CODE (odr_types[i]->type) == INTEGER_TYPE)
	continue;

      /* It is normal to have one duplicate and one normal variant.  */
      if (odr_types[i]->types->length () == 1
	  && COMPLETE_TYPE_P (odr_types[i]->type)
	  && !COMPLETE_TYPE_P ((*odr_types[i]->types)[0]))
	continue;

      num_types ++;

      unsigned int j;
      fprintf (f, "Duplicate tree types for odr type %i\n", i);
      print_node (f, "", odr_types[i]->type, 0);
      print_node (f, "", TYPE_NAME (odr_types[i]->type), 0);
      putc ('\n',f);
      for (j = 0; j < odr_types[i]->types->length (); j++)
	{
	  tree t;
	  num_duplicates ++;
	  fprintf (f, "duplicate #%i\n", j);
	  print_node (f, "", (*odr_types[i]->types)[j], 0);
	  t = (*odr_types[i]->types)[j];
	  /* Also dump the enclosing context chain of the duplicate,
	     which helps spotting where it came from.  */
	  while (TYPE_P (t) && TYPE_CONTEXT (t))
	    {
	      t = TYPE_CONTEXT (t);
	      print_node (f, "", t, 0);
	    }
	  print_node (f, "", TYPE_NAME ((*odr_types[i]->types)[j]), 0);
	  putc ('\n',f);
	}
    }
  fprintf (f, "Out of %i types there are %i types with duplicates; "
	   "%i duplicates overall\n", num_all_types, num_types, num_duplicates);
}
2243 :
2244 : /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2245 : ODR warnings.
2246 : We make TYPE_DECLs to not point back
2247 : to the type (which is needed to keep them in the same SCC and preserve
2248 : location information to output warnings) and subsequently we make all
2249 : TYPE_DECLS of same assembler name equivalent. */
2250 :
2251 : static void
2252 1969436 : free_odr_warning_data ()
2253 : {
2254 1969436 : static bool odr_data_freed = false;
2255 :
2256 1969436 : if (odr_data_freed || !flag_wpa || !odr_types_ptr)
2257 : return;
2258 :
2259 698 : odr_data_freed = true;
2260 :
2261 6225 : for (unsigned int i = 0; i < odr_types.length (); i++)
2262 5527 : if (odr_types[i])
2263 : {
2264 5496 : tree t = odr_types[i]->type;
2265 :
2266 5496 : TREE_TYPE (TYPE_NAME (t)) = void_type_node;
2267 :
2268 5496 : if (odr_types[i]->types)
2269 6046 : for (unsigned int j = 0; j < odr_types[i]->types->length (); j++)
2270 : {
2271 3053 : tree td = (*odr_types[i]->types)[j];
2272 :
2273 3053 : TYPE_NAME (td) = TYPE_NAME (t);
2274 : }
2275 : }
2276 698 : odr_data_freed = true;
2277 : }
2278 :
2279 : /* Initialize IPA devirt and build inheritance tree graph. */
2280 :
void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  dump_flags_t flags;

  /* The graph is built only once; later calls merely get a chance to
     free the warning-only data.  */
  if (odr_hash)
    {
      free_odr_warning_data ();
      return;
    }
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting of types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  free_odr_warning_data ();
  timevar_pop (TV_IPA_INHERITANCE);
}
2338 :
2339 : /* Return true if N has reference from live virtual table
2340 : (and thus can be a destination of polymorphic call).
2341 : Be conservatively correct when callgraph is not built or
2342 : if the method may be referred externally. */
2343 :
static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  /* Symbols visible outside this unit/partition may be referenced from
     vtables we cannot see; answer true conservatively.  */
  if (node->externally_visible
      || DECL_EXTERNAL (node->decl)
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (symtab->state <= CONSTRUCTION)
    return true;

  /* Scan the referrers: either an alias that is itself referenced from
     a vtable, or an address reference taken from a virtual table
     variable.  */
  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
	|| (ref->use == IPA_REF_ADDR
	    && VAR_P (ref->referring->decl)
	    && DECL_VIRTUAL_P (ref->referring->decl)))
      {
	found = true;
	break;
      }
  return found;
}
2379 :
2380 : /* Return if TARGET is cxa_pure_virtual. */
2381 :
2382 : static bool
2383 417383 : is_cxa_pure_virtual_p (tree target)
2384 : {
2385 417214 : return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2386 58434 : && DECL_NAME (target)
2387 475817 : && id_equal (DECL_NAME (target),
2388 417383 : "__cxa_pure_virtual");
2389 : }
2390 :
2391 : /* If TARGET has associated node, record it in the NODES array.
2392 : CAN_REFER specifies if the program can refer to the target directly.
2393 : if TARGET is unknown (NULL) or it cannot be inserted (for example because
2394 : its body was already removed and there is no way to refer to it), clear
2395 : COMPLETEP. */
2396 :
static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;
  bool pure_virtual = is_cxa_pure_virtual_p (target);

  /* __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  /* Unknown target: nothing to record (completeness was already handled
     through CAN_REFER above).  */
  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && !pure_virtual
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* When sanitizing, do not assume that __cxa_pure_virtual is not called
	 by valid program.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	;
      /* Only add pure virtual if it is the only possible target.  This way
	 we will preserve the diagnostics about pure virtual called in many
	 cases without disabling optimization in other.  */
      else if (pure_virtual)
	{
	  if (nodes.length ())
	    return;
	}
      /* If we found a real target, take away cxa_pure_virtual.  */
      else if (!pure_virtual && nodes.length () == 1
	       && is_cxa_pure_virtual_p (nodes[0]->decl))
	nodes.pop ();
      if (pure_virtual && nodes.length ())
	return;
      /* Record the node once; INSERTED de-duplicates by decl.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (!completep)
    ;
  /* We have definition of __cxa_pure_virtual that is not accessible (it is
     optimized out or partitioned to other unit) so we cannot add it.  When
     not sanitizing, there is nothing to do.
     Otherwise declare the list incomplete.  */
  else if (pure_virtual)
    {
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	*completep = false;
    }
  /* Otherwise the target is unusable; unless it is an optimized-out
     anonymous-namespace method (non-LTRANS), the list is incomplete.  */
  else if (flag_ltrans
	   || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
    *completep = false;
}
2506 :
2507 : /* See if BINFO's type matches OUTER_TYPE. If so, look up
2508 : BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2509 : method in vtable and insert method to NODES array
2510 : or BASES_TO_CONSIDER if this array is non-NULL.
2511 : Otherwise recurse to base BINFOs.
2512 : This matches what get_binfo_at_offset does, but with offset
2513 : being unknown.
2514 :
2515 : TYPE_BINFOS is a stack of BINFOS of types with defined
2516 : virtual table seen on way from class type to BINFO.
2517 :
2518 : MATCHED_VTABLES tracks virtual tables we already did lookup
2519 : for virtual function in. INSERTED tracks nodes we already
2520 : inserted.
2521 :
2522 : ANONYMOUS is true if BINFO is part of anonymous namespace.
2523 :
2524 : Clear COMPLETEP when we hit unreferable target.
2525 : */
2526 :
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs with their own vtable on the path
     from the outermost class to BINFO.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      /* Pop before any early return so the stack stays balanced for
	 the caller.  */
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  /* BINFO_VTABLE may be vtable address plus offset; strip to
	     the vtable decl itself.  */
	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Skip vtables we already looked into.  In the BASES_TO_CONSIDER
	 mode only test membership; do not mark as matched yet.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2617 :
2618 : /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2619 : of TYPE, insert them to NODES, recurse into derived nodes.
2620 : INSERTED is used to avoid duplicate insertions of methods into NODES.
2621 : MATCHED_VTABLES are used to avoid duplicate walking vtables.
2622 : Clear COMPLETEP if unreferable target is found.
2623 :
2624 : If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2625 : all cases where BASE_SKIPPED is true (because the base is abstract
2626 : class). */
2627 :
static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Fresh BINFO stack for each ODR type walked.  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into every derived type recorded in the inheritance
     graph.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
2670 :
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

class polymorphic_call_target_d
{
public:
  /* Token (vtable slot) of the virtual call being queried.  */
  HOST_WIDE_INT otr_token;
  /* Context of the call (outer type, offset, speculation data).  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call.  */
  odr_type type;
  /* Computed list of possible targets; owned by the cache entry and
     released by polymorphic_call_target_hasher::remove.  */
  vec <cgraph_node *> targets;
  /* Non-NULL target decl to count for -Wsuggest-final-methods.  */
  tree decl_warning;
  /* For -Wsuggest-final-types: odr type id biased by 1, so 0 means
     no warning.  */
  int type_warning;
  /* Number of ODR types known when the entry was computed; newly added
     types invalidate the cached answer (checked by the hasher).  */
  unsigned int n_odr_types;
  /* True if the target list is known to be complete.  */
  bool complete;
  /* True when the entry answers a speculative query.  */
  bool speculative;
};
2690 :
/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
  : pointer_hash <polymorphic_call_target_d>
{
  /* Hash a query by token, type, context and n_odr_types.  */
  static inline hashval_t hash (const polymorphic_call_target_d *);
  /* Compare two queries field by field.  */
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  /* Free an entry together with its target vector.  */
  static inline void remove (polymorphic_call_target_d *);
};
2701 :
2702 : /* Return the computed hashcode for ODR_QUERY. */
2703 :
2704 : inline hashval_t
2705 6825634 : polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2706 : {
2707 6825634 : inchash::hash hstate (odr_query->otr_token);
2708 :
2709 6825634 : hstate.add_hwi (odr_query->type->id);
2710 6825634 : hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2711 6825634 : hstate.add_hwi (odr_query->context.offset);
2712 6825634 : hstate.add_hwi (odr_query->n_odr_types);
2713 :
2714 6825634 : if (odr_query->context.speculative_outer_type)
2715 : {
2716 25354 : hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2717 25354 : hstate.add_hwi (odr_query->context.speculative_offset);
2718 : }
2719 6825634 : hstate.add_flag (odr_query->speculative);
2720 6825634 : hstate.add_flag (odr_query->context.maybe_in_construction);
2721 6825634 : hstate.add_flag (odr_query->context.maybe_derived_type);
2722 6825634 : hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2723 6825634 : hstate.commit_flag ();
2724 6825634 : return hstate.end ();
2725 : }
2726 :
2727 : /* Compare cache entries T1 and T2. */
2728 :
2729 : inline bool
2730 6737198 : polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2731 : const polymorphic_call_target_d *t2)
2732 : {
2733 3287280 : return (t1->type == t2->type && t1->otr_token == t2->otr_token
2734 2169309 : && t1->speculative == t2->speculative
2735 1783139 : && t1->context.offset == t2->context.offset
2736 1782418 : && t1->context.speculative_offset == t2->context.speculative_offset
2737 1782418 : && t1->context.outer_type == t2->context.outer_type
2738 1705005 : && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2739 1704263 : && t1->context.maybe_in_construction
2740 1704263 : == t2->context.maybe_in_construction
2741 1374409 : && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2742 1369441 : && (t1->context.speculative_maybe_derived_type
2743 1369441 : == t2->context.speculative_maybe_derived_type)
2744 : /* Adding new type may affect outcome of target search. */
2745 8106593 : && t1->n_odr_types == t2->n_odr_types);
2746 : }
2747 :
2748 : /* Remove entry in polymorphic call target cache hash. */
2749 :
2750 : inline void
2751 81906 : polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
2752 : {
2753 81906 : v->targets.release ();
2754 81906 : free (v);
2755 81906 : }
2756 :
2757 : /* Polymorphic call target query cache. */
2758 :
2759 : typedef hash_table<polymorphic_call_target_hasher>
2760 : polymorphic_call_target_hash_type;
2761 : static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2762 :
2763 : /* Destroy polymorphic call target query cache. */
2764 :
2765 : static void
2766 747364 : free_polymorphic_call_targets_hash ()
2767 : {
2768 747364 : if (cached_polymorphic_call_targets)
2769 : {
2770 9988 : delete polymorphic_call_target_hash;
2771 9988 : polymorphic_call_target_hash = NULL;
2772 19976 : delete cached_polymorphic_call_targets;
2773 9988 : cached_polymorphic_call_targets = NULL;
2774 : }
2775 747364 : }
2776 :
2777 : /* Force rebuilding type inheritance graph from scratch.
2778 : This is use to make sure that we do not keep references to types
2779 : which was not visible to free_lang_data. */
2780 :
2781 : void
2782 229955 : rebuild_type_inheritance_graph ()
2783 : {
2784 229955 : if (!odr_hash)
2785 : return;
2786 229955 : delete odr_hash;
2787 229955 : odr_hash = NULL;
2788 229955 : odr_types_ptr = NULL;
2789 229955 : free_polymorphic_call_targets_hash ();
2790 : }
2791 :
2792 : /* When virtual function is removed, we may need to flush the cache. */
2793 :
2794 : static void
2795 18073207 : devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2796 : {
2797 18073207 : if (cached_polymorphic_call_targets
2798 2256428 : && !thunk_expansion
2799 20329103 : && cached_polymorphic_call_targets->contains (n))
2800 6019 : free_polymorphic_call_targets_hash ();
2801 18073207 : }
2802 :
2803 : /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2804 :
2805 : tree
2806 21558 : subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2807 : tree vtable)
2808 : {
2809 21558 : tree v = BINFO_VTABLE (binfo);
2810 21558 : int i;
2811 21558 : tree base_binfo;
2812 21558 : unsigned HOST_WIDE_INT this_offset;
2813 :
2814 21558 : if (v)
2815 : {
2816 17737 : if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2817 0 : gcc_unreachable ();
2818 :
2819 17737 : if (offset == this_offset
2820 17737 : && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2821 : return binfo;
2822 : }
2823 :
2824 16116 : for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2825 11235 : if (polymorphic_type_binfo_p (base_binfo))
2826 : {
2827 7566 : base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2828 7566 : if (base_binfo)
2829 : return base_binfo;
2830 : }
2831 : return NULL;
2832 : }
2833 :
2834 : /* T is known constant value of virtual table pointer.
2835 : Store virtual table to V and its offset to OFFSET.
2836 : Return false if T does not look like virtual table reference. */
2837 :
2838 : bool
2839 359079 : vtable_pointer_value_to_vtable (const_tree t, tree *v,
2840 : unsigned HOST_WIDE_INT *offset)
2841 : {
2842 : /* We expect &MEM[(void *)&virtual_table + 16B].
2843 : We obtain object's BINFO from the context of the virtual table.
2844 : This one contains pointer to virtual table represented via
2845 : POINTER_PLUS_EXPR. Verify that this pointer matches what
2846 : we propagated through.
2847 :
2848 : In the case of virtual inheritance, the virtual tables may
2849 : be nested, i.e. the offset may be different from 16 and we may
2850 : need to dive into the type representation. */
2851 359079 : if (TREE_CODE (t) == ADDR_EXPR
2852 33196 : && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2853 33196 : && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2854 33196 : && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2855 33196 : && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2856 : == VAR_DECL)
2857 392275 : && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2858 : (TREE_OPERAND (t, 0), 0), 0)))
2859 : {
2860 33196 : *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2861 33196 : *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2862 33196 : return true;
2863 : }
2864 :
2865 : /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2866 : We need to handle it when T comes from static variable initializer or
2867 : BINFO. */
2868 325883 : if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2869 : {
2870 325815 : *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2871 325815 : t = TREE_OPERAND (t, 0);
2872 : }
2873 : else
2874 68 : *offset = 0;
2875 :
2876 325883 : if (TREE_CODE (t) != ADDR_EXPR)
2877 : return false;
2878 325815 : *v = TREE_OPERAND (t, 0);
2879 325815 : return true;
2880 : }
2881 :
2882 : /* T is known constant value of virtual table pointer. Return BINFO of the
2883 : instance type. */
2884 :
2885 : tree
2886 0 : vtable_pointer_value_to_binfo (const_tree t)
2887 : {
2888 0 : tree vtable;
2889 0 : unsigned HOST_WIDE_INT offset;
2890 :
2891 0 : if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2892 : return NULL_TREE;
2893 :
2894 : /* FIXME: for stores of construction vtables we return NULL,
2895 : because we do not have BINFO for those. Eventually we should fix
2896 : our representation to allow this case to be handled, too.
2897 : In the case we see store of BINFO we however may assume
2898 : that standard folding will be able to cope with it. */
2899 0 : return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2900 0 : offset, vtable);
2901 : }
2902 :
2903 : /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2904 : Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2905 : and insert them in NODES.
2906 :
2907 : MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2908 :
2909 : static void
2910 604 : record_targets_from_bases (tree otr_type,
2911 : HOST_WIDE_INT otr_token,
2912 : tree outer_type,
2913 : HOST_WIDE_INT offset,
2914 : vec <cgraph_node *> &nodes,
2915 : hash_set<tree> *inserted,
2916 : hash_set<tree> *matched_vtables,
2917 : bool *completep)
2918 : {
2919 1348 : while (true)
2920 : {
2921 1348 : HOST_WIDE_INT pos, size;
2922 1348 : tree base_binfo;
2923 1348 : tree fld;
2924 :
2925 1348 : if (types_same_for_odr (outer_type, otr_type))
2926 : return;
2927 :
2928 2211 : for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2929 : {
2930 2211 : if (TREE_CODE (fld) != FIELD_DECL)
2931 1461 : continue;
2932 :
2933 750 : pos = int_bit_position (fld);
2934 750 : size = tree_to_shwi (DECL_SIZE (fld));
2935 750 : if (pos <= offset && (pos + size) > offset
2936 : /* Do not get confused by zero sized bases. */
2937 1494 : && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2938 : break;
2939 : }
2940 : /* Within a class type we should always find corresponding fields. */
2941 744 : gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2942 :
2943 : /* Nonbase types should have been stripped by outer_class_type. */
2944 744 : gcc_assert (DECL_ARTIFICIAL (fld));
2945 :
2946 744 : outer_type = TREE_TYPE (fld);
2947 744 : offset -= pos;
2948 :
2949 744 : base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2950 744 : offset, otr_type);
2951 744 : if (!base_binfo)
2952 : {
2953 0 : gcc_assert (odr_violation_reported);
2954 : return;
2955 : }
2956 744 : gcc_assert (base_binfo);
2957 744 : if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2958 : {
2959 744 : bool can_refer;
2960 744 : tree target = gimple_get_virt_method_for_binfo (otr_token,
2961 : base_binfo,
2962 : &can_refer);
2963 1485 : if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2964 744 : maybe_record_node (nodes, target, inserted, can_refer, completep);
2965 744 : matched_vtables->add (BINFO_VTABLE (base_binfo));
2966 : }
2967 : }
2968 : }
2969 :
2970 : /* When virtual table is removed, we may need to flush the cache. */
2971 :
2972 : static void
2973 8130679 : devirt_variable_node_removal_hook (varpool_node *n,
2974 : void *d ATTRIBUTE_UNUSED)
2975 : {
2976 8130679 : if (cached_polymorphic_call_targets
2977 11384 : && DECL_VIRTUAL_P (n->decl)
2978 8133902 : && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2979 12 : free_polymorphic_call_targets_hash ();
2980 8130679 : }
2981 :
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  /* The type that is a candidate for being declared final.  */
  tree type;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Profile count accumulated across those calls.  */
  profile_count dyn_count;
};
2990 :
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  /* The method that is a candidate for being declared final.  */
  tree decl;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Profile count accumulated across those calls.  */
  profile_count dyn_count;
};
2999 :
/* Information about type and decl warnings.  */

class final_warning_record
{
public:
  /* If needed grow type_warnings vector and initialize new decl_warn_count
     to have dyn_count set to profile_count::zero ().  */
  void grow_type_warnings (unsigned newlen);

  /* Profile count of the polymorphic call currently being analyzed;
     accumulated into the entries below.  */
  profile_count dyn_count;
  /* Indexed by odr type id: -Wsuggest-final-types candidates.  */
  auto_vec<odr_type_warn_count> type_warnings;
  /* Keyed by target decl: -Wsuggest-final-methods candidates.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
3013 :
3014 : void
3015 19 : final_warning_record::grow_type_warnings (unsigned newlen)
3016 : {
3017 19 : unsigned len = type_warnings.length ();
3018 19 : if (newlen > len)
3019 : {
3020 10 : type_warnings.safe_grow_cleared (newlen, true);
3021 23 : for (unsigned i = len; i < newlen; i++)
3022 13 : type_warnings[i].dyn_count = profile_count::zero ();
3023 : }
3024 19 : }
3025 :
3026 : class final_warning_record *final_warning_records;
3027 :
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVED_TYPES make
   us to walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when user do not asks for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  /* With LTO, map the outer types through the ODR type table so that
     equivalent queries from different units share cache entries.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
	context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
	context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  key.n_odr_types = odr_types.length ();
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      /* Cache hit: update warning statistics and return the stored list.  */
      if (completep)
	*completep = (*slot)->complete;
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  if (!final_warning_records->type_warnings
		[(*slot)->type_warning - 1].dyn_count.initialized_p ())
	    final_warning_records->type_warnings
	       [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
	  if (final_warning_records->dyn_count > 0)
	    final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	      = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
		+ final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  if (final_warning_records->dyn_count > 0)
	     c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;
      bool check_derived_types = false;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (check_derived_types
	  ? type_or_derived_type_possibly_instantiated_p
		 (speculative_outer_type)
	  : type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer,
			   &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  if (!speculative || !nodes.length ())
    {
      bool check_derived_types = false;
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	{
	  check_derived_types = true;
	  context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (check_derived_types
	  ? type_or_derived_type_possibly_instantiated_p (outer_type)
	  : type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  if (!outer_type->all_derivations_known)
	    {
	      if (!speculative && final_warning_records
		  && nodes.length () == 1
		  && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
		{
		  if (complete
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      final_warning_records->grow_type_warnings
			(outer_type->id);
		      final_warning_records->type_warnings[outer_type->id].count++;
		      if (!final_warning_records->type_warnings
				[outer_type->id].dyn_count.initialized_p ())
			final_warning_records->type_warnings
			   [outer_type->id].dyn_count = profile_count::zero ();
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      /* Id is biased by 1 so that 0 means no warning.  */
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the computed answer into the cache.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  (*slot)->n_odr_types = odr_types.length ();
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3382 :
/* Traversal callback: collect a pointer to VALUE into VEC.  KEY is
   unused.  Always return true so the traversal continues.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3390 :
3391 : /* Dump target list TARGETS into FILE. */
3392 :
3393 : static void
3394 88 : dump_targets (FILE *f, vec <cgraph_node *> targets, bool verbose)
3395 : {
3396 88 : unsigned int i;
3397 :
3398 203 : for (i = 0; i < targets.length (); i++)
3399 : {
3400 115 : char *name = NULL;
3401 115 : if (in_lto_p)
3402 3 : name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3403 115 : fprintf (f, " %s", name ? name : targets[i]->dump_name ());
3404 115 : if (in_lto_p)
3405 3 : free (name);
3406 115 : if (!targets[i]->definition)
3407 43 : fprintf (f, " (no definition%s)",
3408 43 : DECL_DECLARED_INLINE_P (targets[i]->decl)
3409 : ? " inline" : "");
3410 : /* With many targets for every call polymorphic dumps are going to
3411 : be quadratic in size. */
3412 115 : if (i > 10 && !verbose)
3413 : {
3414 0 : fprintf (f, " ... and %i more targets\n", targets.length () - i);
3415 0 : return;
3416 : }
3417 : }
3418 88 : fprintf (f, "\n");
3419 : }
3420 :
/* Dump into F all possible targets of a polymorphic call described by
   OTR_TYPE/OTR_TOKEN and context CTX, including the speculative ones.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx,
					bool verbose)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  if (!type)
    return;
  /* First query the non-speculative target list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, " Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, " %s%s%s%s\n ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets, verbose);

  /* Then the speculative list; dump it only when it differs.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  if (targets.length () != len)
    {
      fprintf (f, " Speculative targets:");
      dump_targets (f, targets, verbose);
    }
  /* Ugly: during callgraph construction the target cache may get populated
     before all targets are found.  While this is harmless (because all local
     types are discovered and only in those case we devirtualize fully and we
     don't do speculative devirtualization before IPA stage) it triggers
     assert here when dumping at that stage also populates the case with
     speculative targets.  Quietly ignore this.  */
  gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
  fprintf (f, "\n");
}
3472 :
3473 :
3474 : /* Return true if N can be possibly target of a polymorphic call of
3475 : OTR_TYPE/OTR_TOKEN. */
3476 :
3477 : bool
3478 34046 : possible_polymorphic_call_target_p (tree otr_type,
3479 : HOST_WIDE_INT otr_token,
3480 : const ipa_polymorphic_call_context &ctx,
3481 : struct cgraph_node *n)
3482 : {
3483 34046 : vec <cgraph_node *> targets;
3484 34046 : unsigned int i;
3485 34046 : bool final;
3486 :
3487 34046 : if (fndecl_built_in_p (n->decl, BUILT_IN_NORMAL)
3488 34046 : && (DECL_FUNCTION_CODE (n->decl) == BUILT_IN_UNREACHABLE
3489 0 : || DECL_FUNCTION_CODE (n->decl) == BUILT_IN_TRAP
3490 0 : || DECL_FUNCTION_CODE (n->decl) == BUILT_IN_UNREACHABLE_TRAP))
3491 : return true;
3492 :
3493 33956 : if (is_cxa_pure_virtual_p (n->decl))
3494 : return true;
3495 :
3496 33948 : if (!odr_hash)
3497 : return true;
3498 33948 : targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3499 50348 : for (i = 0; i < targets.length (); i++)
3500 50083 : if (n->semantically_equivalent_p (targets[i]))
3501 : return true;
3502 :
3503 : /* At a moment we allow middle end to dig out new external declarations
3504 : as a targets of polymorphic calls. */
3505 265 : if (!final && !n->definition)
3506 : return true;
3507 : return false;
3508 : }
3509 :
3510 :
3511 :
3512 : /* Return true if N can be possibly target of a polymorphic call of
3513 : OBJ_TYPE_REF expression REF in STMT. */
3514 :
3515 : bool
3516 6 : possible_polymorphic_call_target_p (tree ref,
3517 : gimple *stmt,
3518 : struct cgraph_node *n)
3519 : {
3520 6 : ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3521 6 : tree call_fn = gimple_call_fn (stmt);
3522 :
3523 12 : return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3524 : tree_to_uhwi
3525 6 : (OBJ_TYPE_REF_TOKEN (call_fn)),
3526 : context,
3527 6 : n);
3528 : }
3529 :
3530 :
3531 : /* After callgraph construction new external nodes may appear.
3532 : Add them into the graph. */
3533 :
3534 : void
3535 511368 : update_type_inheritance_graph (void)
3536 : {
3537 511368 : struct cgraph_node *n;
3538 :
3539 511368 : if (!odr_hash)
3540 : return;
3541 511368 : free_polymorphic_call_targets_hash ();
3542 511368 : timevar_push (TV_IPA_INHERITANCE);
3543 : /* We reconstruct the graph starting from types of all methods seen in the
3544 : unit. */
3545 118237468 : FOR_EACH_FUNCTION (n)
3546 117726100 : if (DECL_VIRTUAL_P (n->decl)
3547 1800259 : && !n->definition
3548 118123779 : && n->real_symbol_p ())
3549 101052 : get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3550 511368 : timevar_pop (TV_IPA_INHERITANCE);
3551 : }
3552 :
3553 :
3554 : /* Return true if N looks like likely target of a polymorphic call.
3555 : Rule out cxa_pure_virtual, noreturns, function declared cold and
3556 : other obvious cases. */
3557 :
3558 : bool
3559 285508 : likely_target_p (struct cgraph_node *n)
3560 : {
3561 285508 : int flags;
3562 : /* cxa_pure_virtual and similar things are not likely. */
3563 285508 : if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3564 : return false;
3565 284439 : flags = flags_from_decl_or_type (n->decl);
3566 284439 : if (flags & ECF_NORETURN)
3567 : return false;
3568 283790 : if (lookup_attribute ("cold",
3569 283790 : DECL_ATTRIBUTES (n->decl)))
3570 : return false;
3571 283790 : if (n->frequency < NODE_FREQUENCY_NORMAL)
3572 : return false;
3573 : /* If there are no live virtual tables referring the target,
3574 : the only way the target can be called is an instance coming from other
3575 : compilation unit; speculative devirtualization is built around an
3576 : assumption that won't happen. */
3577 283790 : if (!referenced_from_vtable_p (n))
3578 : return false;
3579 : return true;
3580 : }
3581 :
3582 : /* Compare type warning records P1 and P2 and choose one with larger count;
3583 : helper for qsort. */
3584 :
3585 : static int
3586 0 : type_warning_cmp (const void *p1, const void *p2)
3587 : {
3588 0 : const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3589 0 : const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3590 :
3591 0 : if (t1->dyn_count < t2->dyn_count)
3592 : return 1;
3593 0 : if (t1->dyn_count > t2->dyn_count)
3594 : return -1;
3595 0 : return t2->count - t1->count;
3596 : }
3597 :
3598 : /* Compare decl warning records P1 and P2 and choose one with larger count;
3599 : helper for qsort. */
3600 :
3601 : static int
3602 9 : decl_warning_cmp (const void *p1, const void *p2)
3603 : {
3604 9 : const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3605 9 : const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3606 :
3607 9 : if (t1->dyn_count < t2->dyn_count)
3608 : return 1;
3609 9 : if (t1->dyn_count > t2->dyn_count)
3610 : return -1;
3611 9 : return t2->count - t1->count;
3612 : }
3613 :
3614 :
/* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
   context CTX.  Return the unique likely target, or NULL when there is
   none, more than one, or the single candidate is unsuitable.  */

struct cgraph_node *
try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
				  ipa_polymorphic_call_context ctx)
{
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (otr_type, otr_token, ctx, NULL, NULL, true);
  unsigned int i;
  struct cgraph_node *likely_target = NULL;

  /* We need exactly one likely target; two or more means speculation
     would be a coin toss.  */
  for (i = 0; i < targets.length (); i++)
    if (likely_target_p (targets[i]))
      {
	if (likely_target)
	  return NULL;
	likely_target = targets[i];
      }
  /* Only a defined, non-external target can become a direct callee.  */
  if (!likely_target
      ||!likely_target->definition
      || DECL_EXTERNAL (likely_target->decl))
    return NULL;

  /* Don't use an implicitly-declared destructor (c++/58678).  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    return NULL;
  /* An interposable symbol that may also be discarded can legally be
     replaced at link time; speculating on it would be unsound.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    return NULL;
  return likely_target;
}
3650 :
/* Various statistics counters collected during devirtualization.  */

struct devirt_stats
{
  /* Polymorphic calls seen; calls already speculated; calls converted to
     speculative calls; calls skipped because they are cold.  */
  int npolymorphic, nspeculated, nconverted, ncold;
  /* Calls with too many likely targets; overwritable targets; fully
     devirtualized calls; targets lacking a definition.  */
  int nmultiple, noverwritable, ndevirtualized, nnotdefined;
  /* Existing speculations we disagree/agree with; external targets;
     compiler-generated (artificial) targets.  */
  int nwrong, nok, nexternal, nartificial;
  /* Polymorphic call infos dropped because ipa-prop cannot use them.  */
  int ndropped;
};
3660 :
/* Check LIKELY_TARGET and return true if it a suitable target for
   devirtualization or speculative devirtualization.  Increase the respective
   counter in STATS if any check fails.  */

static bool
devirt_target_ok_p (cgraph_node *likely_target, struct devirt_stats *stats)
{
  /* A mere declaration cannot be made the direct callee.  */
  if (!likely_target->definition)
    {
      if (dump_file)
	fprintf (dump_file, "Target is not a definition\n\n");
      stats->nnotdefined++;
      return false;
    }
  /* Do not introduce new references to external symbols.  While we
     can handle these just well, it is common for programs to be
     incorrectly built with headers declaring methods of libraries they
     are not actually linked with.  */
  if (DECL_EXTERNAL (likely_target->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Target is external\n\n");
      stats->nexternal++;
      return false;
    }
  /* Don't use an implicitly-declared destructor (c++/58678).
     Look through thunks to the underlying function symbol.  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Target is artificial\n\n");
      stats->nartificial++;
      return false;
    }
  /* An interposable target that the linker may also discard can be
     replaced by a different implementation at link time.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Target is overwritable\n\n");
      stats->noverwritable++;
      return false;
    }
  return true;
}
3706 :
/* The ipa-devirt pass.
   When polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  Also handles simple indirect calls
   through function pointers stored into records.  */

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  /* Cache tokens of target lists already known to contain no usable
     likely target, so we do not rescan them per edge.  */
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;
  struct devirt_stats stats;
  memset (&stats, 0, sizeof (stats));

  if (dump_file)
    {
      dump_type_inheritance_graph (dump_file);
      ipa_dump_noted_record_fnptrs (dump_file);
    }

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear cache in this case to trigger recomputation of all
     entries.  */
  if (odr_types_ptr && (warn_suggest_final_methods || warn_suggest_final_types))
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->dyn_count = profile_count::zero ();
      final_warning_records->grow_type_warnings (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  /* Walk every indirect call edge of every defined function.  */
  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (!opt_for_fn (n->decl, flag_devirtualize))
	continue;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s\n",
		 n->dump_name ());
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (!e->maybe_hot_p ())
	  {
	    if (dump_file)
	      fprintf (dump_file, "Call is cold\n\n");
	    stats.ncold++;
	    continue;
	  }
	/* Polymorphic (virtual) calls.  */
	else if (cgraph_polymorphic_indirect_info *pii
		   = dyn_cast <cgraph_polymorphic_indirect_info *> (e->indirect_info))
	  {
	    if (!pii->usable_p () || !odr_types_ptr)
	      continue;

	    void *cache_token;
	    bool final;

	    if (final_warning_records)
	      final_warning_records->dyn_count = e->count.ipa ();

	    vec <cgraph_node *>targets
	      = possible_polymorphic_call_targets
		  (e, &final, &cache_token, true);
	    unsigned int i;

	    /* Trigger warnings by calculating non-speculative targets.  */
	    if (warn_suggest_final_methods || warn_suggest_final_types)
	      possible_polymorphic_call_targets (e);

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e, (dump_flags & TDF_DETAILS));

	    stats.npolymorphic++;

	    /* See if the call can be devirtualized by means of ipa-prop's
	       polymorphic call context propagation.  If not, we can just
	       forget about this call being polymorphic and avoid some heavy
	       lifting in remove_unreachable_nodes that will otherwise try to
	       keep all possible targets alive until inlining and in the inliner
	       itself.

	       This may need to be revisited once we add further ways to use
	       the may edges, but it is a reasonable thing to do right now.  */

	    if ((pii->param_index == -1
		 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
		     && pii->vptr_changed))
		&& !flag_ltrans_devirtualize)
	      {
		pii->mark_unusable ();
		stats.ndropped++;
		if (dump_file)
		  fprintf (dump_file, "Dropping polymorphic call info;"
			   " it cannot be used by ipa-prop\n");
	      }

	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
	      continue;

	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		stats.nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (bad_call_targets.contains (cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		stats.nmultiple++;
		continue;
	      }
	    /* Collect up to param_max_devirt_targets likely targets;
	       any more and the speculation is abandoned.  */
	    auto_vec <cgraph_node *, 20> likely_targets;
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if ((int)likely_targets.length () >= param_max_devirt_targets)
		    {
		      likely_targets.truncate (0);
		      if (dump_file)
			fprintf (dump_file, "More than %i likely targets\n\n",
				 param_max_devirt_targets);
		      stats.nmultiple++;
		      break;
		    }
		  likely_targets.safe_push (targets[i]);
		}
	    if (!likely_targets.length ())
	      {
		bad_call_targets.add (cache_token);
		continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		for (cgraph_node * likely_target: likely_targets)
		  if (e->speculative_call_for_target (likely_target))
		    {
		      fprintf (dump_file,
			       "We agree with speculation on target %s\n\n",
			       likely_target->dump_name ());
		      stats.nok++;
		    }
		  else
		    {
		      fprintf (dump_file,
			       "We disagree with speculation on target %s\n\n",
			       likely_target->dump_name ());
		      stats.nwrong++;
		    }
		continue;
	      }
	    /* Turn the edge into one speculative call per surviving
	       target, splitting the profile count between them.  */
	    bool first = true;
	    unsigned speculative_id = e->get_next_speculative_id ();
	    for (cgraph_node * likely_target: likely_targets)
	      {
		if (!devirt_target_ok_p (likely_target, &stats))
		  continue;
		else if (dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
					 "speculatively devirtualizing call "
					 "in %s to %s\n",
					 n->dump_name (),
					 likely_target->dump_name ());
		      }
		    if (!likely_target->can_be_discarded_p ())
		      {
			cgraph_node *alias;
			alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
			if (alias)
			  likely_target = alias;
		      }
		    if (first)
		      stats.nconverted++;
		    first = false;
		    update = true;
		    e->make_speculative
		      (likely_target,
		       e->count.apply_scale (8, 10 * likely_targets.length ()),
		       speculative_id++);
		  }
	      }
	    if (speculative_id > 1 && dump_enabled_p ())
	      {
		dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
				 "devirtualized call in %s to %i targets\n",
				 n->dump_name (),
				 speculative_id);
	      }
	  }
	/* Plain indirect calls through function pointers loaded from
	   records.  */
	else if (cgraph_simple_indirect_info *sii
		   = dyn_cast <cgraph_simple_indirect_info *> (e->indirect_info))
	  {
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		stats.nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (!sii->fnptr_loaded_from_record
		|| !opt_for_fn (n->decl,
				flag_speculatively_call_stored_functions))
	      continue;

	    /* A record field with exactly one function ever stored into
	       it gives us the single likely callee.  */
	    tree rec_type = sii->rec_type;
	    unsigned fld_off = sii->fld_offset;
	    tree likely_tgt_decl = ipa_single_noted_fnptr_in_record (rec_type,
								     fld_off);
	    cgraph_node *likely_tgt_node;
	    if (likely_tgt_decl
		&& (likely_tgt_node = cgraph_node::get (likely_tgt_decl))
		&& devirt_target_ok_p (likely_tgt_node, &stats))
	      {
		if (!likely_tgt_node->can_be_discarded_p ())
		  {
		    cgraph_node *alias;
		    alias = dyn_cast<cgraph_node *> (likely_tgt_node
						     ->noninterposable_alias ());
		    if (alias)
		      likely_tgt_node = alias;
		  }
		if (e->speculative)
		  {
		    if (e->speculative_call_for_target (likely_tgt_node))
		      {
			fprintf (dump_file, "Simple call agree with speculation\n\n");
			stats.nok++;
		      }
		    else
		      {
			fprintf (dump_file, "Simple call disagree with speculation\n\n");
			stats.nwrong++;
		      }
		    continue;
		  }

		if (dump_enabled_p ())
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
				   "speculatively turning an indirect call "
				   "in %s to a direct one to %s\n",
				   n->dump_name (),
				   likely_tgt_node->dump_name ());

		update = true;
		e->make_speculative (likely_tgt_node,
				     e->count.apply_scale (8, 10),
				     e->get_next_speculative_id ());
	      }
	  }
      if (update)
	ipa_update_overall_fn_summary (n);
    }
  ipa_free_noted_fnptr_calls ();
  /* Emit the -Wsuggest-final-types / -Wsuggest-final-methods warnings
     accumulated by target computation above, largest counts first.  */
  if (odr_types_ptr && (warn_suggest_final_methods || warn_suggest_final_types))
    {
      if (warn_suggest_final_types)
	{
	  final_warning_records->type_warnings.qsort (type_warning_cmp);
	  for (unsigned int i = 0;
	       i < final_warning_records->type_warnings.length (); i++)
	    if (final_warning_records->type_warnings[i].count)
	      {
		tree type = final_warning_records->type_warnings[i].type;
		int count = final_warning_records->type_warnings[i].count;
		profile_count dyn_count
		  = final_warning_records->type_warnings[i].dyn_count;

		if (!(dyn_count > 0))
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls",
			     type,
			     count);
		else
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call "
			     "executed %lli times",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls "
			     "executed %lli times",
			     type,
			     count,
			     (long long) dyn_count.to_gcov_type ());
	      }
	}

      if (warn_suggest_final_methods)
	{
	  auto_vec<const decl_warn_count*> decl_warnings_vec;

	  final_warning_records->decl_warnings.traverse
	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
	  decl_warnings_vec.qsort (decl_warning_cmp);
	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
	    {
	      tree decl = decl_warnings_vec[i]->decl;
	      int count = decl_warnings_vec[i]->count;
	      profile_count dyn_count
		= decl_warnings_vec[i]->dyn_count;

	      if (!(dyn_count > 0))
		if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i calls",
			     DECL_CONTEXT (decl), count);
		else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring method %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring method %qD final "
			     "would enable devirtualization of %i calls",
			     decl, count);
	      else if (DECL_CXX_DESTRUCTOR_P (decl))
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   DECL_CONTEXT (decl), count,
			   (long long)dyn_count.to_gcov_type ());
	      else
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring method %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring method %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   decl, count,
			   (long long)dyn_count.to_gcov_type ());
	    }
	}

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
	     stats.npolymorphic, stats.ndevirtualized, stats.nconverted,
	     stats.ncold, stats.nmultiple, stats.noverwritable,
	     stats.nspeculated, stats.nok, stats.nwrong,
	     stats.nexternal, stats.nnotdefined, stats.nartificial,
	     stats.ndropped);
  /* NOTE(review): stats.ndevirtualized is never incremented in this
     function (only nconverted is), so the return value effectively
     depends on ndropped alone — confirm this is intentional.  */
  return stats.ndevirtualized || stats.ndropped ? TODO_remove_functions : 0;
}
4085 :
namespace {

/* Pass descriptor for the ipa-devirt pass.  */

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* Pass wrapper; all of the summary hooks are unused because the pass
   works directly on the callgraph.  */

class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    /* In LTO, always run the IPA passes and decide on function basis if the
       pass is enabled.  */
    if (in_lto_p)
      return true;
    return (optimize
	    && ((flag_devirtualize
		 && (flag_devirtualize_speculatively
		     || (warn_suggest_final_methods
			 || warn_suggest_final_types)))
		|| flag_speculatively_call_stored_functions));
  }

  unsigned int execute (function *) final override { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
4137 :
4138 : ipa_opt_pass_d *
4139 285722 : make_pass_ipa_devirt (gcc::context *ctxt)
4140 : {
4141 285722 : return new pass_ipa_devirt (ctxt);
4142 : }
4143 :
4144 : /* Print ODR name of a TYPE if available.
4145 : Use demangler when option DEMANGLE is used. */
4146 :
4147 : DEBUG_FUNCTION void
4148 0 : debug_tree_odr_name (tree type, bool demangle)
4149 : {
4150 0 : const char *odr = get_odr_name_for_type (type);
4151 0 : if (demangle)
4152 : {
4153 0 : const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
4154 0 : odr = cplus_demangle (odr, opts);
4155 : }
4156 :
4157 0 : fprintf (stderr, "%s\n", odr);
4158 0 : }
4159 :
4160 : /* Register ODR enum so we later stream record about its values. */
4161 :
4162 : void
4163 341 : register_odr_enum (tree t)
4164 : {
4165 341 : if (flag_lto)
4166 341 : vec_safe_push (odr_enums, t);
4167 341 : }
4168 :
/* Write ODR enums to LTO stream file.  The section starts with the
   number of enums; the per-enum payload layout is documented inline
   below and must match ipa_odr_read_section.  */

static void
ipa_odr_summary_write (void)
{
  /* Nothing to stream.  */
  if (!odr_enums && !odr_enum_map)
    return;
  struct output_block *ob = create_output_block (LTO_section_odr_types);
  unsigned int i;
  tree t;

  if (odr_enums)
    {
      streamer_write_uhwi (ob, odr_enums->length ());

      /* For every ODR enum stream out
	  - its ODR name
	  - number of values,
	  - value names and constant their represent
	  - bitpack of locations so we can do good diagnostics.  */
      FOR_EACH_VEC_ELT (*odr_enums, i, t)
	{
	  streamer_write_string (ob, ob->main_stream,
				 IDENTIFIER_POINTER
				     (DECL_ASSEMBLER_NAME (TYPE_NAME (t))),
				 true);

	  /* Count the enumerators first; the reader needs the count up
	     front.  */
	  int n = 0;
	  for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
	    n++;
	  streamer_write_uhwi (ob, n);
	  for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
	    {
	      streamer_write_string (ob, ob->main_stream,
				     IDENTIFIER_POINTER (TREE_PURPOSE (e)),
				     true);
	      streamer_write_wide_int (ob,
				       wi::to_wide (DECL_INITIAL
						    (TREE_VALUE (e))));
	    }

	  /* Locations go last, packed: enum location first, then one per
	     enumerator.  */
	  bitpack_d bp = bitpack_create (ob->main_stream);
	  lto_output_location (ob, &bp, DECL_SOURCE_LOCATION (TYPE_NAME (t)));
	  for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
	    lto_output_location (ob, &bp,
				 DECL_SOURCE_LOCATION (TREE_VALUE (e)));
	  streamer_write_bitpack (&bp);
	}
      vec_free (odr_enums);
      odr_enums = NULL;
    }
  /* During LTO incremental linking we already have streamed in types.  */
  else if (odr_enum_map)
    {
      gcc_checking_assert (!odr_enums);
      streamer_write_uhwi (ob, odr_enum_map->elements ());

      /* Same payload layout as above, but sourced from the already
	 read-in odr_enum records.  */
      hash_map<nofree_string_hash, odr_enum>::iterator iter
	= odr_enum_map->begin ();
      for (; iter != odr_enum_map->end (); ++iter)
	{
	  odr_enum &this_enum = (*iter).second;
	  streamer_write_string (ob, ob->main_stream, (*iter).first, true);

	  streamer_write_uhwi (ob, this_enum.vals.length ());
	  for (unsigned j = 0; j < this_enum.vals.length (); j++)
	    {
	      streamer_write_string (ob, ob->main_stream,
				     this_enum.vals[j].name, true);
	      streamer_write_wide_int (ob, this_enum.vals[j].val);
	    }

	  bitpack_d bp = bitpack_create (ob->main_stream);
	  lto_output_location (ob, &bp, this_enum.locus);
	  for (unsigned j = 0; j < this_enum.vals.length (); j++)
	    lto_output_location (ob, &bp, this_enum.vals[j].locus);
	  streamer_write_bitpack (&bp);
	}

      delete odr_enum_map;
      obstack_free (&odr_enum_obstack, NULL);
      odr_enum_map = NULL;
    }

  produce_asm (ob);
  destroy_output_block (ob);
}
4256 :
/* Read ODR enums from LTO stream file and warn on mismatches.
   The first definition seen of each enum is recorded in odr_enum_map;
   subsequent definitions are compared against it and -Wodr diagnostics
   are emitted for any divergence.  */

static void
ipa_odr_read_section (struct lto_file_decl_data *file_data, const char *data,
		      size_t len)
{
  const struct lto_function_header *header
    = (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;

  lto_input_block ib ((const char *) data + main_offset, header->main_size,
		      file_data);

  data_in
    = lto_data_in_create (file_data, (const char *) data + string_offset,
			  header->string_size, vNULL);
  /* Number of enums in this section; mirrors ipa_odr_summary_write.  */
  unsigned int n = streamer_read_uhwi (&ib);

  if (!odr_enum_map)
    {
      gcc_obstack_init (&odr_enum_obstack);
      odr_enum_map = new (hash_map <nofree_string_hash, odr_enum>);
    }

  for (unsigned i = 0; i < n; i++)
    {
      const char *rname = streamer_read_string (data_in, &ib);
      unsigned int nvals = streamer_read_uhwi (&ib);
      char *name;

      /* Copy the name to obstack storage that outlives the section
	 buffer.  */
      obstack_grow (&odr_enum_obstack, rname, strlen (rname) + 1);
      name = XOBFINISH (&odr_enum_obstack, char *);

      bool existed_p;
      class odr_enum &this_enum
	= odr_enum_map->get_or_insert (xstrdup (name), &existed_p);

      /* If this is first time we see the enum, remember its definition.  */
      if (!existed_p)
	{
	  this_enum.vals.safe_grow_cleared (nvals, true);
	  this_enum.warned = false;
	  if (dump_file)
	    fprintf (dump_file, "enum %s\n{\n", name);
	  for (unsigned j = 0; j < nvals; j++)
	    {
	      const char *val_name = streamer_read_string (data_in, &ib);
	      obstack_grow (&odr_enum_obstack, val_name, strlen (val_name) + 1);
	      this_enum.vals[j].name = XOBFINISH (&odr_enum_obstack, char *);
	      this_enum.vals[j].val = streamer_read_wide_int (&ib);
	      if (dump_file)
		fprintf (dump_file, " %s = " HOST_WIDE_INT_PRINT_DEC ",\n",
			 val_name, wi::fits_shwi_p (this_enum.vals[j].val)
			 ? this_enum.vals[j].val.to_shwi () : -1);
	    }
	  bitpack_d bp = streamer_read_bitpack (&ib);
	  stream_input_location (&this_enum.locus, &bp, data_in);
	  for (unsigned j = 0; j < nvals; j++)
	    stream_input_location (&this_enum.vals[j].locus, &bp, data_in);
	  data_in->location_cache.apply_location_cache ();
	  if (dump_file)
	    fprintf (dump_file, "}\n");
	}
      /* If we already have definition, compare it with new one and output
	 warnings if they differs.  */
      else
	{
	  /* Index of the first diverging enumerator, -1 for none.  */
	  int do_warning = -1;
	  char *warn_name = NULL;
	  wide_int warn_value = wi::zero (1);

	  if (dump_file)
	    fprintf (dump_file, "Comparing enum %s\n", name);

	  /* Look for differences which we will warn about later once locations
	     are streamed.  */
	  for (unsigned j = 0; j < nvals; j++)
	    {
	      const char *id = streamer_read_string (data_in, &ib);
	      wide_int val = streamer_read_wide_int (&ib);

	      if (do_warning != -1 || j >= this_enum.vals.length ())
		continue;
	      if (strcmp (id, this_enum.vals[j].name)
		  || (val.get_precision() !=
		      this_enum.vals[j].val.get_precision())
		  || val != this_enum.vals[j].val)
		{
		  warn_name = xstrdup (id);
		  warn_value = val;
		  do_warning = j;
		  if (dump_file)
		    fprintf (dump_file, " Different on entry %i\n", j);
		}
	    }

	  /* Stream in locations, but do not apply them unless we are going
	     to warn.  */
	  bitpack_d bp = streamer_read_bitpack (&ib);
	  location_t locus;

	  stream_input_location (&locus, &bp, data_in);

	  /* Did we find a difference?  */
	  if (do_warning != -1 || nvals != this_enum.vals.length ())
	    {
	      data_in->location_cache.apply_location_cache ();

	      const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
	      char *dmgname = cplus_demangle (name, opts);
	      /* Warn only once per enum; a rejected warning cancels the
		 per-value note below.  */
	      if (this_enum.warned
		  || !warning_at (this_enum.locus,
				  OPT_Wodr, "type %qs violates the "
				  "C++ One Definition Rule",
				  dmgname))
		do_warning = -1;
	      else
		{
		  this_enum.warned = true;
		  if (do_warning == -1)
		    inform (locus,
			    "an enum with different number of values is defined"
			    " in another translation unit");
		  else if (warn_name)
		    inform (locus,
			    "an enum with different value name"
			    " is defined in another translation unit");
		  else
		    inform (locus,
			    "an enum with different values"
			    " is defined in another translation unit");
		}
	    }
	  else
	    data_in->location_cache.revert_location_cache ();

	  /* Finally look up for location of the actual value that diverged.  */
	  for (unsigned j = 0; j < nvals; j++)
	    {
	      location_t id_locus;

	      data_in->location_cache.revert_location_cache ();
	      stream_input_location (&id_locus, &bp, data_in);

	      if ((int) j == do_warning)
		{
		  data_in->location_cache.apply_location_cache ();

		  if (strcmp (warn_name, this_enum.vals[j].name))
		    inform (this_enum.vals[j].locus,
			    "name %qs differs from name %qs defined"
			    " in another translation unit",
			    this_enum.vals[j].name, warn_name);
		  else if (this_enum.vals[j].val.get_precision() !=
			   warn_value.get_precision())
		    inform (this_enum.vals[j].locus,
			    "name %qs is defined as %u-bit while another "
			    "translation unit defines it as %u-bit",
			    warn_name, this_enum.vals[j].val.get_precision(),
			    warn_value.get_precision());
		  /* FIXME: In case there is easy way to print wide_ints,
		     perhaps we could do it here instead of overflow check.  */
		  else if (wi::fits_shwi_p (this_enum.vals[j].val)
			   && wi::fits_shwi_p (warn_value))
		    inform (this_enum.vals[j].locus,
			    "name %qs is defined to %wd while another "
			    "translation unit defines it as %wd",
			    warn_name, this_enum.vals[j].val.to_shwi (),
			    warn_value.to_shwi ());
		  else
		    inform (this_enum.vals[j].locus,
			    "name %qs is defined to different value "
			    "in another translation unit",
			    warn_name);

		  inform (id_locus,
			  "mismatching definition");
		}
	      else
		data_in->location_cache.revert_location_cache ();
	    }
	  if (warn_name)
	    free (warn_name);
	  /* Duplicate definition; release the obstack copy of the name.  */
	  obstack_free (&odr_enum_obstack, name);
	}
    }
  /* NOTE(review): the section was created as LTO_section_odr_types but is
     freed as LTO_section_ipa_fn_summary — confirm this is intentional.  */
  lto_free_section_data (file_data, LTO_section_ipa_fn_summary, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4450 :
4451 : /* Read all ODR type sections. */
4452 :
4453 : static void
4454 12202 : ipa_odr_summary_read (void)
4455 : {
4456 12202 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4457 12202 : struct lto_file_decl_data *file_data;
4458 12202 : unsigned int j = 0;
4459 :
4460 37661 : while ((file_data = file_data_vec[j++]))
4461 : {
4462 13257 : size_t len;
4463 13257 : const char *data
4464 13257 : = lto_get_summary_section_data (file_data, LTO_section_odr_types,
4465 : &len);
4466 13257 : if (data)
4467 96 : ipa_odr_read_section (file_data, data, len);
4468 : }
4469 : /* Enum info is used only to produce warnings. Only case we will need it
4470 : again is streaming for incremental LTO. */
4471 12202 : if (flag_incremental_link != INCREMENTAL_LINK_LTO)
4472 : {
4473 12235 : delete odr_enum_map;
4474 12169 : obstack_free (&odr_enum_obstack, NULL);
4475 12169 : odr_enum_map = NULL;
4476 : }
4477 12202 : }
4478 :
4479 : namespace {
4480 :
4481 : const pass_data pass_data_ipa_odr =
4482 : {
4483 : IPA_PASS, /* type */
4484 : "odr", /* name */
4485 : OPTGROUP_NONE, /* optinfo_flags */
4486 : TV_IPA_ODR, /* tv_id */
4487 : 0, /* properties_required */
4488 : 0, /* properties_provided */
4489 : 0, /* properties_destroyed */
4490 : 0, /* todo_flags_start */
4491 : 0, /* todo_flags_finish */
4492 : };
4493 :
4494 : class pass_ipa_odr : public ipa_opt_pass_d
4495 : {
4496 : public:
4497 285722 : pass_ipa_odr (gcc::context *ctxt)
4498 : : ipa_opt_pass_d (pass_data_ipa_odr, ctxt,
4499 : NULL, /* generate_summary */
4500 : ipa_odr_summary_write, /* write_summary */
4501 : ipa_odr_summary_read, /* read_summary */
4502 : NULL, /* write_optimization_summary */
4503 : NULL, /* read_optimization_summary */
4504 : NULL, /* stmt_fixup */
4505 : 0, /* function_transform_todo_flags_start */
4506 : NULL, /* function_transform */
4507 285722 : NULL) /* variable_transform */
4508 285722 : {}
4509 :
4510 : /* opt_pass methods: */
4511 586755 : bool gate (function *) final override
4512 : {
4513 586755 : return (in_lto_p || flag_lto);
4514 : }
4515 :
4516 23260 : unsigned int execute (function *) final override
4517 : {
4518 23260 : return 0;
4519 : }
4520 :
4521 : }; // class pass_ipa_odr
4522 :
4523 : } // anon namespace
4524 :
/* Factory function used by the pass manager to instantiate the ODR
   streaming pass in context CTXT.  */

ipa_opt_pass_d *
make_pass_ipa_odr (gcc::context *ctxt)
{
  return new pass_ipa_odr (ctxt);
}
4530 :
4531 :
4532 : #include "gt-ipa-devirt.h"
|