Line data Source code
1 : /* Basic IPA optimizations and utilities.
2 : Copyright (C) 2003-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "target.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "alloc-pool.h"
28 : #include "tree-pass.h"
29 : #include "stringpool.h"
30 : #include "cgraph.h"
31 : #include "gimplify.h"
32 : #include "tree-iterator.h"
33 : #include "ipa-utils.h"
34 : #include "symbol-summary.h"
35 : #include "tree-vrp.h"
36 : #include "sreal.h"
37 : #include "ipa-cp.h"
38 : #include "ipa-prop.h"
39 : #include "ipa-fnsummary.h"
40 : #include "dbgcnt.h"
41 : #include "debug.h"
42 : #include "stringpool.h"
43 : #include "attribs.h"
44 :
45 : /* Return true when NODE has ADDR reference. */
46 :
47 : static bool
48 3320738 : has_addr_references_p (struct cgraph_node *node,
49 : void *)
50 : {
51 3320738 : int i;
52 3320738 : struct ipa_ref *ref = NULL;
53 :
54 3422390 : for (i = 0; node->iterate_referring (i, ref); i++)
55 3329425 : if (ref->use == IPA_REF_ADDR)
56 : return true;
57 : return false;
58 : }
59 :
60 : /* Return true when NODE can be target of an indirect call. */
61 :
62 : static bool
63 508 : is_indirect_call_target_p (struct cgraph_node *node, void *)
64 : {
65 508 : return node->indirect_call_target;
66 : }
67 :
68 : /* Look for all functions inlined to NODE and update their inlined_to pointers
69 : to INLINED_TO. */
70 :
71 : static void
72 0 : update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
73 : {
74 0 : struct cgraph_edge *e;
75 0 : for (e = node->callees; e; e = e->next_callee)
76 0 : if (e->callee->inlined_to)
77 : {
78 0 : e->callee->inlined_to = inlined_to;
79 0 : update_inlined_to_pointer (e->callee, inlined_to);
80 : }
81 0 : }
82 :
83 : /* Add symtab NODE to queue starting at FIRST.
84 :
85 : The queue is linked via AUX pointers and terminated by pointer to 1.
86 : We enqueue nodes at two occasions: when we find them reachable or when we find
87 : their bodies needed for further clonning. In the second case we mark them
88 : by pointer to 2 after processing so they are re-queue when they become
89 : reachable. */
90 :
91 : static void
92 143066039 : enqueue_node (symtab_node *node, symtab_node **first,
93 : hash_set<symtab_node *> *reachable)
94 : {
95 : /* Node is still in queue; do nothing. */
96 143066039 : if (node->aux && node->aux != (void *) 2)
97 : return;
98 : /* Node was already processed as unreachable, re-enqueue
99 : only if it became reachable now. */
100 79524723 : if (node->aux == (void *)2 && !reachable->contains (node))
101 : return;
102 50067157 : node->aux = *first;
103 50067157 : *first = node;
104 : }
105 :
106 : /* Return true if NODE may get inlined later.
107 : This is used to keep DECL_EXTERNAL function bodies around long enough
108 : so inliner can proces them. */
109 :
110 : static bool
111 772168 : possible_inline_candidate_p (symtab_node *node)
112 : {
113 772168 : if (symtab->state >= IPA_SSA_AFTER_INLINING)
114 : return false;
115 748916 : cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
116 717874 : if (!cnode)
117 : return false;
118 717874 : if (DECL_UNINLINABLE (cnode->decl))
119 : return false;
120 715309 : if (opt_for_fn (cnode->decl, optimize))
121 : return true;
122 2043 : if (symtab->state >= IPA_SSA)
123 : return false;
124 1697 : return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
125 : }
126 :
/* Process references of SNODE: mark every referred definition reachable
   (including alias targets whose bodies must survive) and enqueue each
   referred symbol for the reachability walk.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      /* A referred definition in this partition is reachable when it is
	 non-external (or an alias), may still be inlined, or is an external
	 variable whose constructor is useful for folding under LTO.  */
      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (possible_inline_candidate_p (node)
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && (flag_wpa
			  || flag_incremental_link
				 == INCREMENTAL_LINK_LTO)
		      && dyn_cast <varpool_node *> (node)
			   ->ctor_useable_for_folding_p ()))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && symtab->state < IPA_SSA_AFTER_INLINING)
	    reachable->add (body);
	  reachable->add (node);
	}
      /* Even when not marked reachable, the symbol stays in the boundary.  */
      enqueue_node (node, first, reachable);
    }
}
165 :
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; process each list only once
     across all polymorphic calls seen by this pass.  */
  if (cache_token != NULL && !reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (possible_inline_candidate_p (body)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && symtab->state < IPA_SSA_AFTER_INLINING)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No target at all: the call is provably unreachable.  */
	    target = cgraph_node::get_create (builtin_decl_unreachable ());

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = cgraph_edge::make_direct (edge, target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node->inlined_to
					   ? node->inlined_to : node);
	  else if (edge->call_stmt)
	    cgraph_edge::redirect_call_stmt_to_callee (edge);
	}
    }
}
255 :
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain set of function declarations where body needs to stay in
     body_needed_for_clonning

     Inline clones represent special case: their declaration match the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that need
   to stay) and symbols that are in boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we always
   turn definition into a declaration, but we may keep function body around
   based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  symtab_node *snode;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      /* AUX pointers must be free for use as the queue links.  */
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on
     them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Declarations or symbols in other partitions are also needed if referenced
     from asm.  */
  FOR_EACH_SYMBOL (snode)
    if (snode->ref_by_asm)
      enqueue_node (snode, &first, &reachable);

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      /* Pop the queue head; AUX links the queue.  */
      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *)2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  /* Propagate used_as_abstract_origin so debug info for the origin
	     (and clones sharing its decl) is preserved.  */
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any non-external and non-local symbol in a comdat group is
	     reachable, force all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group
	      && node->externally_visible
	      && !DECL_EXTERNAL (node->decl))
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !DECL_EXTERNAL (next->decl)
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      /* Fetch NEXT first: walk_polymorphic_call_targets may
			 turn E into a direct call and unlink it.  */
		      next = e->next_callee;
		      if (usable_polymorphic_info_p (e->indirect_info))
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable);
		    }
		}

	      /* A reference to the default node implies use of all the other
		 versions (they get used in the function resolver made later
		 in multiple_target.cc) */
	      cgraph_function_version_info *node_v = cnode->function_version ();
	      if (node_v && is_function_default_version (node->decl))
		for (cgraph_function_version_info *fvi = node_v->next;
		     fvi;
		     fvi = fvi->next)
		  enqueue_node (fvi->this_node, &first, &reachable);

	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || possible_inline_candidate_p (e->callee)))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && symtab->state < IPA_SSA_AFTER_INLINING)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when
		 removing offline copy of the function we don't kill it.  */
	      if (cnode->inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary and
		 ensure that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk)
	    /* Thunks in the boundary still need their real callee.  */
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in
	 the boundary.  This will also hold initializers of the external vars
	 NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.   */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl))
	    {
	      /* Make the node a non-clone so that we do not attempt to
		 materialize it later.  */
	      if (node->clone_of)
		node->remove_from_clone_tree ();
	      node->release_body ();
	    }
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      /* Turn the definition into a plain declaration.  */
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
	  /* Recompute VNEXT: removing aliases above may have removed it.  */
	  vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->dump_name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	      || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	    (has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->dump_name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (symtab->state >= IPA_SSA_AFTER_INLINING
		    || !node->call_for_symbol_and_aliases
			 (is_indirect_call_target_p, NULL, true)))
	      {
		node->local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
705 :
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  Recurses through aliases so flags reflect
   uses of the whole alias set.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  /* Stop early once refs become non-explicit or all three flags are
     already set; nothing further can change.  */
  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	/* Accumulate uses of the alias as uses of VNODE itself.  */
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      }
}
741 :
742 : /* Set TREE_READONLY bit. */
743 :
744 : bool
745 78300 : set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
746 : {
747 78300 : TREE_READONLY (vnode->decl) = true;
748 78300 : return false;
749 : }
750 :
751 : /* Set writeonly bit and clear the initalizer, since it will not be needed. */
752 :
753 : bool
754 25986 : set_writeonly_bit (varpool_node *vnode, void *data)
755 : {
756 25986 : vnode->writeonly = true;
757 25986 : if (optimize || in_lto_p)
758 : {
759 25986 : DECL_INITIAL (vnode->decl) = NULL;
760 25986 : if (!vnode->alias)
761 : {
762 25986 : if (vnode->num_references ())
763 211 : *(bool *)data = true;
764 25986 : vnode->remove_all_references ();
765 : }
766 : }
767 25986 : return false;
768 : }
769 :
770 : /* Clear addressale bit of VNODE. */
771 :
772 : bool
773 176410 : clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
774 : {
775 176410 : vnode->address_taken = false;
776 176410 : TREE_ADDRESSABLE (vnode->decl) = 0;
777 176410 : return false;
778 : }
779 :
/* Discover variables that have no longer address taken, are read-only or
   write-only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_variable_flags (void)
{
  if (!flag_ipa_reference_addressable)
    return false;

  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Skip aliases (handled via their targets) and variables whose flags are
     already as tight as they can get.  */
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	/* With implicit references we cannot be sure about all uses.  */
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)",
		       vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->dump_name ());
	    /* Dropping refs may leave symbols unreachable; REMOVE_P asks
	       the caller to rerun unreachable-node removal.  */
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
846 :
847 : /* Generate and emit a static constructor or destructor. WHICH must
848 : be one of 'I' (for a constructor), 'D' (for a destructor).
849 : BODY is a STATEMENT_LIST containing GENERIC
850 : statements. PRIORITY is the initialization priority for this
851 : constructor or destructor.
852 :
853 : FINAL specify whether the externally visible name for collect2 should
854 : be produced. */
855 :
static tree
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
			     tree optimization,
			     tree target)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (!targetm.have_ctors_dtors && final)
    {
      sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
      name = get_file_function_name (which_buf);
    }
  else
    {
      /* Produce a sane name but one not recognizable by collect2, just for the
	 case we fail to inline the function.  */
      sprintf (which_buf, "_sub_%c_%.5d_%d", which, priority, counter++);
      name = get_identifier (which_buf);
    }

  /* Build a "void fn (void)" FUNCTION_DECL with a void RESULT_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  /* Compile with the OPTIMIZATION and TARGET options supplied by the caller
     (the cdtors this body will call; see build_cdtor) instead of the
     current global options.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
  DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      /* The magic collect2-visible name must be exported and kept alive.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the function as a constructor or destructor and record its
     priority so it is run at the right point.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  /* Restore global state changed while building the decl.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  return decl;
}
937 :
938 : /* Generate and emit a static constructor or destructor. WHICH must
939 : be one of 'I' (for a constructor) or 'D' (for a destructor).
940 : BODY is a STATEMENT_LIST containing GENERIC
941 : statements. PRIORITY is the initialization priority for this
942 : constructor or destructor. */
943 :
void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FIXME: We should be able to
     gcc_assert (!in_lto_p);
     because at LTO time the global options are not safe to use.
     Unfortunately ASAN finish_file will produce constructors late and they
     may lead to surprises.  */
  /* Not FINAL: no collect2-visible name is needed here; use the default
     optimization and target options.  */
  cgraph_build_static_cdtor_1 (which, body, priority, false,
			       optimization_default_node,
			       target_option_default_node);
}
956 :
957 : /* When target does not have ctors and dtors, we call all constructor
958 : and destructor by special initialization/destruction function
959 : recognized by collect2.
960 :
961 : When we are going to build this function, collect all constructors and
962 : destructors and turn them into normal functions. */
963 :
964 : static void
965 103 : record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
966 : {
967 103 : if (DECL_STATIC_CONSTRUCTOR (node->decl))
968 78 : ctors->safe_push (node->decl);
969 103 : if (DECL_STATIC_DESTRUCTOR (node->decl))
970 29 : dtors->safe_push (node->decl);
971 103 : node = cgraph_node::get (node->decl);
972 103 : DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
973 103 : }
974 :
975 : /* Define global constructors/destructor functions for the CDTORS, of
976 : which they are LEN. The CDTORS are sorted by initialization
977 : priority. If CTOR_P is true, these are constructors; otherwise,
978 : they are destructors. */
979 :
static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J over the run [I, J) of cdtors sharing the priority of
	 CDTORS[I]; CDTORS is sorted by priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Emit a call to each constructor/destructor in the batch [I, J)
	 sharing the same initialization priority, clearing the cdtor flag
	 since the wrapper built below takes over that role.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them. */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority. */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
				   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
				   DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
    }
}
1041 :
1042 : /* Helper functions for build_cxa_dtor_registrations ().
1043 : Build a decl for __cxa_atexit (). */
1044 :
1045 : static tree
1046 0 : build_cxa_atexit_decl ()
1047 : {
1048 : /* The parameter to "__cxa_atexit" is "void (*)(void *)". */
1049 0 : tree fn_type = build_function_type_list (void_type_node,
1050 : ptr_type_node, NULL_TREE);
1051 0 : tree fn_ptr_type = build_pointer_type (fn_type);
1052 : /* The declaration for `__cxa_atexit' is:
1053 : int __cxa_atexit (void (*)(void *), void *, void *). */
1054 0 : const char *name = "__cxa_atexit";
1055 0 : tree cxa_name = get_identifier (name);
1056 0 : fn_type = build_function_type_list (integer_type_node, fn_ptr_type,
1057 : ptr_type_node, ptr_type_node, NULL_TREE);
1058 0 : tree atexit_fndecl = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
1059 : cxa_name, fn_type);
1060 0 : SET_DECL_ASSEMBLER_NAME (atexit_fndecl, cxa_name);
1061 0 : DECL_VISIBILITY (atexit_fndecl) = VISIBILITY_DEFAULT;
1062 0 : DECL_VISIBILITY_SPECIFIED (atexit_fndecl) = true;
1063 0 : set_call_expr_flags (atexit_fndecl, ECF_LEAF | ECF_NOTHROW);
1064 0 : TREE_PUBLIC (atexit_fndecl) = true;
1065 0 : DECL_EXTERNAL (atexit_fndecl) = true;
1066 0 : DECL_ARTIFICIAL (atexit_fndecl) = true;
1067 0 : return atexit_fndecl;
1068 : }
1069 :
1070 : /* Build a decl for __dso_handle. */
1071 :
static tree
build_dso_handle_decl ()
{
  /* Declare the __dso_handle variable as an external reference; the
     runtime (or crt objects) define it.  */
  tree dso_handle_decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
				     get_identifier ("__dso_handle"),
				     ptr_type_node);
  TREE_PUBLIC (dso_handle_decl) = true;
  DECL_EXTERNAL (dso_handle_decl) = true;
  DECL_ARTIFICIAL (dso_handle_decl) = true;
#ifdef HAVE_GAS_HIDDEN
  /* NOTE(review): build_decl does not return error_mark_node, so this
     guard looks vestigial (likely copied from the C++ front end) —
     confirm before removing.  */
  if (dso_handle_decl != error_mark_node)
    {
      DECL_VISIBILITY (dso_handle_decl) = VISIBILITY_HIDDEN;
      DECL_VISIBILITY_SPECIFIED (dso_handle_decl) = true;
    }
#endif
  return dso_handle_decl;
}
1091 :
1092 : /* This builds one or more constructor functions that register DTORs with
1093 : __cxa_atexit (). Within a priority level, DTORs are registered in TU
1094 : order - which means that they will run in reverse TU order from cxa_atexit.
1095 : This is the same behavior as using a .fini / .mod_term_funcs section.
1096 : As the functions are built, they are appended to the CTORs vector. */
1097 :
static void
build_cxa_dtor_registrations (const vec<tree> &dtors, vec<tree> *ctors)
{
  size_t i,j;
  size_t len = dtors.length ();

  /* The generated registrations carry no meaningful source location.  */
  location_t sav_loc = input_location;
  input_location = UNKNOWN_LOCATION;

  tree atexit_fndecl = build_cxa_atexit_decl ();
  tree dso_handle_decl = build_dso_handle_decl ();

  /* We want &__dso_handle. */
  tree dso_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
			     ptr_type_node, dso_handle_decl);

  i = 0;
  while (i < len)
    {
      priority_type priority = 0;
      tree body = NULL_TREE;
      j = i;
      /* Advance J over the run [I, J) of destructors sharing the priority
	 of DTORS[I]; DTORS is sorted by priority.  */
      do
	{
	  priority_type p;
	  tree fn = dtors[j];
	  p = DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* For each destructor in the batch, emit
	 __cxa_atexit (&fn, NULL, &__dso_handle) and drop its DTOR flag;
	 the registration CTOR built below replaces it.  */
      for (;i < j; i++)
	{
	  tree fn = dtors[i];
	  DECL_STATIC_DESTRUCTOR (fn) = 0;
	  tree dtor_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
				      ptr_type_node, fn);
	  tree call_cxa_atexit
	    = build_call_expr_loc (UNKNOWN_LOCATION, atexit_fndecl, 3,
				   dtor_ptr, null_pointer_node, dso_ptr);
	  TREE_SIDE_EFFECTS (call_cxa_atexit) = 1;
	  append_to_statement_list (call_cxa_atexit, &body);
	}

      gcc_assert (body != NULL_TREE);
      /* Generate a function to register the DTORs at this priority. */
      tree new_ctor
	= cgraph_build_static_cdtor_1 ('I', body, priority, true,
				       DECL_FUNCTION_SPECIFIC_OPTIMIZATION (dtors[0]),
				       DECL_FUNCTION_SPECIFIC_TARGET (dtors[0]));
      /* Add this to the list of ctors. */
      ctors->safe_push (new_ctor);
    }
  input_location = sav_loc;
}
1159 :
1160 : /* Comparison function for qsort. P1 and P2 are actually of type
1161 : "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1162 : used to determine the sort order. */
1163 :
1164 : static int
1165 96 : compare_ctor (const void *p1, const void *p2)
1166 : {
1167 96 : tree f1;
1168 96 : tree f2;
1169 96 : int priority1;
1170 96 : int priority2;
1171 :
1172 96 : f1 = *(const tree *)p1;
1173 96 : f2 = *(const tree *)p2;
1174 96 : priority1 = DECL_INIT_PRIORITY (f1);
1175 96 : priority2 = DECL_INIT_PRIORITY (f2);
1176 :
1177 96 : if (priority1 < priority2)
1178 : return -1;
1179 40 : else if (priority1 > priority2)
1180 : return 1;
1181 : else
1182 : /* Ensure a stable sort. Constructors are executed in backwarding
1183 : order to make LTO initialize braries first. */
1184 16 : return DECL_UID (f2) - DECL_UID (f1);
1185 : }
1186 :
1187 : /* Comparison function for qsort. P1 and P2 are actually of type
1188 : "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1189 : used to determine the sort order. */
1190 :
1191 : static int
1192 96 : compare_dtor (const void *p1, const void *p2)
1193 : {
1194 96 : tree f1;
1195 96 : tree f2;
1196 96 : int priority1;
1197 96 : int priority2;
1198 :
1199 96 : f1 = *(const tree *)p1;
1200 96 : f2 = *(const tree *)p2;
1201 96 : priority1 = DECL_FINI_PRIORITY (f1);
1202 96 : priority2 = DECL_FINI_PRIORITY (f2);
1203 :
1204 96 : if (priority1 < priority2)
1205 : return -1;
1206 40 : else if (priority1 > priority2)
1207 : return 1;
1208 : else
1209 : /* Ensure a stable sort - into TU order. */
1210 16 : return DECL_UID (f1) - DECL_UID (f2);
1211 : }
1212 :
1213 : /* Comparison function for qsort. P1 and P2 are of type "tree *" and point to
1214 : a pair of static constructors or destructors. We first sort on the basis of
1215 : priority and then into TU order (on the strict assumption that DECL_UIDs are
1216 : ordered in the same way as the original functions). ???: this seems quite
1217 : fragile. */
1218 :
1219 : static int
1220 0 : compare_cdtor_tu_order (const void *p1, const void *p2)
1221 : {
1222 0 : tree f1;
1223 0 : tree f2;
1224 0 : int priority1;
1225 0 : int priority2;
1226 :
1227 0 : f1 = *(const tree *)p1;
1228 0 : f2 = *(const tree *)p2;
1229 : /* We process the DTORs first, and then remove their flag, so this order
1230 : allows for functions that are declared as both CTOR and DTOR. */
1231 0 : if (DECL_STATIC_DESTRUCTOR (f1))
1232 : {
1233 0 : gcc_checking_assert (DECL_STATIC_DESTRUCTOR (f2));
1234 0 : priority1 = DECL_FINI_PRIORITY (f1);
1235 0 : priority2 = DECL_FINI_PRIORITY (f2);
1236 : }
1237 : else
1238 : {
1239 0 : priority1 = DECL_INIT_PRIORITY (f1);
1240 0 : priority2 = DECL_INIT_PRIORITY (f2);
1241 : }
1242 :
1243 0 : if (priority1 < priority2)
1244 : return -1;
1245 0 : else if (priority1 > priority2)
1246 : return 1;
1247 : else
1248 : /* For equal priority, sort into the order of definition in the TU. */
1249 0 : return DECL_UID (f1) - DECL_UID (f2);
1250 : }
1251 :
1252 : /* Generate functions to call static constructors and destructors
1253 : for targets that do not support .ctors/.dtors sections. These
1254 : functions have magic names which are detected by collect2. */
1255 :
static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  /* This path is only taken when the target has no ctor/dtor support or
     at LTO time (see pass_ipa_cdtor_merge::gate).  */
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}
1273 :
1274 : /* Generate new CTORs to register static destructors with __cxa_atexit and add
1275 : them to the existing list of CTORs; we then process the revised CTORs list.
1276 :
1277 : We sort the DTORs into priority and then TU order, this means that they are
1278 : registered in that order with __cxa_atexit () and therefore will be run in
1279 : the reverse order.
1280 :
1281 : Likewise, CTORs are sorted into priority and then TU order, which means that
1282 : they will run in that order.
1283 :
1284 : This matches the behavior of using init/fini or mod_init_func/mod_term_func
1285 : sections. */
1286 :
static void
build_cxa_atexit_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  /* Process the DTORs first; this appends the generated registration
     CTORs to *CTORS, which are then merged below with the rest.  */
  if (!dtors->is_empty ())
    {
      gcc_assert (targetm.dtors_from_cxa_atexit);
      dtors->qsort (compare_cdtor_tu_order);
      build_cxa_dtor_registrations (*dtors, ctors);
    }

  if (!ctors->is_empty ())
    {
      gcc_assert (targetm.dtors_from_cxa_atexit);
      ctors->qsort (compare_cdtor_tu_order);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }
}
1304 :
1305 : /* Look for constructors and destructors and produce function calling them.
1306 : This is needed for targets not supporting ctors or dtors, but we perform the
1307 : transformation also at linktime to merge possibly numerous
1308 : constructors/destructors into single function to improve code locality and
1309 : reduce size. */
1310 :
static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  /* Collect all cdtors, then lower them either to __cxa_atexit
     registrations or to merged collect2-style wrapper functions.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node, &ctors, &dtors);
  if (targetm.dtors_from_cxa_atexit)
    build_cxa_atexit_fns (&ctors, &dtors);
  else
    build_cdtor_fns (&ctors, &dtors);
  return 0;
}
1329 :
namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapping ipa_cdtor_merge; it has no summaries or transform
   hooks since all the work happens at once in execute.  */

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate (function *) final override;
  unsigned int execute (function *) final override
  {
    return ipa_cdtor_merge ();
  }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p || targetm.dtors_from_cxa_atexit;
}

} // anon namespace
1380 :
/* Factory function returning a fresh pass_ipa_cdtor_merge instance.  */

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
1386 :
/* Invalid pointer representing BOTTOM for single user dataflow, i.e. the
   variable is known to have more than one using function.  The value 2 is
   distinct from any real pointer and from the worklist terminator 1 used
   in ipa_single_use below.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)
1389 :
/* Meet operation for the single user dataflow.
   Here we want to associate each variable with the single function that may
   access it.

   FUNCTION is the current single user of a variable, VAR is a variable it
   is met with.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
   - TOP by no entry in SINGLE_USER_MAP
   - BOTTOM by BOTTOM in AUX pointer (to save lookups)
   - known single user by cgraph pointer in SINGLE_USER_MAP. */
1400 :
1401 : cgraph_node *
1402 3436430 : meet (cgraph_node *function, varpool_node *var,
1403 : hash_map<varpool_node *, cgraph_node *> &single_user_map)
1404 : {
1405 3436430 : struct cgraph_node *user, **f;
1406 :
1407 3436430 : if (var->aux == BOTTOM)
1408 : return BOTTOM;
1409 :
1410 2453828 : f = single_user_map.get (var);
1411 2453828 : if (!f)
1412 : return function;
1413 1002073 : user = *f;
1414 1002073 : if (!function)
1415 : return user;
1416 980020 : else if (function != user)
1417 : return BOTTOM;
1418 : else
1419 : return function;
1420 : }
1421 :
1422 : /* Propagation step of single-use dataflow.
1423 :
1424 : Check all uses of VNODE and see if they are used by single function FUNCTION.
1425 : SINGLE_USER_MAP represents the dataflow lattice. */
1426 :
cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  /* Externally visible variables can be used from anywhere; callers must
     have filtered them out already.  */
  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target. */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function. */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  /* Attribute uses from inline clones to the function the body
	     was inlined into.  */
	  if (cnode->inlined_to)
	    cnode = cnode->inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	/* The referring symbol is another variable; meet with its lattice
	   value.  */
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}
1459 :
/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it. */
1463 :
static unsigned int
ipa_single_use (void)
{
  /* Worklist of variables, singly linked through their AUX pointers;
     the value 1 terminates the list (it cannot collide with BOTTOM == 2
     or a real node pointer).  */
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow. */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow. */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      /* Pop the worklist head.  */
      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references. */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing. */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing. */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var. */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  /* Second walk: commit the results and clear AUX pointers.  */
  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong. */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}
1555 :
namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapping ipa_single_use; no gate method is overridden and no
   summaries or transform hooks are used.  */

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace
1593 :
/* Factory function returning a fresh pass_ipa_single_use instance.  */

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}
1599 :
|