Line data Source code
1 : /* Basic IPA optimizations and utilities.
2 : Copyright (C) 2003-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "target.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "alloc-pool.h"
28 : #include "tree-pass.h"
29 : #include "stringpool.h"
30 : #include "cgraph.h"
31 : #include "gimplify.h"
32 : #include "tree-iterator.h"
33 : #include "ipa-utils.h"
34 : #include "symbol-summary.h"
35 : #include "tree-vrp.h"
36 : #include "sreal.h"
37 : #include "ipa-cp.h"
38 : #include "ipa-prop.h"
39 : #include "ipa-fnsummary.h"
40 : #include "dbgcnt.h"
41 : #include "debug.h"
42 : #include "stringpool.h"
43 : #include "attribs.h"
44 :
45 : /* Return true when NODE has ADDR reference. */
46 :
47 : static bool
48 3349132 : has_addr_references_p (struct cgraph_node *node,
49 : void *)
50 : {
51 3349132 : int i;
52 3349132 : struct ipa_ref *ref = NULL;
53 :
54 3450229 : for (i = 0; node->iterate_referring (i, ref); i++)
55 3358942 : if (ref->use == IPA_REF_ADDR)
56 : return true;
57 : return false;
58 : }
59 :
60 : /* Return true when NODE can be target of an indirect call. */
61 :
62 : static bool
63 508 : is_indirect_call_target_p (struct cgraph_node *node, void *)
64 : {
65 508 : return node->indirect_call_target;
66 : }
67 :
68 : /* Look for all functions inlined to NODE and update their inlined_to pointers
69 : to INLINED_TO. */
70 :
71 : static void
72 0 : update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
73 : {
74 0 : struct cgraph_edge *e;
75 0 : for (e = node->callees; e; e = e->next_callee)
76 0 : if (e->callee->inlined_to)
77 : {
78 0 : e->callee->inlined_to = inlined_to;
79 0 : update_inlined_to_pointer (e->callee, inlined_to);
80 : }
81 0 : }
82 :
83 : /* Add symtab NODE to queue starting at FIRST.
84 :
85 : The queue is linked via AUX pointers and terminated by pointer to 1.
86 : We enqueue nodes at two occasions: when we find them reachable or when we find
87 : their bodies needed for further clonning. In the second case we mark them
88 : by pointer to 2 after processing so they are re-queue when they become
89 : reachable. */
90 :
91 : static void
92 143471141 : enqueue_node (symtab_node *node, symtab_node **first,
93 : hash_set<symtab_node *> *reachable)
94 : {
95 : /* Node is still in queue; do nothing. */
96 143471141 : if (node->aux && node->aux != (void *) 2)
97 : return;
98 : /* Node was already processed as unreachable, re-enqueue
99 : only if it became reachable now. */
100 79765101 : if (node->aux == (void *)2 && !reachable->contains (node))
101 : return;
102 50128600 : node->aux = *first;
103 50128600 : *first = node;
104 : }
105 :
106 : /* Return true if NODE may get inlined later.
107 : This is used to keep DECL_EXTERNAL function bodies around long enough
108 : so inliner can proces them. */
109 :
110 : static bool
111 765397 : possible_inline_candidate_p (symtab_node *node)
112 : {
113 765397 : if (symtab->state >= IPA_SSA_AFTER_INLINING)
114 : return false;
115 741650 : cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
116 711623 : if (!cnode)
117 : return false;
118 711623 : if (DECL_UNINLINABLE (cnode->decl))
119 : return false;
120 709447 : if (opt_for_fn (cnode->decl, optimize))
121 : return true;
122 2065 : if (symtab->state >= IPA_SSA)
123 : return false;
124 1716 : return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
125 : }
126 :
/* Process all references of SNODE: mark defined referred symbols as
   reachable (when appropriate) and put every referred symbol into the
   work queue starting at FIRST.  REACHABLE is the set of symbols known
   to be kept; see remove_unreachable_nodes for the overall protocol.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      /* A referred definition stays reachable when it is local to this
	 partition and either is non-external (or an alias), may still be
	 inlined, or is a variable whose constructor is useful for
	 folding during LTO partitioning.  */
      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (possible_inline_candidate_p (node)
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && (flag_wpa
			  || flag_incremental_link
				 == INCREMENTAL_LINK_LTO)
		      && dyn_cast <varpool_node *> (node)
			   ->ctor_useable_for_folding_p ()))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && symtab->state < IPA_SSA_AFTER_INLINING)
	    reachable->add (body);
	  reachable->add (node);
	}
      /* Even symbols not marked reachable enter the queue so they are
	 kept in the boundary.  */
      enqueue_node (node, first, reachable);
    }
}
165 :
/* EDGE is a polymorphic call.  Before inlining (symtab->state below
   IPA_SSA_AFTER_INLINING), mark all its potential targets as reachable to
   permit later inlining if devirtualization happens.  After inlining still
   keep their declarations around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.

   REACHABLE_CALL_TARGETS caches polymorphic-call cache tokens already
   walked so each target set is processed only once; FIRST and REACHABLE
   are the work queue and reachable set of remove_unreachable_nodes.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* add returns true when the token was already present; skip sets we
     have walked before.  */
  if (cache_token != NULL && !reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (possible_inline_candidate_p (body)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && symtab->state < IPA_SSA_AFTER_INLINING)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No target at all: the call is provably unreachable.  */
	    target = cgraph_node::get_create (builtin_decl_unreachable ());

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = cgraph_edge::make_direct (edge, target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node->inlined_to
					   ? node->inlined_to : node);
	  else if (edge->call_stmt)
	    cgraph_edge::redirect_call_stmt_to_callee (edge);
	}
    }
}
255 :
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions needs special handling; the bodies
     needs to stay in memory until inlining in hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable
     in hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones needs bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is
     unreachable otherwise.  To avoid origin from sitting in the callgraph
     and being walked by IPA passes, we turn them into unanalyzed nodes with
     body defined.

     We maintain set of function declaration where body needs to stay in
     body_needed_for_clonning

     Inline clones represent special case: their declaration match the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by
     them to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that
   needs to stay) and symbols that are in boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we
   always turn definition into a declaration, but we may keep function body
   around based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.

   When FILE is non-NULL, log names of reclaimed symbols to it.  Return
   true if the symbol table changed.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  symtab_node *snode;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  /* AUX pointers must be clear on entry; they are used as queue links.  */
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and
     thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Declarations or symbols in other partitions are also needed if
     referenced from asm.  */
  FOR_EACH_SYMBOL (snode)
    if (snode->ref_by_asm)
      enqueue_node (snode, &first, &reachable);

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *)2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  /* Keep abstract origins of reachable functions so debug info can
	     refer to them; also mark matching clones.  */
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any non-external and non-local symbol in a comdat group is
	     reachable, force all externally visible symbols in the same
	     comdat group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group
	      && node->externally_visible
	      && !DECL_EXTERNAL (node->decl))
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !DECL_EXTERNAL (next->decl)
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to
	     extern inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (usable_polymorphic_info_p (e->indirect_info))
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable);
		    }
		}

	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || possible_inline_candidate_p (e->callee)))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && symtab->state < IPA_SSA_AFTER_INLINING)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when
		 removing offline copy of the function we don't kill it.  */
	      if (cnode->inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary
		 and ensure that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* A reference to the default node implies use of all the other
	     versions (they get used in the function resolver made later
	     in multiple_target.cc) */
	  cgraph_function_version_info *node_v = cnode->function_version ();
	  if (node_v && is_function_default_version (node->decl))
	    for (cgraph_function_version_info *fvi = node_v->next; fvi;
		 fvi = fvi->next)
	      enqueue_node (fvi->this_node, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes
	 in the boundary.  This will also hold initializers of the external
	 vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl))
	    {
	      /* Make the node a non-clone so that we do not attempt to
		 materialize it later.  */
	      if (node->clone_of)
		node->remove_from_clone_tree ();
	      node->release_body ();
	    }
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows
     further cloning.  If the function the clone is inlined into is removed,
     we need to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly
	     lookup the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
	  /* Removing aliases may have removed following variables;
	     recompute the iterator.  */
	  vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->dump_name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	      || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	    (has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->dump_name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (symtab->state >= IPA_SSA_AFTER_INLINING
		    || !node->call_for_symbol_and_aliases
			 (is_indirect_call_target_p, NULL, true)))
	      {
		node->local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
704 :
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.

   The loop stops early once *EXPLICIT_REFS goes false or all three flags
   are known to be set, since no further reference can change the answer.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  /* Volatile variables and variables with non-explicit references cannot
     be reasoned about from the reference list alone.  */
  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	/* References to an alias of VNODE count as references to VNODE
	   itself; recurse into the referring alias.  */
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      }
}
740 :
741 : /* Set TREE_READONLY bit. */
742 :
743 : bool
744 78357 : set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
745 : {
746 78357 : TREE_READONLY (vnode->decl) = true;
747 78357 : return false;
748 : }
749 :
750 : /* Set writeonly bit and clear the initalizer, since it will not be needed. */
751 :
752 : bool
753 25986 : set_writeonly_bit (varpool_node *vnode, void *data)
754 : {
755 25986 : vnode->writeonly = true;
756 25986 : if (optimize || in_lto_p)
757 : {
758 25986 : DECL_INITIAL (vnode->decl) = NULL;
759 25986 : if (!vnode->alias)
760 : {
761 25986 : if (vnode->num_references ())
762 211 : *(bool *)data = true;
763 25986 : vnode->remove_all_references ();
764 : }
765 : }
766 25986 : return false;
767 : }
768 :
769 : /* Clear addressale bit of VNODE. */
770 :
771 : bool
772 176551 : clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
773 : {
774 176551 : vnode->address_taken = false;
775 176551 : TREE_ADDRESSABLE (vnode->decl) = 0;
776 176551 : return false;
777 : }
778 :
/* Discover variables that have no longer address taken, are read-only or
   write-only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_variable_flags (void)
{
  if (!flag_ipa_reference_addressable)
    return false;

  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Skip aliases (handled via their targets) and variables whose flags
     are already as strong as they can get.  */
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	/* With non-explicit (or volatile) references we cannot prove
	   anything; leave the flags alone.  */
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)",
		       vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
845 :
846 : /* Generate and emit a static constructor or destructor. WHICH must
847 : be one of 'I' (for a constructor), 'D' (for a destructor).
848 : BODY is a STATEMENT_LIST containing GENERIC
849 : statements. PRIORITY is the initialization priority for this
850 : constructor or destructor.
851 :
852 : FINAL specify whether the externally visible name for collect2 should
853 : be produced. */
854 :
static tree
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
			     tree optimization,
			     tree target)
{
  /* Counter makes every generated cdtor name unique within this TU.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (!targetm.have_ctors_dtors && final)
    {
      sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
      name = get_file_function_name (which_buf);
    }
  else
    {
      /* Produce a sane name but one not recognizable by collect2, just for the
	 case we fail to inline the function.  */
      sprintf (which_buf, "_sub_%c_%.5d_%d", which, priority, counter++);
      name = get_identifier (which_buf);
    }

  /* Build a FUNCTION_DECL of type "void name (void)" to hold BODY.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  /* The function returns nothing; give it an artificial void result.  */
  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
  DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Names that collect2 must find have to stay public and must survive
     until link time.  */
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Record the decl's role and priority; WHICH must be 'I' or 'D'.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
  return decl;
}
936 :
937 : /* Generate and emit a static constructor or destructor. WHICH must
938 : be one of 'I' (for a constructor) or 'D' (for a destructor).
939 : BODY is a STATEMENT_LIST containing GENERIC
940 : statements. PRIORITY is the initialization priority for this
941 : constructor or destructor. */
942 :
943 : void
944 4800 : cgraph_build_static_cdtor (char which, tree body, int priority)
945 : {
946 : /* FIXME: We should be able to
947 : gcc_assert (!in_lto_p);
948 : because at LTO time the global options are not safe to use.
949 : Unfortunately ASAN finish_file will produce constructors late and they
950 : may lead to surprises. */
951 4800 : cgraph_build_static_cdtor_1 (which, body, priority, false,
952 : optimization_default_node,
953 : target_option_default_node);
954 4800 : }
955 :
956 : /* When target does not have ctors and dtors, we call all constructor
957 : and destructor by special initialization/destruction function
958 : recognized by collect2.
959 :
960 : When we are going to build this function, collect all constructors and
961 : destructors and turn them into normal functions. */
962 :
963 : static void
964 103 : record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
965 : {
966 103 : if (DECL_STATIC_CONSTRUCTOR (node->decl))
967 78 : ctors->safe_push (node->decl);
968 103 : if (DECL_STATIC_DESTRUCTOR (node->decl))
969 29 : dtors->safe_push (node->decl);
970 103 : node = cgraph_node::get (node->decl);
971 103 : DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
972 103 : }
973 :
974 : /* Define global constructors/destructor functions for the CDTORS, of
975 : which they are LEN. The CDTORS are sorted by initialization
976 : priority. If CTOR_P is true, these are constructors; otherwise,
977 : they are destructors. */
978 :
static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Advance J so that [I, J) is the maximal run of cdtors sharing the
	 same initialization priority as CDTORS[I].  */
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Chain calls to every cdtor in the batch into a single statement
	 list, clearing the per-decl cdtor flags since the merged wrapper
	 takes over that role.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
				   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
				   DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
    }
}
1040 :
1041 : /* Helper functions for build_cxa_dtor_registrations ().
1042 : Build a decl for __cxa_atexit (). */
1043 :
1044 : static tree
1045 0 : build_cxa_atexit_decl ()
1046 : {
1047 : /* The parameter to "__cxa_atexit" is "void (*)(void *)". */
1048 0 : tree fn_type = build_function_type_list (void_type_node,
1049 : ptr_type_node, NULL_TREE);
1050 0 : tree fn_ptr_type = build_pointer_type (fn_type);
1051 : /* The declaration for `__cxa_atexit' is:
1052 : int __cxa_atexit (void (*)(void *), void *, void *). */
1053 0 : const char *name = "__cxa_atexit";
1054 0 : tree cxa_name = get_identifier (name);
1055 0 : fn_type = build_function_type_list (integer_type_node, fn_ptr_type,
1056 : ptr_type_node, ptr_type_node, NULL_TREE);
1057 0 : tree atexit_fndecl = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
1058 : cxa_name, fn_type);
1059 0 : SET_DECL_ASSEMBLER_NAME (atexit_fndecl, cxa_name);
1060 0 : DECL_VISIBILITY (atexit_fndecl) = VISIBILITY_DEFAULT;
1061 0 : DECL_VISIBILITY_SPECIFIED (atexit_fndecl) = true;
1062 0 : set_call_expr_flags (atexit_fndecl, ECF_LEAF | ECF_NOTHROW);
1063 0 : TREE_PUBLIC (atexit_fndecl) = true;
1064 0 : DECL_EXTERNAL (atexit_fndecl) = true;
1065 0 : DECL_ARTIFICIAL (atexit_fndecl) = true;
1066 0 : return atexit_fndecl;
1067 : }
1068 :
1069 : /* Build a decl for __dso_handle. */
1070 :
1071 : static tree
1072 0 : build_dso_handle_decl ()
1073 : {
1074 : /* Declare the __dso_handle variable. */
1075 0 : tree dso_handle_decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1076 : get_identifier ("__dso_handle"),
1077 : ptr_type_node);
1078 0 : TREE_PUBLIC (dso_handle_decl) = true;
1079 0 : DECL_EXTERNAL (dso_handle_decl) = true;
1080 0 : DECL_ARTIFICIAL (dso_handle_decl) = true;
1081 : #ifdef HAVE_GAS_HIDDEN
1082 0 : if (dso_handle_decl != error_mark_node)
1083 : {
1084 0 : DECL_VISIBILITY (dso_handle_decl) = VISIBILITY_HIDDEN;
1085 0 : DECL_VISIBILITY_SPECIFIED (dso_handle_decl) = true;
1086 : }
1087 : #endif
1088 0 : return dso_handle_decl;
1089 : }
1090 :
1091 : /* This builds one or more constructor functions that register DTORs with
1092 : __cxa_atexit (). Within a priority level, DTORs are registered in TU
1093 : order - which means that they will run in reverse TU order from cxa_atexit.
1094 : This is the same behavior as using a .fini / .mod_term_funcs section.
1095 : As the functions are built, they are appended to the CTORs vector. */
1096 :
static void
build_cxa_dtor_registrations (const vec<tree> &dtors, vec<tree> *ctors)
{
  size_t i,j;
  size_t len = dtors.length ();

  /* The registration code is artificial; emit it with no location.  */
  location_t sav_loc = input_location;
  input_location = UNKNOWN_LOCATION;

  tree atexit_fndecl = build_cxa_atexit_decl ();
  tree dso_handle_decl = build_dso_handle_decl ();

  /* We want &__dso_handle.  */
  tree dso_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
			     ptr_type_node, dso_handle_decl);

  i = 0;
  while (i < len)
    {
      priority_type priority = 0;
      tree body = NULL_TREE;
      /* Advance J so that [I, J) is the maximal run of dtors sharing the
	 same finalization priority as DTORS[I].  */
      j = i;
      do
	{
	  priority_type p;
	  tree fn = dtors[j];
	  p = DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* Emit one __cxa_atexit (fn, NULL, &__dso_handle) call per dtor in
	 the batch; the dtor flag is cleared since registration replaces
	 the .fini-style mechanism.  */
      for (;i < j; i++)
	{
	  tree fn = dtors[i];
	  DECL_STATIC_DESTRUCTOR (fn) = 0;
	  tree dtor_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
				      ptr_type_node, fn);
	  tree call_cxa_atexit
	    = build_call_expr_loc (UNKNOWN_LOCATION, atexit_fndecl, 3,
				   dtor_ptr, null_pointer_node, dso_ptr);
	  TREE_SIDE_EFFECTS (call_cxa_atexit) = 1;
	  append_to_statement_list (call_cxa_atexit, &body);
	}

      gcc_assert (body != NULL_TREE);
      /* Generate a function to register the DTORs at this priority.  */
      tree new_ctor
	= cgraph_build_static_cdtor_1 ('I', body, priority, true,
				       DECL_FUNCTION_SPECIFIC_OPTIMIZATION (dtors[0]),
				       DECL_FUNCTION_SPECIFIC_TARGET (dtors[0]));
      /* Add this to the list of ctors.  */
      ctors->safe_push (new_ctor);
    }
  input_location = sav_loc;
}
1158 :
1159 : /* Comparison function for qsort. P1 and P2 are actually of type
1160 : "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1161 : used to determine the sort order. */
1162 :
1163 : static int
1164 96 : compare_ctor (const void *p1, const void *p2)
1165 : {
1166 96 : tree f1;
1167 96 : tree f2;
1168 96 : int priority1;
1169 96 : int priority2;
1170 :
1171 96 : f1 = *(const tree *)p1;
1172 96 : f2 = *(const tree *)p2;
1173 96 : priority1 = DECL_INIT_PRIORITY (f1);
1174 96 : priority2 = DECL_INIT_PRIORITY (f2);
1175 :
1176 96 : if (priority1 < priority2)
1177 : return -1;
1178 40 : else if (priority1 > priority2)
1179 : return 1;
1180 : else
1181 : /* Ensure a stable sort. Constructors are executed in backwarding
1182 : order to make LTO initialize braries first. */
1183 16 : return DECL_UID (f2) - DECL_UID (f1);
1184 : }
1185 :
1186 : /* Comparison function for qsort. P1 and P2 are actually of type
1187 : "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1188 : used to determine the sort order. */
1189 :
1190 : static int
1191 96 : compare_dtor (const void *p1, const void *p2)
1192 : {
1193 96 : tree f1;
1194 96 : tree f2;
1195 96 : int priority1;
1196 96 : int priority2;
1197 :
1198 96 : f1 = *(const tree *)p1;
1199 96 : f2 = *(const tree *)p2;
1200 96 : priority1 = DECL_FINI_PRIORITY (f1);
1201 96 : priority2 = DECL_FINI_PRIORITY (f2);
1202 :
1203 96 : if (priority1 < priority2)
1204 : return -1;
1205 40 : else if (priority1 > priority2)
1206 : return 1;
1207 : else
1208 : /* Ensure a stable sort - into TU order. */
1209 16 : return DECL_UID (f1) - DECL_UID (f2);
1210 : }
1211 :
1212 : /* Comparison function for qsort. P1 and P2 are of type "tree *" and point to
1213 : a pair of static constructors or destructors. We first sort on the basis of
1214 : priority and then into TU order (on the strict assumption that DECL_UIDs are
1215 : ordered in the same way as the original functions). ???: this seems quite
1216 : fragile. */
1217 :
1218 : static int
1219 0 : compare_cdtor_tu_order (const void *p1, const void *p2)
1220 : {
1221 0 : tree f1;
1222 0 : tree f2;
1223 0 : int priority1;
1224 0 : int priority2;
1225 :
1226 0 : f1 = *(const tree *)p1;
1227 0 : f2 = *(const tree *)p2;
1228 : /* We process the DTORs first, and then remove their flag, so this order
1229 : allows for functions that are declared as both CTOR and DTOR. */
1230 0 : if (DECL_STATIC_DESTRUCTOR (f1))
1231 : {
1232 0 : gcc_checking_assert (DECL_STATIC_DESTRUCTOR (f2));
1233 0 : priority1 = DECL_FINI_PRIORITY (f1);
1234 0 : priority2 = DECL_FINI_PRIORITY (f2);
1235 : }
1236 : else
1237 : {
1238 0 : priority1 = DECL_INIT_PRIORITY (f1);
1239 0 : priority2 = DECL_INIT_PRIORITY (f2);
1240 : }
1241 :
1242 0 : if (priority1 < priority2)
1243 : return -1;
1244 0 : else if (priority1 > priority2)
1245 : return 1;
1246 : else
1247 : /* For equal priority, sort into the order of definition in the TU. */
1248 0 : return DECL_UID (f1) - DECL_UID (f2);
1249 : }
1250 :
1251 : /* Generate functions to call static constructors and destructors
1252 : for targets that do not support .ctors/.dtors sections. These
1253 : functions have magic names which are detected by collect2. */
1254 :
static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  /* Non-empty vectors are only expected when the target lacks native
     ctor/dtor support or when merging at LTO time.  Ctors are processed
     before dtors; the order affects the names generated by
     cgraph_build_static_cdtor_1's internal counter.  */
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}
1273 : /* Generate new CTORs to register static destructors with __cxa_atexit and add
1274 : them to the existing list of CTORs; we then process the revised CTORs list.
1275 :
1276 : We sort the DTORs into priority and then TU order, this means that they are
1277 : registered in that order with __cxa_atexit () and therefore will be run in
1278 : the reverse order.
1279 :
1280 : Likewise, CTORs are sorted into priority and then TU order, which means that
1281 : they will run in that order.
1282 :
1283 : This matches the behavior of using init/fini or mod_init_func/mod_term_func
1284 : sections. */
1285 :
static void
build_cxa_atexit_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  /* DTORs must be handled first: their registration functions are new
     CTORs appended to *CTORS, which the second step then merges.  */
  if (!dtors->is_empty ())
    {
      gcc_assert (targetm.dtors_from_cxa_atexit);
      dtors->qsort (compare_cdtor_tu_order);
      build_cxa_dtor_registrations (*dtors, ctors);
    }

  if (!ctors->is_empty ())
    {
      gcc_assert (targetm.dtors_from_cxa_atexit);
      ctors->qsort (compare_cdtor_tu_order);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }
}
1303 :
1304 : /* Look for constructors and destructors and produce function calling them.
1305 : This is needed for targets not supporting ctors or dtors, but we perform the
1306 : transformation also at linktime to merge possibly numerous
1307 : constructors/destructors into single function to improve code locality and
1308 : reduce size. */
1309 :
static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  /* Collect every defined cdtor, then merge them either via
     __cxa_atexit registration or via collect2-style wrappers, as the
     target dictates.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node, &ctors, &dtors);
  if (targetm.dtors_from_cxa_atexit)
    build_cxa_atexit_fns (&ctors, &dtors);
  else
    build_cdtor_fns (&ctors, &dtors);
  return 0;
}
1328 :
namespace {

/* Pass descriptor for the IPA cdtor merge pass.  */

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate (function *) final override;
  unsigned int execute (function *) final override
  {
    return ipa_cdtor_merge ();
  }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p || targetm.dtors_from_cxa_atexit;
}

} // anon namespace
1379 :
/* Factory for the IPA cdtor merge pass; ownership passes to the caller
   (the pass manager).  */

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
1385 :
1386 : /* Invalid pointer representing BOTTOM for single user dataflow. */
1387 : #define BOTTOM ((cgraph_node *)(size_t) 2)
1388 :
/* Meet operation for single user dataflow.
   Here we want to associate variables with a single function that may access
   them.

   FUNCTION is current single user of a variable, VAR is variable that uses it.
   Lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */
1399 :
1400 : cgraph_node *
1401 3436557 : meet (cgraph_node *function, varpool_node *var,
1402 : hash_map<varpool_node *, cgraph_node *> &single_user_map)
1403 : {
1404 3436557 : struct cgraph_node *user, **f;
1405 :
1406 3436557 : if (var->aux == BOTTOM)
1407 : return BOTTOM;
1408 :
1409 2453905 : f = single_user_map.get (var);
1410 2453905 : if (!f)
1411 : return function;
1412 1002107 : user = *f;
1413 1002107 : if (!function)
1414 : return user;
1415 980029 : else if (function != user)
1416 : return BOTTOM;
1417 : else
1418 : return function;
1419 : }
1420 :
1421 : /* Propagation step of single-use dataflow.
1422 :
1423 : Check all uses of VNODE and see if they are used by single function FUNCTION.
1424 : SINGLE_USER_MAP represents the dataflow lattice. */
1425 :
1426 : cgraph_node *
1427 1789488 : propagate_single_user (varpool_node *vnode, cgraph_node *function,
1428 : hash_map<varpool_node *, cgraph_node *> &single_user_map)
1429 : {
1430 1789488 : int i;
1431 1789488 : struct ipa_ref *ref;
1432 :
1433 1789488 : gcc_assert (!vnode->externally_visible);
1434 :
1435 : /* If node is an alias, first meet with its target. */
1436 1789488 : if (vnode->alias)
1437 16541 : function = meet (function, vnode->get_alias_target (), single_user_map);
1438 :
1439 : /* Check all users and see if they correspond to a single function. */
1440 5831856 : for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
1441 : {
1442 8084736 : struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1443 4042368 : if (cnode)
1444 : {
1445 622352 : if (cnode->inlined_to)
1446 91117 : cnode = cnode->inlined_to;
1447 622352 : if (!function)
1448 : function = cnode;
1449 316786 : else if (function != cnode)
1450 25054 : function = BOTTOM;
1451 : }
1452 : else
1453 6840032 : function = meet (function, dyn_cast <varpool_node *> (ref->referring),
1454 : single_user_map);
1455 : }
1456 1789488 : return function;
1457 : }
1458 :
/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */
1462 :
static unsigned int
ipa_single_use (void)
{
  /* Worklist threaded through the AUX pointers, terminated by the
     sentinel value 1.  */
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  /* Initialize: variables with implicit references go straight to
     BOTTOM; everything else is enqueued at TOP (no map entry yet).  */
  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      /* Pop the head of the worklist.  */
      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  /* Commit results: anything not BOTTOM has a unique user recorded in
     the map; clear all AUX pointers on the way out.  */
  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}
1554 :
namespace {

/* Pass descriptor for the IPA single-use pass.  */

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace
1592 :
/* Factory for the IPA single-use pass; ownership passes to the caller
   (the pass manager).  */

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}
1598 :
|