Line data Source code
1 : /* Callgraph based analysis of static variables.
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 : Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : /* This file marks functions as being either const (TREE_READONLY) or
22 : pure (DECL_PURE_P). It can also set a variant of these that
23 : are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24 :
25 : This must be run after inlining decisions have been made since
26 : otherwise, the local sets will not contain information that is
27 : consistent with post inlined state. The global sets are not prone
28 : to this problem since they are by definition transitive. */
29 :
30 : /* The code in this module is called by the ipa pass manager. It
31 : should be one of the later passes since its information is used by
32 : the rest of the compilation. */
33 :
34 : #include "config.h"
35 : #include "system.h"
36 : #include "coretypes.h"
37 : #include "backend.h"
38 : #include "target.h"
39 : #include "tree.h"
40 : #include "gimple.h"
41 : #include "tree-pass.h"
42 : #include "tree-streamer.h"
43 : #include "cgraph.h"
44 : #include "diagnostic.h"
45 : #include "calls.h"
46 : #include "cfganal.h"
47 : #include "tree-eh.h"
48 : #include "gimple-iterator.h"
49 : #include "gimple-walk.h"
50 : #include "tree-cfg.h"
51 : #include "tree-ssa-loop-niter.h"
52 : #include "langhooks.h"
53 : #include "ipa-utils.h"
54 : #include "gimple-pretty-print.h"
55 : #include "cfgloop.h"
56 : #include "tree-scalar-evolution.h"
57 : #include "intl.h"
58 : #include "opts.h"
59 : #include "ssa.h"
60 : #include "alloc-pool.h"
61 : #include "symbol-summary.h"
62 : #include "sreal.h"
63 : #include "ipa-cp.h"
64 : #include "ipa-prop.h"
65 : #include "ipa-fnsummary.h"
66 : #include "symtab-thunks.h"
67 : #include "dbgcnt.h"
68 : #include "gcc-urlifier.h"
69 :
70 : /* Lattice values for const and pure functions. Everything starts out
71 : being const, then may drop to pure and then neither depending on
72 : what is found. */
73 : enum pure_const_state_e
74 : {
75 : IPA_CONST,
76 : IPA_PURE,
77 : IPA_NEITHER
78 : };
79 :
80 : static const char *pure_const_names[3] = {"const", "pure", "neither"};
81 :
82 : enum malloc_state_e
83 : {
84 : STATE_MALLOC_TOP,
85 : STATE_MALLOC,
86 : STATE_MALLOC_BOTTOM
87 : };
88 :
89 : static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
90 :
91 : /* Holder for the const_state. There is one of these per function
92 : decl. */
93 : class funct_state_d
94 : {
95 : public:
96 2732696 : funct_state_d (): pure_const_state (IPA_NEITHER),
97 2732696 : state_previously_known (IPA_NEITHER), looping_previously_known (true),
98 2732696 : looping (true), can_throw (true), can_free (true),
99 0 : malloc_state (STATE_MALLOC_BOTTOM) {}
100 :
101 2620403 : funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
102 2620403 : state_previously_known (s.state_previously_known),
103 2620403 : looping_previously_known (s.looping_previously_known),
104 2620403 : looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
105 2620403 : malloc_state (s.malloc_state) {}
106 :
107 : /* See above. */
108 : enum pure_const_state_e pure_const_state;
109 : /* What user set here; we can be always sure about this. */
110 : enum pure_const_state_e state_previously_known;
111 : bool looping_previously_known;
112 :
113 : /* True if the function could possibly infinite loop. There are a
114 : lot of ways that this could be determined. We are pretty
115 : conservative here. While it is possible to cse pure and const
116 : calls, it is not legal to have dce get rid of the call if there
117 : is a possibility that the call could infinite loop since this is
118 : a behavioral change. */
119 : bool looping;
120 :
121 : bool can_throw;
122 :
123 : /* If function can call free, munmap or otherwise make previously
124 : non-trapping memory accesses trapping. */
125 : bool can_free;
126 :
127 : enum malloc_state_e malloc_state;
128 : };
129 :
130 : typedef class funct_state_d * funct_state;
131 :
132 : /* The storage of the funct_state is abstracted because there is the
133 : possibility that it may be desirable to move this to the cgraph
134 : local info. */
135 :
136 : class funct_state_summary_t:
137 : public fast_function_summary <funct_state_d *, va_heap>
138 : {
139 : public:
140 161664 : funct_state_summary_t (symbol_table *symtab):
141 323328 : fast_function_summary <funct_state_d *, va_heap> (symtab) {}
142 :
143 : void insert (cgraph_node *, funct_state_d *state) final override;
144 : void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
145 : funct_state_d *src_data,
146 : funct_state_d *dst_data) final override;
147 : };
148 :
149 : static funct_state_summary_t *funct_state_summaries = NULL;
150 :
151 : static bool gate_pure_const (void);
152 :
153 : namespace {
154 :
155 : const pass_data pass_data_ipa_pure_const =
156 : {
157 : IPA_PASS, /* type */
158 : "pure-const", /* name */
159 : OPTGROUP_NONE, /* optinfo_flags */
160 : TV_IPA_PURE_CONST, /* tv_id */
161 : 0, /* properties_required */
162 : 0, /* properties_provided */
163 : 0, /* properties_destroyed */
164 : 0, /* todo_flags_start */
165 : 0, /* todo_flags_finish */
166 : };
167 :
168 : class pass_ipa_pure_const : public ipa_opt_pass_d
169 : {
170 : public:
171 : pass_ipa_pure_const(gcc::context *ctxt);
172 :
173 : /* opt_pass methods: */
174 1182510 : bool gate (function *) final override { return gate_pure_const (); }
175 : unsigned int execute (function *fun) final override;
176 :
177 : void register_hooks (void);
178 :
179 : private:
180 : bool init_p;
181 : }; // class pass_ipa_pure_const
182 :
183 : } // anon namespace
184 :
185 : /* Try to guess if function body will always be visible to compiler
186 : when compiling the call and whether compiler will be able
187 : to propagate the information by itself. */
188 :
189 : static bool
190 26 : function_always_visible_to_compiler_p (tree decl)
191 : {
192 22 : return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
193 48 : || DECL_COMDAT (decl));
194 : }
195 :
196 : /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
197 : is true if the function is known to be finite. The diagnostic is
198 : controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
199 : OPTION, this function may initialize it and it is always returned
200 : by the function. */
201 :
202 : static hash_set<tree> *
203 1257538 : suggest_attribute (diagnostics::option_id option, tree decl, bool known_finite,
204 : hash_set<tree> *warned_about,
205 : const char * attrib_name)
206 : {
207 1257538 : if (!option_enabled (option.m_idx, lang_hooks.option_lang_mask (),
208 : &global_options))
209 : return warned_about;
210 30 : if (TREE_THIS_VOLATILE (decl)
211 30 : || (known_finite && function_always_visible_to_compiler_p (decl)))
212 : return warned_about;
213 :
214 26 : if (!warned_about)
215 14 : warned_about = new hash_set<tree>;
216 26 : if (warned_about->contains (decl))
217 : return warned_about;
218 26 : warned_about->add (decl);
219 26 : auto_urlify_attributes sentinel;
220 30 : warning_at (DECL_SOURCE_LOCATION (decl),
221 : option,
222 : known_finite
223 : ? G_("function might be candidate for attribute %qs")
224 : : G_("function might be candidate for attribute %qs"
225 : " if it is known to return normally"), attrib_name);
226 26 : return warned_about;
227 26 : }
228 :
229 : /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
230 : is true if the function is known to be finite. */
231 :
232 : static void
233 368333 : warn_function_pure (tree decl, bool known_finite)
234 : {
235 : /* Declaring a void function pure makes no sense and is diagnosed
236 : by -Wattributes because calling it would have no effect. */
237 368333 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
238 : return;
239 :
240 339451 : static hash_set<tree> *warned_about;
241 339451 : warned_about
242 339451 : = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
243 : known_finite, warned_about, "pure");
244 : }
245 :
246 : /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
247 : is true if the function is known to be finite. */
248 :
249 : static void
250 911421 : warn_function_const (tree decl, bool known_finite)
251 : {
252 : /* Declaring a void function const makes no sense and is diagnosed
253 : by -Wattributes because calling it would have no effect. */
254 911421 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
255 : return;
256 :
257 671256 : static hash_set<tree> *warned_about;
258 671256 : warned_about
259 671256 : = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
260 : known_finite, warned_about, "const");
261 : }
262 :
263 : /* Emit suggestion about __attribute__((malloc)) for DECL. */
264 :
265 : static void
266 3 : warn_function_malloc (tree decl)
267 : {
268 3 : static hash_set<tree> *warned_about;
269 3 : warned_about
270 3 : = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
271 : true, warned_about, "malloc");
272 3 : }
273 :
274 : /* Emit suggestion about __attribute__((noreturn)) for DECL. */
275 :
276 : static void
277 26587 : warn_function_noreturn (tree decl)
278 : {
279 26587 : tree original_decl = decl;
280 :
281 26587 : static hash_set<tree> *warned_about;
282 26587 : if (!lang_hooks.missing_noreturn_ok_p (decl)
283 26587 : && targetm.warn_func_return (decl))
284 14367 : warned_about
285 14367 : = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
286 : true, warned_about, "noreturn");
287 26587 : }
288 :
289 : void
290 8977 : warn_function_cold (tree decl)
291 : {
292 8977 : tree original_decl = decl;
293 :
294 8977 : static hash_set<tree> *warned_about;
295 8977 : warned_about
296 8977 : = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
297 : true, warned_about, "cold");
298 8977 : }
299 :
300 : void
301 223484 : warn_function_returns_nonnull (tree decl)
302 : {
303 223484 : static hash_set<tree> *warned_about;
304 223484 : warned_about
305 223484 : = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull, decl,
306 : true, warned_about, "returns_nonnull");
307 223484 : }
308 :
309 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
310 : variable T is legal in a function that is either pure or const. */
311 :
312 : static inline void
313 20024353 : check_decl (funct_state local,
314 : tree t, bool checking_write, bool ipa)
315 : {
316 : /* Do not want to do anything with volatile except mark any
317 : function that uses one to be not const or pure. */
318 20024353 : if (TREE_THIS_VOLATILE (t))
319 : {
320 1446893 : local->pure_const_state = IPA_NEITHER;
321 1446893 : if (dump_file)
322 30 : fprintf (dump_file, " Volatile operand is not const/pure\n");
323 1446893 : return;
324 : }
325 :
326 : /* Do not care about a local automatic that is not static. */
327 18577460 : if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
328 : return;
329 :
330 : /* If the variable has the "used" attribute, treat it as if it had a
331 : been touched by the devil. */
332 3545512 : if (DECL_PRESERVE_P (t))
333 : {
334 3499 : local->pure_const_state = IPA_NEITHER;
335 3499 : if (dump_file)
336 0 : fprintf (dump_file, " Used static/global variable is not const/pure\n");
337 3499 : return;
338 : }
339 :
340 : /* In IPA mode we are not interested in checking actual loads and stores;
341 : they will be processed at propagation time using ipa_ref. */
342 3542013 : if (ipa)
343 : return;
344 :
345 : /* Since we have dealt with the locals and params cases above, if we
346 : are CHECKING_WRITE, this cannot be a pure or constant
347 : function. */
348 2284552 : if (checking_write)
349 : {
350 816076 : local->pure_const_state = IPA_NEITHER;
351 816076 : if (dump_file)
352 1 : fprintf (dump_file, " static/global memory write is not const/pure\n");
353 816076 : return;
354 : }
355 :
356 1468476 : if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
357 : {
358 : /* Readonly reads are safe. */
359 843014 : if (TREE_READONLY (t))
360 : return; /* Read of a constant, do not change the function state. */
361 : else
362 : {
363 839577 : if (dump_file)
364 0 : fprintf (dump_file, " global memory read is not const\n");
365 : /* Just a regular read. */
366 839577 : if (local->pure_const_state == IPA_CONST)
367 167861 : local->pure_const_state = IPA_PURE;
368 : }
369 : }
370 : else
371 : {
372 : /* Compilation level statics can be read if they are readonly
373 : variables. */
374 625462 : if (TREE_READONLY (t))
375 : return;
376 :
377 595895 : if (dump_file)
378 1 : fprintf (dump_file, " static memory read is not const\n");
379 : /* Just a regular read. */
380 595895 : if (local->pure_const_state == IPA_CONST)
381 32689 : local->pure_const_state = IPA_PURE;
382 : }
383 : }
384 :
385 :
386 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
387 : variable T is legal in a function that is either pure or const. */
388 :
389 : static inline void
390 18547936 : check_op (funct_state local, tree t, bool checking_write)
391 : {
392 18547936 : t = get_base_address (t);
393 18547936 : if (t && TREE_THIS_VOLATILE (t))
394 : {
395 20644 : local->pure_const_state = IPA_NEITHER;
396 20644 : if (dump_file)
397 2 : fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
398 20644 : return;
399 : }
400 18527292 : else if (refs_local_or_readonly_memory_p (t))
401 : {
402 4801151 : if (dump_file)
403 10 : fprintf (dump_file, " Indirect ref to local or readonly "
404 : "memory is OK\n");
405 4801151 : return;
406 : }
407 13726141 : else if (checking_write)
408 : {
409 4795868 : local->pure_const_state = IPA_NEITHER;
410 4795868 : if (dump_file)
411 64 : fprintf (dump_file, " Indirect ref write is not const/pure\n");
412 4795868 : return;
413 : }
414 : else
415 : {
416 8930273 : if (dump_file)
417 175 : fprintf (dump_file, " Indirect ref read is not const\n");
418 8930273 : if (local->pure_const_state == IPA_CONST)
419 1345334 : local->pure_const_state = IPA_PURE;
420 : }
421 : }
422 :
423 : /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
424 :
425 : static void
426 15723374 : state_from_flags (enum pure_const_state_e *state, bool *looping,
427 : int flags, bool cannot_lead_to_return)
428 : {
429 15723374 : *looping = false;
430 15723374 : if (flags & ECF_LOOPING_CONST_OR_PURE)
431 : {
432 202133 : *looping = true;
433 202133 : if (dump_file && (dump_flags & TDF_DETAILS))
434 0 : fprintf (dump_file, " looping\n");
435 : }
436 15723374 : if (flags & ECF_CONST)
437 : {
438 1205533 : *state = IPA_CONST;
439 1205533 : if (dump_file && (dump_flags & TDF_DETAILS))
440 3 : fprintf (dump_file, " const\n");
441 : }
442 14517841 : else if (flags & ECF_PURE)
443 : {
444 1194580 : *state = IPA_PURE;
445 1194580 : if (dump_file && (dump_flags & TDF_DETAILS))
446 3 : fprintf (dump_file, " pure\n");
447 : }
448 13323261 : else if (cannot_lead_to_return)
449 : {
450 965748 : *state = IPA_PURE;
451 965748 : *looping = true;
452 965748 : if (dump_file && (dump_flags & TDF_DETAILS))
453 1 : fprintf (dump_file, " ignoring side effects->pure looping\n");
454 : }
455 : else
456 : {
457 12357513 : if (dump_file && (dump_flags & TDF_DETAILS))
458 42 : fprintf (dump_file, " neither\n");
459 12357513 : *state = IPA_NEITHER;
460 12357513 : *looping = true;
461 : }
462 15723374 : }
463 :
464 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
465 : into STATE and LOOPING better of the two variants.
466 : Be sure to merge looping correctly. IPA_NEITHER functions
467 : have looping 0 even if they don't have to return. */
468 :
469 : static inline void
470 6947507 : better_state (enum pure_const_state_e *state, bool *looping,
471 : enum pure_const_state_e state2, bool looping2)
472 : {
473 6947507 : if (state2 < *state)
474 : {
475 35913 : if (*state == IPA_NEITHER)
476 34413 : *looping = looping2;
477 : else
478 1500 : *looping = MIN (*looping, looping2);
479 35913 : *state = state2;
480 : }
481 6911594 : else if (state2 != IPA_NEITHER)
482 1616992 : *looping = MIN (*looping, looping2);
483 6947507 : }
484 :
485 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
486 : into STATE and LOOPING worse of the two variants.
487 : N is the actual node called. */
488 :
489 : static inline void
490 14953375 : worse_state (enum pure_const_state_e *state, bool *looping,
491 : enum pure_const_state_e state2, bool looping2,
492 : struct symtab_node *from,
493 : struct symtab_node *to)
494 : {
495 : /* Consider function:
496 :
497 : bool a(int *p)
498 : {
499 : return *p==*p;
500 : }
501 :
502 : During early optimization we will turn this into:
503 :
504 : bool a(int *p)
505 : {
506 : return true;
507 : }
508 :
509 : Now if this function will be detected as CONST however when interposed it
510 : may end up being just pure. We always must assume the worst scenario here.
511 : */
512 14953375 : if (*state == IPA_CONST && state2 == IPA_CONST
513 14953375 : && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
514 : {
515 3846 : if (dump_file && (dump_flags & TDF_DETAILS))
516 0 : fprintf (dump_file, "Dropping state to PURE because call to %s may not "
517 : "bind to current def.\n", to->dump_name ());
518 : state2 = IPA_PURE;
519 : }
520 14953375 : *state = MAX (*state, state2);
521 14953375 : *looping = MAX (*looping, looping2);
522 14953375 : }
523 :
524 : /* Recognize special cases of builtins that are by themselves not const
525 : but function using them is. */
526 : bool
527 23886267 : builtin_safe_for_const_function_p (bool *looping, tree callee)
528 : {
529 23886267 : if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
530 6813204 : switch (DECL_FUNCTION_CODE (callee))
531 : {
532 997141 : case BUILT_IN_RETURN:
533 997141 : case BUILT_IN_UNREACHABLE:
534 997141 : CASE_BUILT_IN_ALLOCA:
535 997141 : case BUILT_IN_STACK_SAVE:
536 997141 : case BUILT_IN_STACK_RESTORE:
537 997141 : case BUILT_IN_EH_POINTER:
538 997141 : case BUILT_IN_EH_FILTER:
539 997141 : case BUILT_IN_UNWIND_RESUME:
540 997141 : case BUILT_IN_CXA_END_CLEANUP:
541 997141 : case BUILT_IN_EH_COPY_VALUES:
542 997141 : case BUILT_IN_FRAME_ADDRESS:
543 997141 : case BUILT_IN_APPLY_ARGS:
544 997141 : case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
545 997141 : case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
546 997141 : case BUILT_IN_DWARF_CFA:
547 997141 : case BUILT_IN_RETURN_ADDRESS:
548 997141 : *looping = false;
549 997141 : return true;
550 9967 : case BUILT_IN_PREFETCH:
551 9967 : *looping = true;
552 9967 : return true;
553 : default:
554 : break;
555 : }
556 : return false;
557 : }
558 :
559 : /* Check the parameters of a function call to CALL_EXPR to see if
560 : there are any references in the parameters that are not allowed for
561 : pure or const functions. Also check to see if this is either an
562 : indirect call, a call outside the compilation unit, or has special
563 : attributes that may also affect the purity. The CALL_EXPR node for
564 : the entire call expression. */
565 :
566 : static void
567 15515004 : check_call (funct_state local, gcall *call, bool ipa)
568 : {
569 15515004 : int flags = gimple_call_flags (call);
570 15515004 : tree callee_t = gimple_call_fndecl (call);
571 15515004 : bool possibly_throws = stmt_could_throw_p (cfun, call);
572 15515004 : bool possibly_throws_externally = (possibly_throws
573 15515004 : && stmt_can_throw_external (cfun, call));
574 :
575 5690729 : if (possibly_throws)
576 : {
577 : unsigned int i;
578 34264572 : for (i = 0; i < gimple_num_ops (call); i++)
579 28573843 : if (gimple_op (call, i)
580 28573843 : && tree_could_throw_p (gimple_op (call, i)))
581 : {
582 80034 : if (possibly_throws && cfun->can_throw_non_call_exceptions)
583 : {
584 80034 : if (dump_file)
585 0 : fprintf (dump_file, " operand can throw; looping\n");
586 80034 : local->looping = true;
587 : }
588 80034 : if (possibly_throws_externally)
589 : {
590 68927 : if (dump_file)
591 0 : fprintf (dump_file, " operand can throw externally\n");
592 68927 : local->can_throw = true;
593 : }
594 : }
595 : }
596 :
597 : /* The const and pure flags are set by a variety of places in the
598 : compiler (including here). If someone has already set the flags
599 : for the callee, (such as for some of the builtins) we will use
600 : them, otherwise we will compute our own information.
601 :
602 : Const and pure functions have less clobber effects than other
603 : functions so we process these first. Otherwise if it is a call
604 : outside the compilation unit or an indirect call we punt. This
605 : leaves local calls which will be processed by following the call
606 : graph. */
607 15515004 : if (callee_t)
608 : {
609 14509743 : bool call_looping;
610 :
611 14509743 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
612 14509743 : && !nonfreeing_call_p (call))
613 724517 : local->can_free = true;
614 :
615 14509743 : if (builtin_safe_for_const_function_p (&call_looping, callee_t))
616 : {
617 351703 : worse_state (&local->pure_const_state, &local->looping,
618 : IPA_CONST, call_looping,
619 : NULL, NULL);
620 351703 : return;
621 : }
622 : /* When bad things happen to bad functions, they cannot be const
623 : or pure. */
624 14158040 : if (setjmp_call_p (callee_t))
625 : {
626 2700 : if (dump_file)
627 0 : fprintf (dump_file, " setjmp is not const/pure\n");
628 2700 : local->looping = true;
629 2700 : local->pure_const_state = IPA_NEITHER;
630 : }
631 :
632 14158040 : if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
633 3355134 : switch (DECL_FUNCTION_CODE (callee_t))
634 : {
635 1838 : case BUILT_IN_LONGJMP:
636 1838 : case BUILT_IN_NONLOCAL_GOTO:
637 1838 : if (dump_file)
638 0 : fprintf (dump_file,
639 : " longjmp and nonlocal goto is not const/pure\n");
640 1838 : local->pure_const_state = IPA_NEITHER;
641 1838 : local->looping = true;
642 1838 : break;
643 : default:
644 : break;
645 : }
646 : }
647 1005261 : else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
648 157283 : local->can_free = true;
649 :
650 : /* When not in IPA mode, we can still handle self recursion. */
651 15163301 : if (!ipa && callee_t
652 15163301 : && recursive_call_p (current_function_decl, callee_t))
653 : {
654 19897 : if (dump_file)
655 0 : fprintf (dump_file, " Recursive call can loop.\n");
656 19897 : local->looping = true;
657 : }
658 : /* Either callee is unknown or we are doing local analysis.
659 : Look to see if there are any bits available for the callee (such as by
660 : declaration or because it is builtin) and process solely on the basis of
661 : those bits. Handle internal calls always, those calls don't have
662 : corresponding cgraph edges and thus aren't processed during
663 : the propagation. */
664 15143404 : else if (!ipa || gimple_call_internal_p (call))
665 : {
666 9905907 : enum pure_const_state_e call_state;
667 9905907 : bool call_looping;
668 9905907 : if (possibly_throws && cfun->can_throw_non_call_exceptions)
669 : {
670 2178183 : if (dump_file)
671 0 : fprintf (dump_file, " can throw; looping\n");
672 2178183 : local->looping = true;
673 : }
674 9905907 : if (possibly_throws_externally)
675 : {
676 2772150 : if (dump_file)
677 : {
678 0 : fprintf (dump_file, " can throw externally to lp %i\n",
679 : lookup_stmt_eh_lp (call));
680 0 : if (callee_t)
681 0 : fprintf (dump_file, " callee:%s\n",
682 0 : IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
683 : }
684 2772150 : local->can_throw = true;
685 : }
686 9905907 : if (dump_file && (dump_flags & TDF_DETAILS))
687 22 : fprintf (dump_file, " checking flags for call:");
688 9905907 : state_from_flags (&call_state, &call_looping, flags,
689 9905907 : ((flags & (ECF_NORETURN | ECF_NOTHROW))
690 : == (ECF_NORETURN | ECF_NOTHROW))
691 9905907 : || (!flag_exceptions && (flags & ECF_NORETURN)));
692 9905907 : worse_state (&local->pure_const_state, &local->looping,
693 : call_state, call_looping, NULL, NULL);
694 : }
695 : /* Direct functions calls are handled by IPA propagation. */
696 : }
697 :
698 : /* Wrapper around check_decl for loads in local mode. */
699 :
700 : static bool
701 12437534 : check_load (gimple *, tree op, tree, void *data)
702 : {
703 12437534 : if (DECL_P (op))
704 5201350 : check_decl ((funct_state)data, op, false, false);
705 : else
706 7236184 : check_op ((funct_state)data, op, false);
707 12437534 : return false;
708 : }
709 :
710 : /* Wrapper around check_decl for stores in local mode. */
711 :
712 : static bool
713 13431566 : check_store (gimple *, tree op, tree, void *data)
714 : {
715 13431566 : if (DECL_P (op))
716 7761415 : check_decl ((funct_state)data, op, true, false);
717 : else
718 5670151 : check_op ((funct_state)data, op, true);
719 13431566 : return false;
720 : }
721 :
722 : /* Wrapper around check_decl for loads in ipa mode. */
723 :
724 : static bool
725 6220797 : check_ipa_load (gimple *, tree op, tree, void *data)
726 : {
727 6220797 : if (DECL_P (op))
728 2975515 : check_decl ((funct_state)data, op, false, true);
729 : else
730 3245282 : check_op ((funct_state)data, op, false);
731 6220797 : return false;
732 : }
733 :
734 : /* Wrapper around check_decl for stores in ipa mode. */
735 :
736 : static bool
737 6482392 : check_ipa_store (gimple *, tree op, tree, void *data)
738 : {
739 6482392 : if (DECL_P (op))
740 4086073 : check_decl ((funct_state)data, op, true, true);
741 : else
742 2396319 : check_op ((funct_state)data, op, true);
743 6482392 : return false;
744 : }
745 :
746 : /* Look into pointer pointed to by GSIP and figure out what interesting side
747 : effects it has. */
748 : static void
749 182525023 : check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
750 : {
751 182525023 : gimple *stmt = gsi_stmt (*gsip);
752 :
753 182525023 : if (is_gimple_debug (stmt))
754 : return;
755 :
756 : /* Do consider clobber as side effects before IPA, so we rather inline
757 : C++ destructors and keep clobber semantics than eliminate them.
758 :
759 : Similar logic is in ipa-modref.
760 :
761 : TODO: We may get smarter during early optimizations on these and let
762 : functions containing only clobbers to be optimized more. This is a common
763 : case of C++ destructors. */
764 :
765 92106102 : if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
766 : return;
767 :
768 89917952 : if (dump_file)
769 : {
770 1702 : fprintf (dump_file, " scanning: ");
771 1702 : print_gimple_stmt (dump_file, stmt, 0);
772 : }
773 :
774 166585792 : if (gimple_has_volatile_ops (stmt) && !gimple_clobber_p (stmt))
775 : {
776 1469402 : local->pure_const_state = IPA_NEITHER;
777 1469402 : if (dump_file)
778 33 : fprintf (dump_file, " Volatile stmt is not const/pure\n");
779 : }
780 :
781 : /* Look for loads and stores. */
782 150572306 : walk_stmt_load_store_ops (stmt, local,
783 : ipa ? check_ipa_load : check_load,
784 : ipa ? check_ipa_store : check_store);
785 :
786 89917952 : if (gimple_code (stmt) != GIMPLE_CALL && stmt_could_throw_p (cfun, stmt))
787 : {
788 2601242 : if (cfun->can_throw_non_call_exceptions)
789 : {
790 2293843 : if (dump_file)
791 0 : fprintf (dump_file, " can throw; looping\n");
792 2293843 : local->looping = true;
793 : }
794 2601242 : if (stmt_can_throw_external (cfun, stmt))
795 : {
796 2247903 : if (dump_file)
797 6 : fprintf (dump_file, " can throw externally\n");
798 2247903 : local->can_throw = true;
799 : }
800 : else
801 353339 : if (dump_file)
802 0 : fprintf (dump_file, " can throw\n");
803 : }
804 89917952 : switch (gimple_code (stmt))
805 : {
806 15515004 : case GIMPLE_CALL:
807 15515004 : check_call (local, as_a <gcall *> (stmt), ipa);
808 15515004 : break;
809 1577801 : case GIMPLE_LABEL:
810 1577801 : if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
811 : /* Target of long jump. */
812 : {
813 764 : if (dump_file)
814 0 : fprintf (dump_file, " nonlocal label is not const/pure\n");
815 764 : local->pure_const_state = IPA_NEITHER;
816 : }
817 : break;
818 261558 : case GIMPLE_ASM:
819 261558 : if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
820 : {
821 115034 : if (dump_file)
822 3 : fprintf (dump_file, " memory asm clobber is not const/pure\n");
823 : /* Abandon all hope, ye who enter here. */
824 115034 : local->pure_const_state = IPA_NEITHER;
825 115034 : local->can_free = true;
826 : }
827 261558 : if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
828 : {
829 238817 : if (dump_file)
830 3 : fprintf (dump_file, " volatile is not const/pure\n");
831 : /* Abandon all hope, ye who enter here. */
832 238817 : local->pure_const_state = IPA_NEITHER;
833 238817 : local->looping = true;
834 238817 : local->can_free = true;
835 : }
836 : return;
837 : default:
838 : break;
839 : }
840 : }
841 :
842 : /* Check that RETVAL is used only in STMT and in comparisons against 0.
843 : RETVAL is return value of the function and STMT is return stmt. */
844 :
845 : static bool
846 471088 : check_retval_uses (tree retval, gimple *stmt)
847 : {
848 471088 : imm_use_iterator use_iter;
849 471088 : gimple *use_stmt;
850 :
851 1485981 : FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
852 684060 : if (gcond *cond = dyn_cast<gcond *> (use_stmt))
853 : {
854 12644 : tree op2 = gimple_cond_rhs (cond);
855 12644 : if (!integer_zerop (op2))
856 : return false;
857 : }
858 671416 : else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
859 : {
860 93158 : enum tree_code code = gimple_assign_rhs_code (ga);
861 93158 : if (TREE_CODE_CLASS (code) != tcc_comparison)
862 : return false;
863 2538 : if (!integer_zerop (gimple_assign_rhs2 (ga)))
864 : return false;
865 : }
866 578258 : else if (is_gimple_debug (use_stmt))
867 : ;
868 463620 : else if (use_stmt != stmt)
869 140255 : return false;
870 :
871 330833 : return true;
872 : }
873 :
874 : /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
875 : attribute. Currently this function does a very conservative analysis.
876 : FUN is considered to be a candidate if
877 : 1) It returns a value of pointer type.
878 : 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
879 : a phi, and element of phi is either NULL or
880 : SSA_NAME_DEF_STMT(element) is function call.
881 : 3) The return-value has immediate uses only within comparisons (gcond or gassign)
882 : and return_stmt (and likewise a phi arg has immediate use only within comparison
883 : or the phi stmt). */
884 :
885 : #define DUMP_AND_RETURN(reason) \
886 : { \
887 : if (dump_file && (dump_flags & TDF_DETAILS)) \
888 : fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
889 : (node->dump_name ()), (reason)); \
890 : return false; \
891 : }
892 :
893 : static bool
894 401202 : malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
895 : bitmap visited)
896 : {
897 401202 : cgraph_node *node = cgraph_node::get_create (fun->decl);
898 401202 : if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
899 : return true;
900 :
901 401192 : if (!check_retval_uses (retval, ret_stmt))
902 111586 : DUMP_AND_RETURN("Return value has uses outside return stmt"
903 : " and comparisons against 0.")
904 :
905 289606 : gimple *def = SSA_NAME_DEF_STMT (retval);
906 :
907 289606 : if (gcall *call_stmt = dyn_cast<gcall *> (def))
908 : {
909 73119 : tree callee_decl = gimple_call_fndecl (call_stmt);
910 73119 : if (!callee_decl)
911 : return false;
912 :
913 134145 : if (!ipa && !DECL_IS_MALLOC (callee_decl))
914 25973 : DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
915 : " non-ipa mode.")
916 :
917 45690 : cgraph_edge *cs = node->get_edge (call_stmt);
918 45690 : if (cs)
919 : {
920 9181 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
921 9181 : es->is_return_callee_uncaptured = true;
922 : }
923 : }
924 :
925 216487 : else if (gphi *phi = dyn_cast<gphi *> (def))
926 : {
927 : bool all_args_zero = true;
928 98260 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
929 : {
930 92906 : tree arg = gimple_phi_arg_def (phi, i);
931 92906 : if (integer_zerop (arg))
932 20309 : continue;
933 :
934 72597 : all_args_zero = false;
935 72597 : if (TREE_CODE (arg) != SSA_NAME)
936 2701 : DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
937 69896 : if (!check_retval_uses (arg, phi))
938 28669 : DUMP_AND_RETURN ("phi arg has uses outside phi"
939 : " and comparisons against 0.")
940 :
941 41227 : gimple *arg_def = SSA_NAME_DEF_STMT (arg);
942 41227 : if (is_a<gphi *> (arg_def))
943 : {
944 3658 : if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
945 3566 : DUMP_AND_RETURN ("nested phi fail")
946 92 : continue;
947 : }
948 :
949 37569 : gcall *call_stmt = dyn_cast<gcall *> (arg_def);
950 37569 : if (!call_stmt)
951 18291 : DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
952 :
953 19278 : tree callee_decl = gimple_call_fndecl (call_stmt);
954 19278 : if (!callee_decl)
955 : return false;
956 26096 : if (!ipa && !DECL_IS_MALLOC (callee_decl))
957 6574 : DUMP_AND_RETURN("callee_decl does not have malloc attribute"
958 : " for non-ipa mode.")
959 :
960 9807 : cgraph_edge *cs = node->get_edge (call_stmt);
961 9807 : if (cs)
962 : {
963 6798 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
964 6798 : es->is_return_callee_uncaptured = true;
965 : }
966 : }
967 :
968 5354 : if (all_args_zero)
969 48 : DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
970 : }
971 :
972 : else
973 148435 : DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
974 :
975 : return true;
976 : }
977 :
/* Check whether FUN may be annotated with the malloc attribute; IPA
   selects whether callees must already be known malloc (false) or are
   handled via call summaries for later propagation (true).  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  /* Give up for functions that never return, and when NULL-pointer
     checks may not be deleted (the malloc guarantee about non-aliasing
     results is then not exploitable).  */
  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  /* Shared across all return statements so each SSA name / PHI chain is
     walked at most once.  */
  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      /* Every predecessor of the exit block must end in a return.  */
      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}
1016 :
1017 : #undef DUMP_AND_RETURN
1018 :
1019 : /* Return true if function is known to be finite. */
1020 :
1021 : bool
1022 4241888 : finite_function_p ()
1023 : {
1024 : /* Const functions cannot have back edges (an
1025 : indication of possible infinite loop side
1026 : effect. */
1027 4241888 : bool finite = true;
1028 4241888 : if (mark_dfs_back_edges ())
1029 : {
1030 : /* Preheaders are needed for SCEV to work.
1031 : Simple latches and recorded exits improve chances that loop will
1032 : proved to be finite in testcases such as in loop-15.c
1033 : and loop-24.c */
1034 425320 : loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1035 : | LOOPS_HAVE_SIMPLE_LATCHES
1036 : | LOOPS_HAVE_RECORDED_EXITS);
1037 425320 : if (dump_file && (dump_flags & TDF_DETAILS))
1038 0 : flow_loops_dump (dump_file, NULL, 0);
1039 425320 : if (mark_irreducible_loops ())
1040 : {
1041 2052 : if (dump_file)
1042 0 : fprintf (dump_file, " has irreducible loops\n");
1043 : finite = false;
1044 : }
1045 : else
1046 : {
1047 423268 : scev_initialize ();
1048 2109655 : for (auto loop : loops_list (cfun, 0))
1049 915391 : if (!finite_loop_p (loop))
1050 : {
1051 75540 : if (dump_file)
1052 1 : fprintf (dump_file, " cannot prove finiteness of "
1053 : "loop %i\n", loop->num);
1054 : finite =false;
1055 : break;
1056 423268 : }
1057 423268 : scev_finalize ();
1058 : }
1059 425320 : loop_optimizer_finalize ();
1060 : }
1061 4241888 : return finite;
1062 : }
1063 :
/* This is the main routine for the local analysis of a single function
   FN: it computes FN's funct_state (pure/const state, looping,
   can_throw, can_free and malloc state).  */
1066 :
static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start from the most optimistic local state; the statement walk and
     the code below only ever weaken it.  */
  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  /* Seed the previously-known state from the declaration/type flags
     (e.g. explicit const/pure attributes).  */
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      /* Thunks with virtual offsets read the vtable; they cannot be
	 const.  */
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->dump_name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* Scan every statement and merge its effects into L.  */
  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  /* NULL memory accesses terminates BB.  These accesses are known
	     to trip undefined behaviour.  gimple-ssa-isolate-paths turns them
	     to volatile accesses and adds builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, " NULL memory access; terminating BB%s\n",
			 flag_non_call_exceptions ? "; looping" : "");
	      if (flag_non_call_exceptions)
		{
		  /* With -fnon-call-exceptions the trap may unwind.  */
		  l->looping = true;
		  if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
		    {
		      if (dump_file)
			fprintf (dump_file, " can throw externally\n");
		      l->can_throw = true;
		    }
		}
	      break;
	    }
	  check_stmt (&gsi, l, ipa);
	  /* All four properties are already at their worst value; no
	     point scanning the rest of the body.  */
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  /* A const/pure candidate must additionally be shown to terminate,
     otherwise it is only "looping" const/pure.  */
  if (l->pure_const_state != IPA_NEITHER
      && !l->looping
      && !finite_function_p ())
    l->looping = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  /* Trust whatever the declaration already promises if it is better
     than what we derived.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  /* Compute the local malloc state; in IPA mode candidates start at
     STATE_MALLOC_TOP and are resolved by later propagation.  */
  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
1184 :
1185 : void
1186 19206 : funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1187 : {
1188 : /* There are some shared nodes, in particular the initializers on
1189 : static declarations. We do not need to scan them more than once
1190 : since all we would be interested in are the addressof
1191 : operations. */
1192 19206 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1193 : {
1194 19205 : funct_state_d *a = analyze_function (node, true);
1195 19205 : new (state) funct_state_d (*a);
1196 19205 : free (a);
1197 : }
1198 : else
1199 : /* Do not keep stale summaries. */
1200 1 : funct_state_summaries->remove (node);
1201 19206 : }
1202 :
1203 : /* Called when new clone is inserted to callgraph late. */
1204 :
1205 : void
1206 1206372 : funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1207 : funct_state_d *src_data,
1208 : funct_state_d *dst_data)
1209 : {
1210 1206372 : new (dst_data) funct_state_d (*src_data);
1211 1206372 : if (dst_data->malloc_state == STATE_MALLOC
1212 1206372 : && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1213 11 : dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1214 1206372 : }
1215 :
1216 :
1217 : void
1218 161664 : pass_ipa_pure_const::
1219 : register_hooks (void)
1220 : {
1221 161664 : if (init_p)
1222 : return;
1223 :
1224 161664 : init_p = true;
1225 :
1226 161664 : funct_state_summaries = new funct_state_summary_t (symtab);
1227 : }
1228 :
1229 :
1230 : /* Analyze each function in the cgraph to see if it is locally PURE or
1231 : CONST. */
1232 :
1233 : static void
1234 149380 : pure_const_generate_summary (void)
1235 : {
1236 149380 : struct cgraph_node *node;
1237 :
1238 149380 : pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1239 149380 : pass->register_hooks ();
1240 :
1241 : /* Process all of the functions.
1242 :
1243 : We process AVAIL_INTERPOSABLE functions. We cannot use the results
1244 : by default, but the info can be used at LTO with -fwhole-program or
1245 : when function got cloned and the clone is AVAILABLE. */
1246 :
1247 1544535 : FOR_EACH_DEFINED_FUNCTION (node)
1248 1395155 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1249 : {
1250 1394826 : funct_state_d *a = analyze_function (node, true);
1251 1394826 : new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1252 1394826 : free (a);
1253 : }
1254 149380 : }
1255 :
1256 :
1257 : /* Serialize the ipa info for lto. */
1258 :
static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count how many summaries will be emitted, so the
     reader knows how many records follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  /* Emit the node reference so the reader can map the record
	     back to its cgraph node.  */
	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
1313 :
1314 :
1315 : /* Deserialize the ipa info for lto. */
1316 :
static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* The section is optional; a file may carry no pure/const
	 summaries at all.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      /* Map the streamed node reference back to its node.  */
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, " previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file," function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file," function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file," function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file," function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
1402 :
1403 : /* We only propagate across edges that can throw externally and their callee
1404 : is not interposable. */
1405 :
1406 : static bool
1407 7379975 : ignore_edge_for_nothrow (struct cgraph_edge *e)
1408 : {
1409 7379975 : if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1410 : return true;
1411 :
1412 2151894 : enum availability avail;
1413 2151894 : cgraph_node *ultimate_target
1414 2151894 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1415 2151894 : if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1416 : return true;
1417 755437 : return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1418 206494 : && !e->callee->binds_to_current_def_p (e->caller))
1419 755403 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1420 1509720 : || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1421 : }
1422 :
1423 : /* Return true if NODE is self recursive function.
1424 : Indirectly recursive functions appears as non-trivial strongly
1425 : connected components, so we need to care about self recursion
1426 : only. */
1427 :
1428 : static bool
1429 2102587 : self_recursive_p (struct cgraph_node *node)
1430 : {
1431 2102587 : struct cgraph_edge *e;
1432 8195213 : for (e = node->callees; e; e = e->next_callee)
1433 6096548 : if (e->callee->function_symbol () == node)
1434 : return true;
1435 : return false;
1436 : }
1437 :
1438 : /* Return true if N is cdtor that is not const or pure. In this case we may
1439 : need to remove unreachable function if it is marked const/pure. */
1440 :
1441 : static bool
1442 50971 : cdtor_p (cgraph_node *n, void *)
1443 : {
1444 50971 : if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1445 3 : return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1446 3 : || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1447 : return false;
1448 : }
1449 :
1450 : /* Skip edges from and to nodes without ipa_pure_const enabled.
1451 : Ignore not available symbols. */
1452 :
1453 : static bool
1454 7379975 : ignore_edge_for_pure_const (struct cgraph_edge *e)
1455 : {
1456 7379975 : enum availability avail;
1457 7379975 : cgraph_node *ultimate_target
1458 7379975 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1459 :
1460 7379975 : return (avail <= AVAIL_INTERPOSABLE
1461 2545541 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1462 9916751 : || !opt_for_fn (ultimate_target->decl,
1463 7379975 : flag_ipa_pure_const));
1464 : }
1465 :
1466 : /* Return true if function should be skipped for local pure const analysis. */
1467 :
1468 : static bool
1469 4667504 : skip_function_for_local_pure_const (struct cgraph_node *node)
1470 : {
1471 : /* Because we do not schedule pass_fixup_cfg over whole program after early
1472 : optimizations we must not promote functions that are called by already
1473 : processed functions. */
1474 :
1475 4667504 : if (function_called_by_processed_nodes_p ())
1476 : {
1477 3387 : if (dump_file)
1478 1 : fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1479 3387 : return true;
1480 : }
1481 : /* Save some work and do not analyze functions which are interposable and
1482 : do not have any non-interposable aliases. */
1483 4664117 : if (node->get_availability () <= AVAIL_INTERPOSABLE
1484 4664117 : && !node->has_aliases_p ())
1485 : {
1486 197170 : if (dump_file)
1487 0 : fprintf (dump_file,
1488 : "Function is interposable; not analyzing.\n");
1489 197170 : return true;
1490 : }
1491 : return false;
1492 : }
1493 :
1494 : /* Make function const and output warning. If LOCAL is true,
1495 : return true if anything changed. Otherwise return true if
1496 : we may have introduced removale ctors. */
1497 :
1498 : bool
1499 1549262 : ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1500 : {
1501 1549262 : bool cdtor = false;
1502 :
1503 1549262 : if (TREE_READONLY (node->decl)
1504 1549262 : && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1505 : return false;
1506 911421 : warn_function_const (node->decl, !looping);
1507 911421 : if (local && skip_function_for_local_pure_const (node))
1508 : return false;
1509 892337 : if (dump_file)
1510 58 : fprintf (dump_file, "Function found to be %sconst: %s\n",
1511 : looping ? "looping " : "",
1512 : node->dump_name ());
1513 892337 : if (!local && !looping)
1514 44264 : cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1515 892337 : if (!dbg_cnt (ipa_attr))
1516 : return false;
1517 892337 : if (node->set_const_flag (true, looping))
1518 : {
1519 501136 : if (dump_file)
1520 58 : fprintf (dump_file,
1521 : "Declaration updated to be %sconst: %s\n",
1522 : looping ? "looping " : "",
1523 : node->dump_name ());
1524 501136 : if (local)
1525 : return true;
1526 2815 : return cdtor;
1527 : }
1528 : return false;
1529 : }
1530 :
1531 : /* Make function const and output warning. If LOCAL is true,
1532 : return true if anything changed. Otherwise return true if
1533 : we may have introduced removale ctors. */
1534 :
1535 : bool
1536 1047940 : ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1537 : {
1538 1047940 : bool cdtor = false;
1539 :
1540 1047940 : if (TREE_READONLY (node->decl)
1541 1047940 : || (DECL_PURE_P (node->decl)
1542 678835 : && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
1543 : return false;
1544 368333 : warn_function_pure (node->decl, !looping);
1545 368333 : if (local && skip_function_for_local_pure_const (node))
1546 : return false;
1547 359300 : if (dump_file)
1548 8 : fprintf (dump_file, "Function found to be %spure: %s\n",
1549 : looping ? "looping " : "",
1550 : node->dump_name ());
1551 359300 : if (!local && !looping)
1552 3439 : cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1553 359300 : if (!dbg_cnt (ipa_attr))
1554 : return false;
1555 359300 : if (node->set_pure_flag (true, looping))
1556 : {
1557 348494 : if (dump_file)
1558 8 : fprintf (dump_file,
1559 : "Declaration updated to be %spure: %s\n",
1560 : looping ? "looping " : "",
1561 : node->dump_name ());
1562 348494 : if (local)
1563 : return true;
1564 7756 : return cdtor;
1565 : }
1566 : return false;
1567 : }
1568 :
1569 : /* Produce transitive closure over the callgraph and compute pure/const
1570 : attributes. */
1571 :
1572 : static bool
1573 152416 : propagate_pure_const (void)
1574 : {
1575 152416 : struct cgraph_node *node;
1576 152416 : struct cgraph_node *w;
1577 152416 : struct cgraph_node **order =
1578 152416 : XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1579 152416 : int order_pos;
1580 152416 : int i;
1581 152416 : struct ipa_dfs_info * w_info;
1582 152416 : bool remove_p = false;
1583 :
1584 152416 : order_pos = ipa_reduced_postorder (order, true,
1585 : ignore_edge_for_pure_const);
1586 152416 : if (dump_file)
1587 : {
1588 29 : cgraph_node::dump_cgraph (dump_file);
1589 29 : ipa_print_order (dump_file, "reduced", order, order_pos);
1590 : }
1591 :
1592 : /* Propagate the local information through the call graph to produce
1593 : the global information. All the nodes within a cycle will have
1594 : the same info so we collapse cycles first. Then we can do the
1595 : propagation in one pass from the leaves to the roots. */
1596 2564579 : for (i = 0; i < order_pos; i++ )
1597 : {
1598 2412163 : enum pure_const_state_e pure_const_state = IPA_CONST;
1599 2412163 : bool looping = false;
1600 2412163 : int count = 0;
1601 2412163 : node = order[i];
1602 :
1603 2412163 : if (node->alias)
1604 37464 : continue;
1605 :
1606 2374699 : if (dump_file && (dump_flags & TDF_DETAILS))
1607 5 : fprintf (dump_file, "Starting cycle\n");
1608 :
1609 : /* Find the worst state for any node in the cycle. */
1610 : w = node;
1611 3994817 : while (w && pure_const_state != IPA_NEITHER)
1612 : {
1613 2377305 : struct cgraph_edge *e;
1614 2377305 : struct cgraph_edge *ie;
1615 2377305 : int i;
1616 2377305 : struct ipa_ref *ref = NULL;
1617 :
1618 2377305 : funct_state w_l = funct_state_summaries->get_create (w);
1619 2377305 : if (dump_file && (dump_flags & TDF_DETAILS))
1620 6 : fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1621 : w->dump_name (),
1622 6 : pure_const_names[w_l->pure_const_state],
1623 6 : w_l->looping);
1624 :
1625 : /* First merge in function body properties.
1626 : We are safe to pass NULL as FROM and TO because we will take care
1627 : of possible interposition when walking callees. */
1628 2377305 : worse_state (&pure_const_state, &looping,
1629 2377305 : w_l->pure_const_state, w_l->looping,
1630 : NULL, NULL);
1631 2377305 : if (pure_const_state == IPA_NEITHER)
1632 : break;
1633 :
1634 1620118 : count++;
1635 :
1636 : /* We consider recursive cycles as possibly infinite.
1637 : This might be relaxed since infinite recursion leads to stack
1638 : overflow. */
1639 1620118 : if (count > 1)
1640 2606 : looping = true;
1641 :
1642 : /* Now walk the edges and merge in callee properties. */
1643 2506458 : for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1644 886340 : e = e->next_callee)
1645 : {
1646 1946041 : enum availability avail;
1647 1946041 : struct cgraph_node *y = e->callee->
1648 3892082 : function_or_virtual_thunk_symbol (&avail,
1649 1946041 : e->caller);
1650 1946041 : enum pure_const_state_e edge_state = IPA_CONST;
1651 1946041 : bool edge_looping = false;
1652 :
1653 1946041 : if (e->recursive_p ())
1654 5515 : looping = true;
1655 :
1656 1946041 : if (dump_file && (dump_flags & TDF_DETAILS))
1657 : {
1658 7 : fprintf (dump_file, " Call to %s",
1659 7 : e->callee->dump_name ());
1660 : }
1661 1946041 : if (avail > AVAIL_INTERPOSABLE)
1662 : {
1663 662246 : funct_state y_l = funct_state_summaries->get_create (y);
1664 :
1665 662246 : if (dump_file && (dump_flags & TDF_DETAILS))
1666 : {
1667 2 : fprintf (dump_file,
1668 : " state:%s looping:%i\n",
1669 2 : pure_const_names[y_l->pure_const_state],
1670 2 : y_l->looping);
1671 : }
1672 662246 : if (y_l->pure_const_state > IPA_PURE
1673 662246 : && e->cannot_lead_to_return_p ())
1674 : {
1675 8363 : if (dump_file && (dump_flags & TDF_DETAILS))
1676 0 : fprintf (dump_file,
1677 : " Ignoring side effects"
1678 : " -> pure, looping\n");
1679 8363 : edge_state = IPA_PURE;
1680 8363 : edge_looping = true;
1681 : }
1682 : else
1683 : {
1684 653883 : edge_state = y_l->pure_const_state;
1685 653883 : edge_looping = y_l->looping;
1686 : }
1687 : }
1688 1283795 : else if (builtin_safe_for_const_function_p (&edge_looping,
1689 : y->decl))
1690 : edge_state = IPA_CONST;
1691 : else
1692 1100190 : state_from_flags (&edge_state, &edge_looping,
1693 1100190 : flags_from_decl_or_type (y->decl),
1694 1100190 : e->cannot_lead_to_return_p ());
1695 :
1696 : /* Merge the results with what we already know. */
1697 1946041 : better_state (&edge_state, &edge_looping,
1698 : w_l->state_previously_known,
1699 1946041 : w_l->looping_previously_known);
1700 1946041 : worse_state (&pure_const_state, &looping,
1701 1946041 : edge_state, edge_looping, e->caller, e->callee);
1702 1946041 : if (pure_const_state == IPA_NEITHER)
1703 : break;
1704 : }
1705 :
1706 : /* Now process the indirect call. */
1707 1620118 : for (ie = w->indirect_calls;
1708 1620866 : ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1709 : {
1710 24751 : enum pure_const_state_e edge_state = IPA_CONST;
1711 24751 : bool edge_looping = false;
1712 :
1713 24751 : if (dump_file && (dump_flags & TDF_DETAILS))
1714 0 : fprintf (dump_file, " Indirect call");
1715 49502 : state_from_flags (&edge_state, &edge_looping,
1716 24751 : ie->indirect_info->ecf_flags,
1717 24751 : ie->cannot_lead_to_return_p ());
1718 : /* Merge the results with what we already know. */
1719 24751 : better_state (&edge_state, &edge_looping,
1720 : w_l->state_previously_known,
1721 24751 : w_l->looping_previously_known);
1722 24751 : worse_state (&pure_const_state, &looping,
1723 : edge_state, edge_looping, NULL, NULL);
1724 24751 : if (pure_const_state == IPA_NEITHER)
1725 : break;
1726 : }
1727 :
1728 : /* And finally all loads and stores. */
1729 319514 : for (i = 0; w->iterate_reference (i, ref)
1730 2588276 : && pure_const_state != IPA_NEITHER; i++)
1731 : {
1732 347668 : enum pure_const_state_e ref_state = IPA_CONST;
1733 347668 : bool ref_looping = false;
1734 347668 : switch (ref->use)
1735 : {
1736 220611 : case IPA_REF_LOAD:
1737 : /* readonly reads are safe. */
1738 220611 : if (TREE_READONLY (ref->referred->decl))
1739 : break;
1740 204838 : if (dump_file && (dump_flags & TDF_DETAILS))
1741 0 : fprintf (dump_file, " nonreadonly global var read\n");
1742 204838 : ref_state = IPA_PURE;
1743 204838 : break;
1744 89918 : case IPA_REF_STORE:
1745 89918 : if (ref->cannot_lead_to_return ())
1746 : break;
1747 28224 : ref_state = IPA_NEITHER;
1748 28224 : if (dump_file && (dump_flags & TDF_DETAILS))
1749 0 : fprintf (dump_file, " global var write\n");
1750 : break;
1751 : case IPA_REF_ADDR:
1752 : break;
1753 0 : default:
1754 0 : gcc_unreachable ();
1755 : }
1756 347668 : better_state (&ref_state, &ref_looping,
1757 : w_l->state_previously_known,
1758 347668 : w_l->looping_previously_known);
1759 347668 : worse_state (&pure_const_state, &looping,
1760 : ref_state, ref_looping, NULL, NULL);
1761 347668 : if (pure_const_state == IPA_NEITHER)
1762 : break;
1763 : }
1764 1620118 : w_info = (struct ipa_dfs_info *) w->aux;
1765 1620118 : w = w_info->next_cycle;
1766 : }
1767 2374699 : if (dump_file && (dump_flags & TDF_DETAILS))
1768 5 : fprintf (dump_file, "Result %s looping %i\n",
1769 5 : pure_const_names [pure_const_state],
1770 : looping);
1771 :
1772 : /* Find the worst state of can_free for any node in the cycle. */
1773 : bool can_free = false;
1774 : w = node;
1775 4752493 : while (w && !can_free)
1776 : {
1777 2377794 : struct cgraph_edge *e;
1778 2377794 : funct_state w_l = funct_state_summaries->get (w);
1779 :
1780 2377794 : if (w_l->can_free
1781 2198379 : || w->get_availability () == AVAIL_INTERPOSABLE
1782 4496476 : || w->indirect_calls)
1783 : can_free = true;
1784 :
1785 4136201 : for (e = w->callees; e && !can_free; e = e->next_callee)
1786 : {
1787 1758407 : enum availability avail;
1788 1758407 : struct cgraph_node *y = e->callee->
1789 3516814 : function_or_virtual_thunk_symbol (&avail,
1790 1758407 : e->caller);
1791 :
1792 1758407 : if (avail > AVAIL_INTERPOSABLE)
1793 798674 : can_free = funct_state_summaries->get (y)->can_free;
1794 : else
1795 : can_free = true;
1796 : }
1797 2377794 : w_info = (struct ipa_dfs_info *) w->aux;
1798 2377794 : w = w_info->next_cycle;
1799 : }
1800 :
1801 : /* Copy back the region's pure_const_state which is shared by
1802 : all nodes in the region. */
1803 : w = node;
1804 4770218 : while (w)
1805 : {
1806 2395519 : funct_state w_l = funct_state_summaries->get (w);
1807 2395519 : enum pure_const_state_e this_state = pure_const_state;
1808 2395519 : bool this_looping = looping;
1809 :
1810 2395519 : w_l->can_free = can_free;
1811 2395519 : w->nonfreeing_fn = !can_free;
1812 2395519 : if (!can_free && dump_file)
1813 28 : fprintf (dump_file, "Function found not to call free: %s\n",
1814 : w->dump_name ());
1815 :
1816 2395519 : if (w_l->state_previously_known != IPA_NEITHER
1817 405419 : && this_state > w_l->state_previously_known)
1818 : {
1819 1177 : if (this_state == IPA_NEITHER)
1820 50 : this_looping = w_l->looping_previously_known;
1821 : this_state = w_l->state_previously_known;
1822 : }
1823 2395519 : if (!this_looping && self_recursive_p (w))
1824 : this_looping = true;
1825 2395519 : if (!w_l->looping_previously_known)
1826 309749 : this_looping = false;
1827 :
1828 : /* All nodes within a cycle share the same info. */
1829 2395519 : w_l->pure_const_state = this_state;
1830 2395519 : w_l->looping = this_looping;
1831 :
1832 : /* Inline clones share declaration with their offline copies;
1833 : do not modify their declarations since the offline copy may
1834 : be different. */
1835 2395519 : if (!w->inlined_to)
1836 1044615 : switch (this_state)
1837 : {
1838 163031 : case IPA_CONST:
1839 163031 : remove_p |= ipa_make_function_const (w, this_looping, false);
1840 163031 : break;
1841 :
1842 98699 : case IPA_PURE:
1843 98699 : remove_p |= ipa_make_function_pure (w, this_looping, false);
1844 98699 : break;
1845 :
1846 : default:
1847 : break;
1848 : }
1849 2395519 : w_info = (struct ipa_dfs_info *) w->aux;
1850 2395519 : w = w_info->next_cycle;
1851 : }
1852 : }
1853 :
1854 152416 : ipa_free_postorder_info ();
1855 152416 : free (order);
1856 152416 : return remove_p;
1857 : }
1858 :
1859 : /* Produce transitive closure over the callgraph and compute nothrow
1860 : attributes. */
1861 :
static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Reduce the callgraph to its strongly connected components, skipping
     edges that cannot propagate throwing (ignore_edge_for_nothrow).  */
  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      /* Aliases pick up the state of their target; nothing to do here.  */
      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  /* A function already marked TREE_NOTHROW contributes nothing.  */
	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      /* Interposable bodies may be replaced by a throwing variant,
		 so they must be treated conservatively.  */
	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  /* Calls whose exceptions cannot escape, or to known
		     nothrow callees, are harmless.  */
		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
		    function_or_virtual_thunk_symbol (&avail,
						      e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      /* Indirect calls are throwing unless the call site itself
		 is known to be ECF_NOTHROW.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by all
	 nodes in the region (cycle).  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->dump_name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1970 :
1971 : /* Debugging function to dump state of malloc lattice. */
1972 :
1973 : DEBUG_FUNCTION
1974 : static void
1975 304832 : dump_malloc_lattice (FILE *dump_file, const char *s)
1976 : {
1977 304832 : if (!dump_file)
1978 : return;
1979 :
1980 58 : fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1981 58 : cgraph_node *node;
1982 290 : FOR_EACH_FUNCTION (node)
1983 : {
1984 232 : funct_state fs = funct_state_summaries->get (node);
1985 232 : if (fs)
1986 156 : fprintf (dump_file, "%s: %s\n", node->dump_name (),
1987 156 : malloc_state_names[fs->malloc_state]);
1988 : }
1989 : }
1990 :
1991 : /* Propagate malloc attribute across the callgraph. */
1992 :
1993 : static void
1994 152416 : propagate_malloc (void)
1995 : {
1996 152416 : cgraph_node *node;
1997 4005901 : FOR_EACH_FUNCTION (node)
1998 : {
1999 3853485 : if (DECL_IS_MALLOC (node->decl))
2000 87070 : if (!funct_state_summaries->exists (node))
2001 : {
2002 23490 : funct_state fs = funct_state_summaries->get_create (node);
2003 23490 : fs->malloc_state = STATE_MALLOC;
2004 : }
2005 : }
2006 :
2007 152416 : dump_malloc_lattice (dump_file, "Initial");
2008 152416 : struct cgraph_node **order
2009 152416 : = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
2010 152416 : int order_pos = ipa_reverse_postorder (order);
2011 152416 : bool changed = true;
2012 :
2013 459040 : while (changed)
2014 : {
2015 154208 : changed = false;
2016 : /* Walk in postorder. */
2017 4682389 : for (int i = order_pos - 1; i >= 0; --i)
2018 : {
2019 4528181 : cgraph_node *node = order[i];
2020 6129487 : if (node->alias
2021 4484956 : || !node->definition
2022 7455056 : || !funct_state_summaries->exists (node))
2023 4441277 : continue;
2024 :
2025 2926875 : funct_state l = funct_state_summaries->get (node);
2026 :
2027 : /* FIXME: add support for indirect-calls. */
2028 2926875 : if (node->indirect_calls)
2029 : {
2030 142434 : l->malloc_state = STATE_MALLOC_BOTTOM;
2031 142434 : continue;
2032 : }
2033 :
2034 2784441 : if (node->get_availability () <= AVAIL_INTERPOSABLE)
2035 : {
2036 93607 : l->malloc_state = STATE_MALLOC_BOTTOM;
2037 93607 : continue;
2038 : }
2039 :
2040 2690834 : if (l->malloc_state == STATE_MALLOC_BOTTOM)
2041 2603930 : continue;
2042 :
2043 86904 : auto_vec<cgraph_node *, 16> callees;
2044 382167 : for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2045 : {
2046 295263 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2047 295263 : if (es && es->is_return_callee_uncaptured)
2048 13168 : callees.safe_push (cs->callee);
2049 : }
2050 :
2051 86904 : malloc_state_e new_state = l->malloc_state;
2052 100072 : for (unsigned j = 0; j < callees.length (); j++)
2053 : {
2054 13168 : cgraph_node *callee = callees[j];
2055 13168 : if (!funct_state_summaries->exists (node))
2056 : {
2057 : new_state = STATE_MALLOC_BOTTOM;
2058 : break;
2059 : }
2060 13168 : malloc_state_e callee_state
2061 13168 : = funct_state_summaries->get_create (callee)->malloc_state;
2062 13168 : if (new_state < callee_state)
2063 11059 : new_state = callee_state;
2064 : }
2065 86904 : if (new_state != l->malloc_state)
2066 : {
2067 11053 : changed = true;
2068 11053 : l->malloc_state = new_state;
2069 : }
2070 86904 : }
2071 : }
2072 :
2073 2585403 : FOR_EACH_DEFINED_FUNCTION (node)
2074 2432987 : if (funct_state_summaries->exists (node))
2075 : {
2076 2420988 : funct_state l = funct_state_summaries->get (node);
2077 2420988 : if (!node->alias
2078 2395519 : && l->malloc_state == STATE_MALLOC
2079 55268 : && !node->inlined_to
2080 2421521 : && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2081 : {
2082 533 : if (dump_file && (dump_flags & TDF_DETAILS))
2083 6 : fprintf (dump_file, "Function %s found to be malloc\n",
2084 : node->dump_name ());
2085 :
2086 533 : bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2087 533 : node->set_malloc_flag (true);
2088 533 : if (!malloc_decl_p && warn_suggest_attribute_malloc)
2089 0 : warn_function_malloc (node->decl);
2090 : }
2091 : }
2092 :
2093 152416 : dump_malloc_lattice (dump_file, "after propagation");
2094 152416 : ipa_free_postorder_info ();
2095 152416 : free (order);
2096 152416 : }
2097 :
2098 : /* Produce the global information by preforming a transitive closure
2099 : on the local information that was produced by generate_summary. */
2100 :
2101 : unsigned int
2102 152416 : pass_ipa_pure_const::
2103 : execute (function *)
2104 : {
2105 152416 : bool remove_p;
2106 :
2107 : /* Nothrow makes more function to not lead to return and improve
2108 : later analysis. */
2109 152416 : propagate_nothrow ();
2110 152416 : propagate_malloc ();
2111 152416 : remove_p = propagate_pure_const ();
2112 :
2113 152416 : delete funct_state_summaries;
2114 152416 : return remove_p ? TODO_remove_functions : 0;
2115 : }
2116 :
2117 : static bool
2118 4042520 : gate_pure_const (void)
2119 : {
2120 591507 : return flag_ipa_pure_const || in_lto_p;
2121 : }
2122 :
2123 288047 : pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2124 : : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2125 : pure_const_generate_summary, /* generate_summary */
2126 : pure_const_write_summary, /* write_summary */
2127 : pure_const_read_summary, /* read_summary */
2128 : NULL, /* write_optimization_summary */
2129 : NULL, /* read_optimization_summary */
2130 : NULL, /* stmt_fixup */
2131 : 0, /* function_transform_todo_flags_start */
2132 : NULL, /* function_transform */
2133 : NULL), /* variable_transform */
2134 288047 : init_p (false) {}
2135 :
2136 : ipa_opt_pass_d *
2137 288047 : make_pass_ipa_pure_const (gcc::context *ctxt)
2138 : {
2139 288047 : return new pass_ipa_pure_const (ctxt);
2140 : }
2141 :
2142 : /* Simple local pass for pure const discovery reusing the analysis from
2143 : ipa_pure_const. This pass is effective when executed together with
2144 : other optimization passes in early optimization pass queue. */
2145 :
2146 : namespace {
2147 :
/* Pass metadata for the early, per-function pure/const discovery pass.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2160 :
/* Pass shell for local pure/const discovery; the real work is in
   pass_local_pure_const::execute below.  */
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass can appear multiple times in the queue.  */
  opt_pass * clone () final override
  {
    return new pass_local_pure_const (m_ctxt);
  }
  /* Same gate as the IPA pass: -fipa-pure-const or LTO.  */
  bool gate (function *) final override { return gate_pure_const (); }
  unsigned int execute (function *) final override;

}; // class pass_local_pure_const
2177 :
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Even when the function is skipped for optimization purposes we
     still analyze it if attribute-suggestion warnings are enabled.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
	changed = true;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
    }

  /* Commit the pure/const verdict from the local analysis.  */
  switch (l->pure_const_state)
    {
    case IPA_CONST:
      changed |= ipa_make_function_const
		 (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    case IPA_PURE:
      changed |= ipa_make_function_pure
		 (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    default:
      break;
    }
  /* Commit nothrow discovery.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  /* Commit malloc discovery, optionally suggesting the attribute.  */
  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->dump_name ());
    }

  /* The local summary was heap-allocated by analyze_function.  */
  free (l);
  /* Flag changes may have made parts of the CFG dead (e.g. noreturn).  */
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
2254 :
2255 : } // anon namespace
2256 :
2257 : gimple_opt_pass *
2258 288047 : make_pass_local_pure_const (gcc::context *ctxt)
2259 : {
2260 288047 : return new pass_local_pure_const (ctxt);
2261 : }
2262 :
2263 : /* Emit noreturn warnings. */
2264 :
2265 : namespace {
2266 :
/* Pass metadata for the noreturn-suggestion warning pass; requires a
   CFG so exit-block predecessors can be inspected.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2279 :
2280 : class pass_warn_function_noreturn : public gimple_opt_pass
2281 : {
2282 : public:
2283 288047 : pass_warn_function_noreturn (gcc::context *ctxt)
2284 576094 : : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2285 : {}
2286 :
2287 : /* opt_pass methods: */
2288 1475202 : bool gate (function *) final override
2289 : {
2290 1475202 : return warn_suggest_attribute_noreturn;
2291 : }
2292 29 : unsigned int execute (function *fun) final override
2293 : {
2294 29 : if (!TREE_THIS_VOLATILE (current_function_decl)
2295 29 : && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2296 4 : warn_function_noreturn (current_function_decl);
2297 29 : return 0;
2298 : }
2299 :
2300 : }; // class pass_warn_function_noreturn
2301 :
2302 : } // anon namespace
2303 :
2304 : gimple_opt_pass *
2305 288047 : make_pass_warn_function_noreturn (gcc::context *ctxt)
2306 : {
2307 288047 : return new pass_warn_function_noreturn (ctxt);
2308 : }
2309 :
2310 : /* Simple local pass for nothrow discovery reusing the analysis from
2311 : ipa_pure_const. This pass is effective when executed together with
2312 : other optimization passes in early optimization pass queue. */
2313 :
2314 : namespace {
2315 :
/* Pass metadata for the local nothrow discovery pass.  */
const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2328 :
/* Pass shell for local nothrow discovery; see pass_nothrow::execute.  */
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass can appear multiple times in the queue.  */
  opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
  /* Only worth running when optimizing.  */
  bool gate (function *) final override { return optimize; }
  unsigned int execute (function *) final override;

}; // class pass_nothrow
2342 :
unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  /* Already known nothrow; nothing to discover.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering, we cannot really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      /* NOTE(review): returning TRUE from an unsigned-int todo-flags
	 hook looks like it was meant to be "return 0;" — confirm
	 whether bit 0 is a meaningful TODO flag before changing.  */
      return true;
    }

  /* Scan every statement; any that can throw externally (other than a
     self-recursive call, handled below) disproves nothrow.  */
  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
        if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
	  {
	    /* Self-recursive calls cannot be the first source of an
	       exception, so they are tolerated here.  */
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  /* Now that the function is nothrow, EH edges out of self-recursive
     calls are dead; purge them and request CFG cleanup if any go.  */
  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
	{
	  tree callee_t = gimple_call_fndecl (g);
	  if (callee_t
	      && recursive_call_p (current_function_decl, callee_t)
	      && maybe_clean_eh_stmt (g)
	      && gimple_purge_dead_eh_edges (this_block))
	    cfg_changed = true;
	}

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}
2408 :
2409 : } // anon namespace
2410 :
2411 : gimple_opt_pass *
2412 288047 : make_pass_nothrow (gcc::context *ctxt)
2413 : {
2414 288047 : return new pass_nothrow (ctxt);
2415 : }
|