Line data Source code
1 : /* Callgraph based analysis of static variables.
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 : Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : /* This file marks functions as being either const (TREE_READONLY) or
22 : pure (DECL_PURE_P). It can also set a variant of these that
23 : are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24 :
25 : This must be run after inlining decisions have been made since
26 : otherwise, the local sets will not contain information that is
27 : consistent with post inlined state. The global sets are not prone
28 : to this problem since they are by definition transitive. */
29 :
30 : /* The code in this module is called by the ipa pass manager. It
31 : should be one of the later passes since it's information is used by
32 : the rest of the compilation. */
33 :
34 : #include "config.h"
35 : #include "system.h"
36 : #include "coretypes.h"
37 : #include "backend.h"
38 : #include "target.h"
39 : #include "tree.h"
40 : #include "gimple.h"
41 : #include "tree-pass.h"
42 : #include "tree-streamer.h"
43 : #include "cgraph.h"
44 : #include "diagnostic.h"
45 : #include "calls.h"
46 : #include "cfganal.h"
47 : #include "tree-eh.h"
48 : #include "gimple-iterator.h"
49 : #include "gimple-walk.h"
50 : #include "tree-cfg.h"
51 : #include "tree-ssa-loop-niter.h"
52 : #include "langhooks.h"
53 : #include "ipa-utils.h"
54 : #include "gimple-pretty-print.h"
55 : #include "cfgloop.h"
56 : #include "tree-scalar-evolution.h"
57 : #include "intl.h"
58 : #include "opts.h"
59 : #include "ssa.h"
60 : #include "alloc-pool.h"
61 : #include "symbol-summary.h"
62 : #include "sreal.h"
63 : #include "ipa-cp.h"
64 : #include "ipa-prop.h"
65 : #include "ipa-fnsummary.h"
66 : #include "symtab-thunks.h"
67 : #include "dbgcnt.h"
68 : #include "gcc-urlifier.h"
69 :
70 : /* Lattice values for const and pure functions. Everything starts out
71 : being const, then may drop to pure and then neither depending on
72 : what is found. */
73 : enum pure_const_state_e
74 : {
75 : IPA_CONST,
76 : IPA_PURE,
77 : IPA_NEITHER
78 : };
79 :
80 : static const char *pure_const_names[3] = {"const", "pure", "neither"};
81 :
82 : enum malloc_state_e
83 : {
84 : STATE_MALLOC_TOP,
85 : STATE_MALLOC,
86 : STATE_MALLOC_BOTTOM
87 : };
88 :
89 : static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
90 :
91 : /* Holder for the const_state. There is one of these per function
92 : decl. */
93 : class funct_state_d
94 : {
95 : public:
96 2765248 : funct_state_d (): pure_const_state (IPA_NEITHER),
97 2765248 : state_previously_known (IPA_NEITHER), looping_previously_known (true),
98 2765248 : looping (true), can_throw (true), can_free (true),
99 0 : malloc_state (STATE_MALLOC_BOTTOM) {}
100 :
101 2652651 : funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
102 2652651 : state_previously_known (s.state_previously_known),
103 2652651 : looping_previously_known (s.looping_previously_known),
104 2652651 : looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
105 2652651 : malloc_state (s.malloc_state) {}
106 :
107 : /* See above. */
108 : enum pure_const_state_e pure_const_state;
109 : /* What user set here; we can be always sure about this. */
110 : enum pure_const_state_e state_previously_known;
111 : bool looping_previously_known;
112 :
113 : /* True if the function could possibly infinite loop. There are a
114 : lot of ways that this could be determined. We are pretty
115 : conservative here. While it is possible to cse pure and const
116 : calls, it is not legal to have dce get rid of the call if there
117 : is a possibility that the call could infinite loop since this is
118 : a behavioral change. */
119 : bool looping;
120 :
121 : bool can_throw;
122 :
123 : /* If function can call free, munmap or otherwise make previously
124 : non-trapping memory accesses trapping. */
125 : bool can_free;
126 :
127 : enum malloc_state_e malloc_state;
128 : };
129 :
130 : typedef class funct_state_d * funct_state;
131 :
132 : /* The storage of the funct_state is abstracted because there is the
133 : possibility that it may be desirable to move this to the cgraph
134 : local info. */
135 :
136 : class funct_state_summary_t:
137 : public fast_function_summary <funct_state_d *, va_heap>
138 : {
139 : public:
140 161929 : funct_state_summary_t (symbol_table *symtab):
141 323858 : fast_function_summary <funct_state_d *, va_heap> (symtab) {}
142 :
143 : void insert (cgraph_node *, funct_state_d *state) final override;
144 : void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
145 : funct_state_d *src_data,
146 : funct_state_d *dst_data) final override;
147 : };
148 :
149 : static funct_state_summary_t *funct_state_summaries = NULL;
150 :
151 : static bool gate_pure_const (void);
152 :
153 : namespace {
154 :
155 : const pass_data pass_data_ipa_pure_const =
156 : {
157 : IPA_PASS, /* type */
158 : "pure-const", /* name */
159 : OPTGROUP_NONE, /* optinfo_flags */
160 : TV_IPA_PURE_CONST, /* tv_id */
161 : 0, /* properties_required */
162 : 0, /* properties_provided */
163 : 0, /* properties_destroyed */
164 : 0, /* todo_flags_start */
165 : 0, /* todo_flags_finish */
166 : };
167 :
168 : class pass_ipa_pure_const : public ipa_opt_pass_d
169 : {
170 : public:
171 : pass_ipa_pure_const(gcc::context *ctxt);
172 :
173 : /* opt_pass methods: */
174 1185458 : bool gate (function *) final override { return gate_pure_const (); }
175 : unsigned int execute (function *fun) final override;
176 :
177 : void register_hooks (void);
178 :
179 : private:
180 : bool init_p;
181 : }; // class pass_ipa_pure_const
182 :
183 : } // anon namespace
184 :
185 : /* Try to guess if function body will always be visible to compiler
186 : when compiling the call and whether compiler will be able
187 : to propagate the information by itself. */
188 :
189 : static bool
190 26 : function_always_visible_to_compiler_p (tree decl)
191 : {
192 22 : return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
193 48 : || DECL_COMDAT (decl));
194 : }
195 :
196 : /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
197 : is true if the function is known to be finite. The diagnostic is
198 : controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
199 : OPTION, this function may initialize it and it is always returned
200 : by the function. */
201 :
202 : static hash_set<tree> *
203 1278478 : suggest_attribute (diagnostics::option_id option, tree decl, bool known_finite,
204 : hash_set<tree> *warned_about,
205 : const char * attrib_name)
206 : {
207 1278478 : if (!option_enabled (option.m_idx, lang_hooks.option_lang_mask (),
208 : &global_options))
209 : return warned_about;
210 30 : if (TREE_THIS_VOLATILE (decl)
211 30 : || (known_finite && function_always_visible_to_compiler_p (decl)))
212 : return warned_about;
213 :
214 26 : if (!warned_about)
215 14 : warned_about = new hash_set<tree>;
216 26 : if (warned_about->contains (decl))
217 : return warned_about;
218 26 : warned_about->add (decl);
219 26 : auto_urlify_attributes sentinel;
220 30 : warning_at (DECL_SOURCE_LOCATION (decl),
221 : option,
222 : known_finite
223 : ? G_("function might be candidate for attribute %qs")
224 : : G_("function might be candidate for attribute %qs"
225 : " if it is known to return normally"), attrib_name);
226 26 : return warned_about;
227 26 : }
228 :
229 : /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
230 : is true if the function is known to be finite. */
231 :
232 : static void
233 372178 : warn_function_pure (tree decl, bool known_finite)
234 : {
235 : /* Declaring a void function pure makes no sense and is diagnosed
236 : by -Wattributes because calling it would have no effect. */
237 372178 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
238 : return;
239 :
240 343309 : static hash_set<tree> *warned_about;
241 343309 : warned_about
242 343309 : = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
243 : known_finite, warned_about, "pure");
244 : }
245 :
246 : /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
247 : is true if the function is known to be finite. */
248 :
249 : static void
250 926781 : warn_function_const (tree decl, bool known_finite)
251 : {
252 : /* Declaring a void function const makes no sense is diagnosed
253 : by -Wattributes because calling it would have no effect. */
254 926781 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
255 : return;
256 :
257 682759 : static hash_set<tree> *warned_about;
258 682759 : warned_about
259 682759 : = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
260 : known_finite, warned_about, "const");
261 : }
262 :
263 : /* Emit suggestion about __attribute__((malloc)) for DECL. */
264 :
265 : static void
266 3 : warn_function_malloc (tree decl)
267 : {
268 3 : static hash_set<tree> *warned_about;
269 3 : warned_about
270 3 : = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
271 : true, warned_about, "malloc");
272 3 : }
273 :
274 : /* Emit suggestion about __attribute__((noreturn)) for DECL. */
275 :
276 : static void
277 26656 : warn_function_noreturn (tree decl)
278 : {
279 26656 : tree original_decl = decl;
280 :
281 26656 : static hash_set<tree> *warned_about;
282 26656 : if (!lang_hooks.missing_noreturn_ok_p (decl)
283 26656 : && targetm.warn_func_return (decl))
284 14408 : warned_about
285 14408 : = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
286 : true, warned_about, "noreturn");
287 26656 : }
288 :
289 : void
290 8995 : warn_function_cold (tree decl)
291 : {
292 8995 : tree original_decl = decl;
293 :
294 8995 : static hash_set<tree> *warned_about;
295 8995 : warned_about
296 8995 : = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
297 : true, warned_about, "cold");
298 8995 : }
299 :
300 : void
301 229004 : warn_function_returns_nonnull (tree decl)
302 : {
303 229004 : static hash_set<tree> *warned_about;
304 229004 : warned_about
305 229004 : = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull, decl,
306 : true, warned_about, "returns_nonnull");
307 229004 : }
308 :
309 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
310 : variable T is legal in a function that is either pure or const. */
311 :
312 : static inline void
313 20061184 : check_decl (funct_state local,
314 : tree t, bool checking_write, bool ipa)
315 : {
316 : /* Do not want to do anything with volatile except mark any
317 : function that uses one to be not const or pure. */
318 20061184 : if (TREE_THIS_VOLATILE (t))
319 : {
320 1446843 : local->pure_const_state = IPA_NEITHER;
321 1446843 : if (dump_file)
322 30 : fprintf (dump_file, " Volatile operand is not const/pure\n");
323 1446843 : return;
324 : }
325 :
326 : /* Do not care about a local automatic that is not static. */
327 18614341 : if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
328 : return;
329 :
330 : /* If the variable has the "used" attribute, treat it as if it had a
331 : been touched by the devil. */
332 3547489 : if (DECL_PRESERVE_P (t))
333 : {
334 3490 : local->pure_const_state = IPA_NEITHER;
335 3490 : if (dump_file)
336 0 : fprintf (dump_file, " Used static/global variable is not const/pure\n");
337 3490 : return;
338 : }
339 :
340 : /* In IPA mode we are not interested in checking actual loads and stores;
341 : they will be processed at propagation time using ipa_ref. */
342 3543999 : if (ipa)
343 : return;
344 :
345 : /* Since we have dealt with the locals and params cases above, if we
346 : are CHECKING_WRITE, this cannot be a pure or constant
347 : function. */
348 2286257 : if (checking_write)
349 : {
350 817071 : local->pure_const_state = IPA_NEITHER;
351 817071 : if (dump_file)
352 1 : fprintf (dump_file, " static/global memory write is not const/pure\n");
353 817071 : return;
354 : }
355 :
356 1469186 : if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
357 : {
358 : /* Readonly reads are safe. */
359 843344 : if (TREE_READONLY (t))
360 : return; /* Read of a constant, do not change the function state. */
361 : else
362 : {
363 839902 : if (dump_file)
364 0 : fprintf (dump_file, " global memory read is not const\n");
365 : /* Just a regular read. */
366 839902 : if (local->pure_const_state == IPA_CONST)
367 167828 : local->pure_const_state = IPA_PURE;
368 : }
369 : }
370 : else
371 : {
372 : /* Compilation level statics can be read if they are readonly
373 : variables. */
374 625842 : if (TREE_READONLY (t))
375 : return;
376 :
377 596195 : if (dump_file)
378 1 : fprintf (dump_file, " static memory read is not const\n");
379 : /* Just a regular read. */
380 596195 : if (local->pure_const_state == IPA_CONST)
381 32678 : local->pure_const_state = IPA_PURE;
382 : }
383 : }
384 :
385 :
386 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
387 : variable T is legal in a function that is either pure or const. */
388 :
389 : static inline void
390 18639037 : check_op (funct_state local, tree t, bool checking_write)
391 : {
392 18639037 : t = get_base_address (t);
393 18639037 : if (t && TREE_THIS_VOLATILE (t))
394 : {
395 20638 : local->pure_const_state = IPA_NEITHER;
396 20638 : if (dump_file)
397 2 : fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
398 20638 : return;
399 : }
400 18618399 : else if (refs_local_or_readonly_memory_p (t))
401 : {
402 4808005 : if (dump_file)
403 10 : fprintf (dump_file, " Indirect ref to local or readonly "
404 : "memory is OK\n");
405 4808005 : return;
406 : }
407 13810394 : else if (checking_write)
408 : {
409 4835608 : local->pure_const_state = IPA_NEITHER;
410 4835608 : if (dump_file)
411 64 : fprintf (dump_file, " Indirect ref write is not const/pure\n");
412 4835608 : return;
413 : }
414 : else
415 : {
416 8974786 : if (dump_file)
417 175 : fprintf (dump_file, " Indirect ref read is not const\n");
418 8974786 : if (local->pure_const_state == IPA_CONST)
419 1356321 : local->pure_const_state = IPA_PURE;
420 : }
421 : }
422 :
423 : /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
424 :
425 : static void
426 15828293 : state_from_flags (enum pure_const_state_e *state, bool *looping,
427 : int flags, bool cannot_lead_to_return)
428 : {
429 15828293 : *looping = false;
430 15828293 : if (flags & ECF_LOOPING_CONST_OR_PURE)
431 : {
432 202358 : *looping = true;
433 202358 : if (dump_file && (dump_flags & TDF_DETAILS))
434 0 : fprintf (dump_file, " looping\n");
435 : }
436 15828293 : if (flags & ECF_CONST)
437 : {
438 1209507 : *state = IPA_CONST;
439 1209507 : if (dump_file && (dump_flags & TDF_DETAILS))
440 3 : fprintf (dump_file, " const\n");
441 : }
442 14618786 : else if (flags & ECF_PURE)
443 : {
444 1205559 : *state = IPA_PURE;
445 1205559 : if (dump_file && (dump_flags & TDF_DETAILS))
446 3 : fprintf (dump_file, " pure\n");
447 : }
448 13413227 : else if (cannot_lead_to_return)
449 : {
450 965874 : *state = IPA_PURE;
451 965874 : *looping = true;
452 965874 : if (dump_file && (dump_flags & TDF_DETAILS))
453 1 : fprintf (dump_file, " ignoring side effects->pure looping\n");
454 : }
455 : else
456 : {
457 12447353 : if (dump_file && (dump_flags & TDF_DETAILS))
458 42 : fprintf (dump_file, " neither\n");
459 12447353 : *state = IPA_NEITHER;
460 12447353 : *looping = true;
461 : }
462 15828293 : }
463 :
464 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
465 : into STATE and LOOPING better of the two variants.
466 : Be sure to merge looping correctly. IPA_NEITHER functions
467 : have looping 0 even if they don't have to return. */
468 :
469 : static inline void
470 7007599 : better_state (enum pure_const_state_e *state, bool *looping,
471 : enum pure_const_state_e state2, bool looping2)
472 : {
473 7007599 : if (state2 < *state)
474 : {
475 36477 : if (*state == IPA_NEITHER)
476 34960 : *looping = looping2;
477 : else
478 1517 : *looping = MIN (*looping, looping2);
479 36477 : *state = state2;
480 : }
481 6971122 : else if (state2 != IPA_NEITHER)
482 1623538 : *looping = MIN (*looping, looping2);
483 7007599 : }
484 :
485 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
486 : into STATE and LOOPING worse of the two variants.
487 : N is the actual node called. */
488 :
489 : static inline void
490 15070187 : worse_state (enum pure_const_state_e *state, bool *looping,
491 : enum pure_const_state_e state2, bool looping2,
492 : struct symtab_node *from,
493 : struct symtab_node *to)
494 : {
495 : /* Consider function:
496 :
497 : bool a(int *p)
498 : {
499 : return *p==*p;
500 : }
501 :
502 : During early optimization we will turn this into:
503 :
504 : bool a(int *p)
505 : {
506 : return true;
507 : }
508 :
509 : Now if this function will be detected as CONST however when interposed it
510 : may end up being just pure. We always must assume the worst scenario here.
511 : */
512 15070187 : if (*state == IPA_CONST && state2 == IPA_CONST
513 15070187 : && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
514 : {
515 3885 : if (dump_file && (dump_flags & TDF_DETAILS))
516 0 : fprintf (dump_file, "Dropping state to PURE because call to %s may not "
517 : "bind to current def.\n", to->dump_name ());
518 : state2 = IPA_PURE;
519 : }
520 15070187 : *state = MAX (*state, state2);
521 15070187 : *looping = MAX (*looping, looping2);
522 15070187 : }
523 :
524 : /* Recognize special cases of builtins that are by themselves not const
525 : but function using them is. */
526 : bool
527 24085754 : builtin_safe_for_const_function_p (bool *looping, tree callee)
528 : {
529 24085754 : if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
530 6856980 : switch (DECL_FUNCTION_CODE (callee))
531 : {
532 1020617 : case BUILT_IN_RETURN:
533 1020617 : case BUILT_IN_UNREACHABLE:
534 1020617 : CASE_BUILT_IN_ALLOCA:
535 1020617 : case BUILT_IN_STACK_SAVE:
536 1020617 : case BUILT_IN_STACK_RESTORE:
537 1020617 : case BUILT_IN_EH_POINTER:
538 1020617 : case BUILT_IN_EH_FILTER:
539 1020617 : case BUILT_IN_UNWIND_RESUME:
540 1020617 : case BUILT_IN_CXA_END_CLEANUP:
541 1020617 : case BUILT_IN_EH_COPY_VALUES:
542 1020617 : case BUILT_IN_FRAME_ADDRESS:
543 1020617 : case BUILT_IN_APPLY_ARGS:
544 1020617 : case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
545 1020617 : case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
546 1020617 : case BUILT_IN_DWARF_CFA:
547 1020617 : case BUILT_IN_RETURN_ADDRESS:
548 1020617 : *looping = false;
549 1020617 : return true;
550 9967 : case BUILT_IN_PREFETCH:
551 9967 : *looping = true;
552 9967 : return true;
553 : default:
554 : break;
555 : }
556 : return false;
557 : }
558 :
559 : /* Check the parameters of a function call to CALL_EXPR to see if
560 : there are any references in the parameters that are not allowed for
561 : pure or const functions. Also check to see if this is either an
562 : indirect call, a call outside the compilation unit, or has special
563 : attributes that may also effect the purity. The CALL_EXPR node for
564 : the entire call expression. */
565 :
566 : static void
567 15604516 : check_call (funct_state local, gcall *call, bool ipa)
568 : {
569 15604516 : int flags = gimple_call_flags (call);
570 15604516 : tree callee_t = gimple_call_fndecl (call);
571 15604516 : bool possibly_throws = stmt_could_throw_p (cfun, call);
572 15604516 : bool possibly_throws_externally = (possibly_throws
573 15604516 : && stmt_can_throw_external (cfun, call));
574 :
575 5728301 : if (possibly_throws)
576 : {
577 : unsigned int i;
578 34488833 : for (i = 0; i < gimple_num_ops (call); i++)
579 28760532 : if (gimple_op (call, i)
580 28760532 : && tree_could_throw_p (gimple_op (call, i)))
581 : {
582 80034 : if (possibly_throws && cfun->can_throw_non_call_exceptions)
583 : {
584 80034 : if (dump_file)
585 0 : fprintf (dump_file, " operand can throw; looping\n");
586 80034 : local->looping = true;
587 : }
588 80034 : if (possibly_throws_externally)
589 : {
590 68927 : if (dump_file)
591 0 : fprintf (dump_file, " operand can throw externally\n");
592 68927 : local->can_throw = true;
593 : }
594 : }
595 : }
596 :
597 : /* The const and pure flags are set by a variety of places in the
598 : compiler (including here). If someone has already set the flags
599 : for the callee, (such as for some of the builtins) we will use
600 : them, otherwise we will compute our own information.
601 :
602 : Const and pure functions have less clobber effects than other
603 : functions so we process these first. Otherwise if it is a call
604 : outside the compilation unit or an indirect call we punt. This
605 : leaves local calls which will be processed by following the call
606 : graph. */
607 15604516 : if (callee_t)
608 : {
609 14596016 : bool call_looping;
610 :
611 14596016 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
612 14596016 : && !nonfreeing_call_p (call))
613 724521 : local->can_free = true;
614 :
615 14596016 : if (builtin_safe_for_const_function_p (&call_looping, callee_t))
616 : {
617 356712 : worse_state (&local->pure_const_state, &local->looping,
618 : IPA_CONST, call_looping,
619 : NULL, NULL);
620 356712 : return;
621 : }
622 : /* When bad things happen to bad functions, they cannot be const
623 : or pure. */
624 14239304 : if (setjmp_call_p (callee_t))
625 : {
626 2700 : if (dump_file)
627 0 : fprintf (dump_file, " setjmp is not const/pure\n");
628 2700 : local->looping = true;
629 2700 : local->pure_const_state = IPA_NEITHER;
630 : }
631 :
632 14239304 : if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
633 3361890 : switch (DECL_FUNCTION_CODE (callee_t))
634 : {
635 1838 : case BUILT_IN_LONGJMP:
636 1838 : case BUILT_IN_NONLOCAL_GOTO:
637 1838 : if (dump_file)
638 0 : fprintf (dump_file,
639 : " longjmp and nonlocal goto is not const/pure\n");
640 1838 : local->pure_const_state = IPA_NEITHER;
641 1838 : local->looping = true;
642 1838 : break;
643 : default:
644 : break;
645 : }
646 : }
647 1008500 : else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
648 157295 : local->can_free = true;
649 :
650 : /* When not in IPA mode, we can still handle self recursion. */
651 15247804 : if (!ipa && callee_t
652 15247804 : && recursive_call_p (current_function_decl, callee_t))
653 : {
654 19983 : if (dump_file)
655 0 : fprintf (dump_file, " Recursive call can loop.\n");
656 19983 : local->looping = true;
657 : }
658 : /* Either callee is unknown or we are doing local analysis.
659 : Look to see if there are any bits available for the callee (such as by
660 : declaration or because it is builtin) and process solely on the basis of
661 : those bits. Handle internal calls always, those calls don't have
662 : corresponding cgraph edges and thus aren't processed during
663 : the propagation. */
664 15227821 : else if (!ipa || gimple_call_internal_p (call))
665 : {
666 9963014 : enum pure_const_state_e call_state;
667 9963014 : bool call_looping;
668 9963014 : if (possibly_throws && cfun->can_throw_non_call_exceptions)
669 : {
670 2178410 : if (dump_file)
671 0 : fprintf (dump_file, " can throw; looping\n");
672 2178410 : local->looping = true;
673 : }
674 9963014 : if (possibly_throws_externally)
675 : {
676 2787162 : if (dump_file)
677 : {
678 0 : fprintf (dump_file, " can throw externally to lp %i\n",
679 : lookup_stmt_eh_lp (call));
680 0 : if (callee_t)
681 0 : fprintf (dump_file, " callee:%s\n",
682 0 : IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
683 : }
684 2787162 : local->can_throw = true;
685 : }
686 9963014 : if (dump_file && (dump_flags & TDF_DETAILS))
687 22 : fprintf (dump_file, " checking flags for call:");
688 9963014 : state_from_flags (&call_state, &call_looping, flags,
689 9963014 : ((flags & (ECF_NORETURN | ECF_NOTHROW))
690 : == (ECF_NORETURN | ECF_NOTHROW))
691 9963014 : || (!flag_exceptions && (flags & ECF_NORETURN)));
692 9963014 : worse_state (&local->pure_const_state, &local->looping,
693 : call_state, call_looping, NULL, NULL);
694 : }
695 : /* Direct functions calls are handled by IPA propagation. */
696 : }
697 :
698 : /* Wrapper around check_decl for loads in local more. */
699 :
700 : static bool
701 12484588 : check_load (gimple *, tree op, tree, void *data)
702 : {
703 12484588 : if (DECL_P (op))
704 5212590 : check_decl ((funct_state)data, op, false, false);
705 : else
706 7271998 : check_op ((funct_state)data, op, false);
707 12484588 : return false;
708 : }
709 :
710 : /* Wrapper around check_decl for stores in local more. */
711 :
712 : static bool
713 13484187 : check_store (gimple *, tree op, tree, void *data)
714 : {
715 13484187 : if (DECL_P (op))
716 7779220 : check_decl ((funct_state)data, op, true, false);
717 : else
718 5704967 : check_op ((funct_state)data, op, true);
719 13484187 : return false;
720 : }
721 :
722 : /* Wrapper around check_decl for loads in ipa mode. */
723 :
724 : static bool
725 6236870 : check_ipa_load (gimple *, tree op, tree, void *data)
726 : {
727 6236870 : if (DECL_P (op))
728 2980513 : check_decl ((funct_state)data, op, false, true);
729 : else
730 3256357 : check_op ((funct_state)data, op, false);
731 6236870 : return false;
732 : }
733 :
734 : /* Wrapper around check_decl for stores in ipa mode. */
735 :
736 : static bool
737 6494576 : check_ipa_store (gimple *, tree op, tree, void *data)
738 : {
739 6494576 : if (DECL_P (op))
740 4088861 : check_decl ((funct_state)data, op, true, true);
741 : else
742 2405715 : check_op ((funct_state)data, op, true);
743 6494576 : return false;
744 : }
745 :
746 : /* Look into pointer pointed to by GSIP and figure out what interesting side
747 : effects it has. */
748 : static void
749 184286159 : check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
750 : {
751 184286159 : gimple *stmt = gsi_stmt (*gsip);
752 :
753 184286159 : if (is_gimple_debug (stmt))
754 : return;
755 :
756 : /* Do consider clobber as side effects before IPA, so we rather inline
757 : C++ destructors and keep clobber semantics than eliminate them.
758 :
759 : Similar logic is in ipa-modref.
760 :
761 : TODO: We may get smarter during early optimizations on these and let
762 : functions containing only clobbers to be optimized more. This is a common
763 : case of C++ destructors. */
764 :
765 92521765 : if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
766 : return;
767 :
768 90312501 : if (dump_file)
769 : {
770 1702 : fprintf (dump_file, " scanning: ");
771 1702 : print_gimple_stmt (dump_file, stmt, 0);
772 : }
773 :
774 167297523 : if (gimple_has_volatile_ops (stmt) && !gimple_clobber_p (stmt))
775 : {
776 1469346 : local->pure_const_state = IPA_NEITHER;
777 1469346 : if (dump_file)
778 33 : fprintf (dump_file, " Volatile stmt is not const/pure\n");
779 : }
780 :
781 : /* Look for loads and stores. */
782 151262553 : walk_stmt_load_store_ops (stmt, local,
783 : ipa ? check_ipa_load : check_load,
784 : ipa ? check_ipa_store : check_store);
785 :
786 90312501 : if (gimple_code (stmt) != GIMPLE_CALL && stmt_could_throw_p (cfun, stmt))
787 : {
788 2610915 : if (cfun->can_throw_non_call_exceptions)
789 : {
790 2294489 : if (dump_file)
791 0 : fprintf (dump_file, " can throw; looping\n");
792 2294489 : local->looping = true;
793 : }
794 2610915 : if (stmt_can_throw_external (cfun, stmt))
795 : {
796 2254656 : if (dump_file)
797 6 : fprintf (dump_file, " can throw externally\n");
798 2254656 : local->can_throw = true;
799 : }
800 : else
801 356259 : if (dump_file)
802 0 : fprintf (dump_file, " can throw\n");
803 : }
804 90312501 : switch (gimple_code (stmt))
805 : {
806 15604516 : case GIMPLE_CALL:
807 15604516 : check_call (local, as_a <gcall *> (stmt), ipa);
808 15604516 : break;
809 1592432 : case GIMPLE_LABEL:
810 1592432 : if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
811 : /* Target of long jump. */
812 : {
813 764 : if (dump_file)
814 0 : fprintf (dump_file, " nonlocal label is not const/pure\n");
815 764 : local->pure_const_state = IPA_NEITHER;
816 : }
817 : break;
818 261632 : case GIMPLE_ASM:
819 261632 : if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
820 : {
821 115029 : if (dump_file)
822 3 : fprintf (dump_file, " memory asm clobber is not const/pure\n");
823 : /* Abandon all hope, ye who enter here. */
824 115029 : local->pure_const_state = IPA_NEITHER;
825 115029 : local->can_free = true;
826 : }
827 261632 : if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
828 : {
829 238891 : if (dump_file)
830 3 : fprintf (dump_file, " volatile is not const/pure\n");
831 : /* Abandon all hope, ye who enter here. */
832 238891 : local->pure_const_state = IPA_NEITHER;
833 238891 : local->looping = true;
834 238891 : local->can_free = true;
835 : }
836 : return;
837 : default:
838 : break;
839 : }
840 : }
841 :
842 : /* Check that RETVAL is used only in STMT and in comparisons against 0.
843 : RETVAL is return value of the function and STMT is return stmt. */
844 :
845 : static bool
846 479754 : check_retval_uses (tree retval, gimple *stmt)
847 : {
848 479754 : imm_use_iterator use_iter;
849 479754 : gimple *use_stmt;
850 :
851 1512823 : FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
852 694894 : if (gcond *cond = dyn_cast<gcond *> (use_stmt))
853 : {
854 12883 : tree op2 = gimple_cond_rhs (cond);
855 12883 : if (!integer_zerop (op2))
856 : return false;
857 : }
858 682011 : else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
859 : {
860 93708 : enum tree_code code = gimple_assign_rhs_code (ga);
861 93708 : if (TREE_CODE_CLASS (code) != tcc_comparison)
862 : return false;
863 2626 : if (!integer_zerop (gimple_assign_rhs2 (ga)))
864 : return false;
865 : }
866 588303 : else if (is_gimple_debug (use_stmt))
867 : ;
868 472517 : else if (use_stmt != stmt)
869 141579 : return false;
870 :
871 338175 : return true;
872 : }
873 :
874 : /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
875 : attribute. Currently this function does a very conservative analysis.
876 : FUN is considered to be a candidate if
877 : 1) It returns a value of pointer type.
878 : 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
879 : a phi, and element of phi is either NULL or
880 : SSA_NAME_DEF_STMT(element) is function call.
881 : 3) The return-value has immediate uses only within comparisons (gcond or gassign)
882 : and return_stmt (and likewise a phi arg has immediate use only within comparison
883 : or the phi stmt). */
884 :
885 : #define DUMP_AND_RETURN(reason) \
886 : { \
887 : if (dump_file && (dump_flags & TDF_DETAILS)) \
888 : fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
889 : (node->dump_name ()), (reason)); \
890 : return false; \
891 : }
892 :
893 : static bool
894 408659 : malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
895 : bitmap visited)
896 : {
897 408659 : cgraph_node *node = cgraph_node::get_create (fun->decl);
898 408659 : if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
899 : return true;
900 :
901 408649 : if (!check_retval_uses (retval, ret_stmt))
902 112601 : DUMP_AND_RETURN("Return value has uses outside return stmt"
903 : " and comparisons against 0.")
904 :
905 296048 : gimple *def = SSA_NAME_DEF_STMT (retval);
906 :
907 296048 : if (gcall *call_stmt = dyn_cast<gcall *> (def))
908 : {
909 74658 : tree callee_decl = gimple_call_fndecl (call_stmt);
910 74658 : if (!callee_decl)
911 : return false;
912 :
913 137092 : if (!ipa && !DECL_IS_MALLOC (callee_decl))
914 26524 : DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
915 : " non-ipa mode.")
916 :
917 46678 : cgraph_edge *cs = node->get_edge (call_stmt);
918 46678 : if (cs)
919 : {
920 9312 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
921 9312 : es->is_return_callee_uncaptured = true;
922 : }
923 : }
924 :
925 221390 : else if (gphi *phi = dyn_cast<gphi *> (def))
926 : {
927 : bool all_args_zero = true;
928 100521 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
929 : {
930 94970 : tree arg = gimple_phi_arg_def (phi, i);
931 94970 : if (integer_zerop (arg))
932 21180 : continue;
933 :
934 73790 : all_args_zero = false;
935 73790 : if (TREE_CODE (arg) != SSA_NAME)
936 2685 : DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
937 71105 : if (!check_retval_uses (arg, phi))
938 28978 : DUMP_AND_RETURN ("phi arg has uses outside phi"
939 : " and comparisons against 0.")
940 :
941 42127 : gimple *arg_def = SSA_NAME_DEF_STMT (arg);
942 42127 : if (is_a<gphi *> (arg_def))
943 : {
944 3746 : if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
945 3654 : DUMP_AND_RETURN ("nested phi fail")
946 92 : continue;
947 : }
948 :
949 38381 : gcall *call_stmt = dyn_cast<gcall *> (arg_def);
950 38381 : if (!call_stmt)
951 18468 : DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
952 :
953 19913 : tree callee_decl = gimple_call_fndecl (call_stmt);
954 19913 : if (!callee_decl)
955 : return false;
956 27191 : if (!ipa && !DECL_IS_MALLOC (callee_decl))
957 6902 : DUMP_AND_RETURN("callee_decl does not have malloc attribute"
958 : " for non-ipa mode.")
959 :
960 10114 : cgraph_edge *cs = node->get_edge (call_stmt);
961 10114 : if (cs)
962 : {
963 6995 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
964 6995 : es->is_return_callee_uncaptured = true;
965 : }
966 : }
967 :
968 5551 : if (all_args_zero)
969 48 : DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
970 : }
971 :
972 : else
973 152255 : DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
974 :
975 : return true;
976 : }
977 :
978 : static bool
979 5990093 : malloc_candidate_p (function *fun, bool ipa)
980 : {
981 5990093 : basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
982 5990093 : edge e;
983 5990093 : edge_iterator ei;
984 5990093 : cgraph_node *node = cgraph_node::get_create (fun->decl);
985 :
986 5990093 : if (EDGE_COUNT (exit_block->preds) == 0
987 5986573 : || !flag_delete_null_pointer_checks)
988 : return false;
989 :
990 5856152 : auto_bitmap visited;
991 5908251 : FOR_EACH_EDGE (e, ei, exit_block->preds)
992 : {
993 5856152 : gimple_stmt_iterator gsi = gsi_last_bb (e->src);
994 11658962 : greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
995 :
996 5854909 : if (!ret_stmt)
997 5856152 : return false;
998 :
999 5854909 : tree retval = gimple_return_retval (ret_stmt);
1000 5854909 : if (!retval)
1001 2648284 : DUMP_AND_RETURN("No return value.")
1002 :
1003 3206625 : if (TREE_CODE (retval) != SSA_NAME
1004 3206625 : || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
1005 2801712 : DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
1006 :
1007 404913 : if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
1008 : return false;
1009 : }
1010 :
1011 52099 : if (dump_file && (dump_flags & TDF_DETAILS))
1012 8 : fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1013 4 : IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1014 : return true;
1015 5856152 : }
1016 :
1017 : #undef DUMP_AND_RETURN
1018 :
1019 : /* Return true if function is known to be finite. */
1020 :
1021 : bool
1022 4280459 : finite_function_p ()
1023 : {
1024 : /* Const functions cannot have back edges (an
1025 : indication of possible infinite loop side
1026 : effect. */
1027 4280459 : bool finite = true;
1028 4280459 : if (mark_dfs_back_edges ())
1029 : {
1030 : /* Preheaders are needed for SCEV to work.
1031 : Simple latches and recorded exits improve chances that loop will
1032 : proved to be finite in testcases such as in loop-15.c
1033 : and loop-24.c */
1034 426399 : loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1035 : | LOOPS_HAVE_SIMPLE_LATCHES
1036 : | LOOPS_HAVE_RECORDED_EXITS);
1037 426399 : if (dump_file && (dump_flags & TDF_DETAILS))
1038 0 : flow_loops_dump (dump_file, NULL, 0);
1039 426399 : if (mark_irreducible_loops ())
1040 : {
1041 2066 : if (dump_file)
1042 0 : fprintf (dump_file, " has irreducible loops\n");
1043 : finite = false;
1044 : }
1045 : else
1046 : {
1047 424333 : scev_initialize ();
1048 2113026 : for (auto loop : loops_list (cfun, 0))
1049 915945 : if (!finite_loop_p (loop))
1050 : {
1051 75918 : if (dump_file)
1052 1 : fprintf (dump_file, " cannot prove finiteness of "
1053 : "loop %i\n", loop->num);
1054 : finite =false;
1055 : break;
1056 424333 : }
1057 424333 : scev_finalize ();
1058 : }
1059 426399 : loop_optimizer_finalize ();
1060 : }
1061 4280459 : return finite;
1062 : }
1063 :
1064 : /* This is the main routine for finding the reference patterns for
1065 : global variables within a function FN. */
1066 :
1067 : static funct_state
1068 4728163 : analyze_function (struct cgraph_node *fn, bool ipa)
1069 : {
1070 4728163 : tree decl = fn->decl;
1071 4728163 : funct_state l;
1072 4728163 : basic_block this_block;
1073 :
1074 4728163 : l = XCNEW (class funct_state_d);
1075 4728163 : l->pure_const_state = IPA_CONST;
1076 4728163 : l->state_previously_known = IPA_NEITHER;
1077 4728163 : l->looping_previously_known = true;
1078 4728163 : l->looping = false;
1079 4728163 : l->can_throw = false;
1080 4728163 : l->can_free = false;
1081 4728163 : state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1082 4728163 : flags_from_decl_or_type (fn->decl),
1083 4728163 : fn->cannot_return_p ());
1084 :
1085 4728163 : if (fn->thunk || fn->alias)
1086 : {
1087 : /* Thunk gets propagated through, so nothing interesting happens. */
1088 64556 : gcc_assert (ipa);
1089 64556 : if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
1090 729 : l->pure_const_state = IPA_NEITHER;
1091 64556 : return l;
1092 : }
1093 :
1094 4663607 : if (dump_file)
1095 : {
1096 190 : fprintf (dump_file, "\n\n local analysis of %s\n ",
1097 : fn->dump_name ());
1098 : }
1099 :
1100 4663607 : push_cfun (DECL_STRUCT_FUNCTION (decl));
1101 :
1102 35950750 : FOR_EACH_BB_FN (this_block, cfun)
1103 : {
1104 31352691 : gimple_stmt_iterator gsi;
1105 31352691 : struct walk_stmt_info wi;
1106 :
1107 31352691 : memset (&wi, 0, sizeof (wi));
1108 62705382 : for (gsi = gsi_start_bb (this_block);
1109 215573302 : !gsi_end_p (gsi);
1110 184220611 : gsi_next (&gsi))
1111 : {
1112 : /* NULL memory accesses terminates BB. These accesses are known
1113 : to trip undefined behaviour. gimple-ssa-isolate-paths turns them
1114 : to volatile accesses and adds builtin_trap call which would
1115 : confuse us otherwise. */
1116 184288536 : if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
1117 : null_pointer_node))
1118 : {
1119 2377 : if (dump_file)
1120 0 : fprintf (dump_file, " NULL memory access; terminating BB%s\n",
1121 0 : flag_non_call_exceptions ? "; looping" : "");
1122 2377 : if (flag_non_call_exceptions)
1123 : {
1124 426 : l->looping = true;
1125 426 : if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
1126 : {
1127 330 : if (dump_file)
1128 0 : fprintf (dump_file, " can throw externally\n");
1129 330 : l->can_throw = true;
1130 : }
1131 : }
1132 : break;
1133 : }
1134 184286159 : check_stmt (&gsi, l, ipa);
1135 184286159 : if (l->pure_const_state == IPA_NEITHER
1136 117678973 : && l->looping
1137 95615766 : && l->can_throw
1138 46353293 : && l->can_free)
1139 65548 : goto end;
1140 : }
1141 : }
1142 :
1143 4598059 : end:
1144 4663607 : if (l->pure_const_state != IPA_NEITHER
1145 2043622 : && !l->looping
1146 6524458 : && !finite_function_p ())
1147 24141 : l->looping = true;
1148 :
1149 4663607 : if (dump_file && (dump_flags & TDF_DETAILS))
1150 22 : fprintf (dump_file, " checking previously known:");
1151 :
1152 4663607 : better_state (&l->pure_const_state, &l->looping,
1153 : l->state_previously_known,
1154 4663607 : l->looping_previously_known);
1155 4663607 : if (TREE_NOTHROW (decl))
1156 3164279 : l->can_throw = false;
1157 :
1158 4663607 : l->malloc_state = STATE_MALLOC_BOTTOM;
1159 4663607 : if (DECL_IS_MALLOC (decl))
1160 10026 : l->malloc_state = STATE_MALLOC;
1161 4653581 : else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1162 11967 : l->malloc_state = STATE_MALLOC_TOP;
1163 4641614 : else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1164 40132 : l->malloc_state = STATE_MALLOC;
1165 :
1166 4663607 : pop_cfun ();
1167 4663607 : if (dump_file)
1168 : {
1169 190 : if (l->looping)
1170 50 : fprintf (dump_file, "Function is locally looping.\n");
1171 190 : if (l->can_throw)
1172 6 : fprintf (dump_file, "Function is locally throwing.\n");
1173 190 : if (l->pure_const_state == IPA_CONST)
1174 103 : fprintf (dump_file, "Function is locally const.\n");
1175 190 : if (l->pure_const_state == IPA_PURE)
1176 7 : fprintf (dump_file, "Function is locally pure.\n");
1177 190 : if (l->can_free)
1178 3 : fprintf (dump_file, "Function can locally free.\n");
1179 190 : if (l->malloc_state == STATE_MALLOC)
1180 8 : fprintf (dump_file, "Function is locally malloc.\n");
1181 : }
1182 : return l;
1183 : }
1184 :
1185 : void
1186 19200 : funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1187 : {
1188 : /* There are some shared nodes, in particular the initializers on
1189 : static declarations. We do not need to scan them more than once
1190 : since all we would be interested in are the addressof
1191 : operations. */
1192 19200 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1193 : {
1194 19199 : funct_state_d *a = analyze_function (node, true);
1195 19199 : new (state) funct_state_d (*a);
1196 19199 : free (a);
1197 : }
1198 : else
1199 : /* Do not keep stale summaries. */
1200 1 : funct_state_summaries->remove (node);
1201 19200 : }
1202 :
1203 : /* Called when new clone is inserted to callgraph late. */
1204 :
1205 : void
1206 1230416 : funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1207 : funct_state_d *src_data,
1208 : funct_state_d *dst_data)
1209 : {
1210 1230416 : new (dst_data) funct_state_d (*src_data);
1211 1230416 : if (dst_data->malloc_state == STATE_MALLOC
1212 1230416 : && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1213 11 : dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1214 1230416 : }
1215 :
1216 :
1217 : void
1218 161929 : pass_ipa_pure_const::
1219 : register_hooks (void)
1220 : {
1221 161929 : if (init_p)
1222 : return;
1223 :
1224 161929 : init_p = true;
1225 :
1226 161929 : funct_state_summaries = new funct_state_summary_t (symtab);
1227 : }
1228 :
1229 :
1230 : /* Analyze each function in the cgraph to see if it is locally PURE or
1231 : CONST. */
1232 :
1233 : static void
1234 149530 : pure_const_generate_summary (void)
1235 : {
1236 149530 : struct cgraph_node *node;
1237 :
1238 149530 : pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1239 149530 : pass->register_hooks ();
1240 :
1241 : /* Process all of the functions.
1242 :
1243 : We process AVAIL_INTERPOSABLE functions. We cannot use the results
1244 : by default, but the info can be used at LTO with -fwhole-program or
1245 : when function got cloned and the clone is AVAILABLE. */
1246 :
1247 1552895 : FOR_EACH_DEFINED_FUNCTION (node)
1248 1403365 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1249 : {
1250 1403036 : funct_state_d *a = analyze_function (node, true);
1251 1403036 : new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1252 1403036 : free (a);
1253 : }
1254 149530 : }
1255 :
1256 :
1257 : /* Serialize the ipa info for lto. */
1258 :
1259 : static void
1260 20108 : pure_const_write_summary (void)
1261 : {
1262 20108 : struct cgraph_node *node;
1263 20108 : struct lto_simple_output_block *ob
1264 20108 : = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1265 20108 : unsigned int count = 0;
1266 20108 : lto_symtab_encoder_iterator lsei;
1267 20108 : lto_symtab_encoder_t encoder;
1268 :
1269 20108 : encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1270 :
1271 114101 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1272 93993 : lsei_next_function_in_partition (&lsei))
1273 : {
1274 93993 : node = lsei_cgraph_node (lsei);
1275 93993 : if (node->definition && funct_state_summaries->exists (node))
1276 93816 : count++;
1277 : }
1278 :
1279 20108 : streamer_write_uhwi_stream (ob->main_stream, count);
1280 :
1281 : /* Process all of the functions. */
1282 114101 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1283 93993 : lsei_next_function_in_partition (&lsei))
1284 : {
1285 93993 : node = lsei_cgraph_node (lsei);
1286 93993 : funct_state_d *fs = funct_state_summaries->get (node);
1287 93993 : if (node->definition && fs != NULL)
1288 : {
1289 93816 : struct bitpack_d bp;
1290 93816 : int node_ref;
1291 93816 : lto_symtab_encoder_t encoder;
1292 :
1293 93816 : encoder = ob->decl_state->symtab_node_encoder;
1294 93816 : node_ref = lto_symtab_encoder_encode (encoder, node);
1295 93816 : streamer_write_uhwi_stream (ob->main_stream, node_ref);
1296 :
1297 : /* Note that flags will need to be read in the opposite
1298 : order as we are pushing the bitflags into FLAGS. */
1299 93816 : bp = bitpack_create (ob->main_stream);
1300 93816 : bp_pack_value (&bp, fs->pure_const_state, 2);
1301 93816 : bp_pack_value (&bp, fs->state_previously_known, 2);
1302 93816 : bp_pack_value (&bp, fs->looping_previously_known, 1);
1303 93816 : bp_pack_value (&bp, fs->looping, 1);
1304 93816 : bp_pack_value (&bp, fs->can_throw, 1);
1305 93816 : bp_pack_value (&bp, fs->can_free, 1);
1306 93816 : bp_pack_value (&bp, fs->malloc_state, 2);
1307 93816 : streamer_write_bitpack (&bp);
1308 : }
1309 : }
1310 :
1311 20108 : lto_destroy_simple_output_block (ob);
1312 20108 : }
1313 :
1314 :
1315 : /* Deserialize the ipa info for lto. */
1316 :
1317 : static void
1318 12399 : pure_const_read_summary (void)
1319 : {
1320 12399 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1321 12399 : struct lto_file_decl_data *file_data;
1322 12399 : unsigned int j = 0;
1323 :
1324 12399 : pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1325 12399 : pass->register_hooks ();
1326 :
1327 38252 : while ((file_data = file_data_vec[j++]))
1328 : {
1329 13454 : const char *data;
1330 13454 : size_t len;
1331 13454 : class lto_input_block *ib
1332 13454 : = lto_create_simple_input_block (file_data,
1333 : LTO_section_ipa_pure_const,
1334 : &data, &len);
1335 13454 : if (ib)
1336 : {
1337 11002 : unsigned int i;
1338 11002 : unsigned int count = streamer_read_uhwi (ib);
1339 :
1340 89054 : for (i = 0; i < count; i++)
1341 : {
1342 78052 : unsigned int index;
1343 78052 : struct cgraph_node *node;
1344 78052 : struct bitpack_d bp;
1345 78052 : funct_state fs;
1346 78052 : lto_symtab_encoder_t encoder;
1347 :
1348 78052 : index = streamer_read_uhwi (ib);
1349 78052 : encoder = file_data->symtab_node_encoder;
1350 78052 : node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1351 : index));
1352 :
1353 78052 : fs = funct_state_summaries->get_create (node);
1354 : /* Note that the flags must be read in the opposite
1355 : order in which they were written (the bitflags were
1356 : pushed into FLAGS). */
1357 78052 : bp = streamer_read_bitpack (ib);
1358 78052 : fs->pure_const_state
1359 78052 : = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1360 78052 : fs->state_previously_known
1361 78052 : = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1362 78052 : fs->looping_previously_known = bp_unpack_value (&bp, 1);
1363 78052 : fs->looping = bp_unpack_value (&bp, 1);
1364 78052 : fs->can_throw = bp_unpack_value (&bp, 1);
1365 78052 : fs->can_free = bp_unpack_value (&bp, 1);
1366 78052 : fs->malloc_state
1367 78052 : = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1368 :
1369 78052 : if (dump_file)
1370 : {
1371 0 : int flags = flags_from_decl_or_type (node->decl);
1372 0 : fprintf (dump_file, "Read info for %s ", node->dump_name ());
1373 0 : if (flags & ECF_CONST)
1374 0 : fprintf (dump_file, " const");
1375 0 : if (flags & ECF_PURE)
1376 0 : fprintf (dump_file, " pure");
1377 0 : if (flags & ECF_NOTHROW)
1378 0 : fprintf (dump_file, " nothrow");
1379 0 : fprintf (dump_file, "\n pure const state: %s\n",
1380 0 : pure_const_names[fs->pure_const_state]);
1381 0 : fprintf (dump_file, " previously known state: %s\n",
1382 0 : pure_const_names[fs->state_previously_known]);
1383 0 : if (fs->looping)
1384 0 : fprintf (dump_file," function is locally looping\n");
1385 0 : if (fs->looping_previously_known)
1386 0 : fprintf (dump_file," function is previously known looping\n");
1387 0 : if (fs->can_throw)
1388 0 : fprintf (dump_file," function is locally throwing\n");
1389 0 : if (fs->can_free)
1390 0 : fprintf (dump_file," function can locally free\n");
1391 0 : fprintf (dump_file, "\n malloc state: %s\n",
1392 0 : malloc_state_names[fs->malloc_state]);
1393 : }
1394 : }
1395 :
1396 11002 : lto_destroy_simple_input_block (file_data,
1397 : LTO_section_ipa_pure_const,
1398 : ib, data, len);
1399 : }
1400 : }
1401 12399 : }
1402 :
1403 : /* We only propagate across edges that can throw externally and their callee
1404 : is not interposable. */
1405 :
1406 : static bool
1407 7453714 : ignore_edge_for_nothrow (struct cgraph_edge *e)
1408 : {
1409 7453714 : if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1410 : return true;
1411 :
1412 2171450 : enum availability avail;
1413 2171450 : cgraph_node *ultimate_target
1414 2171450 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1415 2171450 : if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1416 : return true;
1417 763622 : return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1418 206494 : && !e->callee->binds_to_current_def_p (e->caller))
1419 763588 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1420 1526090 : || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1421 : }
1422 :
1423 : /* Return true if NODE is self recursive function.
1424 : Indirectly recursive functions appears as non-trivial strongly
1425 : connected components, so we need to care about self recursion
1426 : only. */
1427 :
1428 : static bool
1429 2125270 : self_recursive_p (struct cgraph_node *node)
1430 : {
1431 2125270 : struct cgraph_edge *e;
1432 8273615 : for (e = node->callees; e; e = e->next_callee)
1433 6152308 : if (e->callee->function_symbol () == node)
1434 : return true;
1435 : return false;
1436 : }
1437 :
1438 : /* Return true if N is cdtor that is not const or pure. In this case we may
1439 : need to remove unreachable function if it is marked const/pure. */
1440 :
1441 : static bool
1442 51453 : cdtor_p (cgraph_node *n, void *)
1443 : {
1444 51453 : if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1445 3 : return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1446 3 : || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1447 : return false;
1448 : }
1449 :
1450 : /* Skip edges from and to nodes without ipa_pure_const enabled.
1451 : Ignore not available symbols. */
1452 :
1453 : static bool
1454 7453714 : ignore_edge_for_pure_const (struct cgraph_edge *e)
1455 : {
1456 7453714 : enum availability avail;
1457 7453714 : cgraph_node *ultimate_target
1458 7453714 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1459 :
1460 7453714 : return (avail <= AVAIL_INTERPOSABLE
1461 2580475 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1462 10025403 : || !opt_for_fn (ultimate_target->decl,
1463 7453714 : flag_ipa_pure_const));
1464 : }
1465 :
1466 : /* Return true if function should be skipped for local pure const analysis. */
1467 :
1468 : static bool
1469 4713967 : skip_function_for_local_pure_const (struct cgraph_node *node)
1470 : {
1471 : /* Because we do not schedule pass_fixup_cfg over whole program after early
1472 : optimizations we must not promote functions that are called by already
1473 : processed functions. */
1474 :
1475 4713967 : if (function_called_by_processed_nodes_p ())
1476 : {
1477 3433 : if (dump_file)
1478 1 : fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1479 3433 : return true;
1480 : }
1481 : /* Save some work and do not analyze functions which are interposable and
1482 : do not have any non-interposable aliases. */
1483 4710534 : if (node->get_availability () <= AVAIL_INTERPOSABLE
1484 4710534 : && !node->has_aliases_p ())
1485 : {
1486 197414 : if (dump_file)
1487 0 : fprintf (dump_file,
1488 : "Function is interposable; not analyzing.\n");
1489 197414 : return true;
1490 : }
1491 : return false;
1492 : }
1493 :
1494 : /* Make function const and output warning. If LOCAL is true,
1495 : return true if anything changed. Otherwise return true if
1496 : we may have introduced removale ctors. */
1497 :
1498 : bool
1499 1565470 : ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1500 : {
1501 1565470 : bool cdtor = false;
1502 :
1503 1565470 : if (TREE_READONLY (node->decl)
1504 1565470 : && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1505 : return false;
1506 926781 : warn_function_const (node->decl, !looping);
1507 926781 : if (local && skip_function_for_local_pure_const (node))
1508 : return false;
1509 907697 : if (dump_file)
1510 58 : fprintf (dump_file, "Function found to be %sconst: %s\n",
1511 : looping ? "looping " : "",
1512 : node->dump_name ());
1513 907697 : if (!local && !looping)
1514 44748 : cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1515 907697 : if (!dbg_cnt (ipa_attr))
1516 : return false;
1517 907697 : if (node->set_const_flag (true, looping))
1518 : {
1519 508620 : if (dump_file)
1520 58 : fprintf (dump_file,
1521 : "Declaration updated to be %sconst: %s\n",
1522 : looping ? "looping " : "",
1523 : node->dump_name ());
1524 508620 : if (local)
1525 : return true;
1526 2816 : return cdtor;
1527 : }
1528 : return false;
1529 : }
1530 :
1531 : /* Make function const and output warning. If LOCAL is true,
1532 : return true if anything changed. Otherwise return true if
1533 : we may have introduced removale ctors. */
1534 :
1535 : bool
1536 1057875 : ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1537 : {
1538 1057875 : bool cdtor = false;
1539 :
1540 1057875 : if (TREE_READONLY (node->decl)
1541 1057875 : || (DECL_PURE_P (node->decl)
1542 684916 : && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
1543 : return false;
1544 372178 : warn_function_pure (node->decl, !looping);
1545 372178 : if (local && skip_function_for_local_pure_const (node))
1546 : return false;
1547 363161 : if (dump_file)
1548 8 : fprintf (dump_file, "Function found to be %spure: %s\n",
1549 : looping ? "looping " : "",
1550 : node->dump_name ());
1551 363161 : if (!local && !looping)
1552 3437 : cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1553 363161 : if (!dbg_cnt (ipa_attr))
1554 : return false;
1555 363161 : if (node->set_pure_flag (true, looping))
1556 : {
1557 352372 : if (dump_file)
1558 8 : fprintf (dump_file,
1559 : "Declaration updated to be %spure: %s\n",
1560 : looping ? "looping " : "",
1561 : node->dump_name ());
1562 352372 : if (local)
1563 : return true;
1564 7770 : return cdtor;
1565 : }
1566 : return false;
1567 : }
1568 :
1569 : /* Produce transitive closure over the callgraph and compute pure/const
1570 : attributes. */
1571 :
1572 : static bool
1573 152682 : propagate_pure_const (void)
1574 : {
1575 152682 : struct cgraph_node *node;
1576 152682 : struct cgraph_node *w;
1577 152682 : struct cgraph_node **order =
1578 152682 : XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1579 152682 : int order_pos;
1580 152682 : int i;
1581 152682 : struct ipa_dfs_info * w_info;
1582 152682 : bool remove_p = false;
1583 :
1584 152682 : order_pos = ipa_reduced_postorder (order, true,
1585 : ignore_edge_for_pure_const);
1586 152682 : if (dump_file)
1587 : {
1588 29 : cgraph_node::dump_cgraph (dump_file);
1589 29 : ipa_print_order (dump_file, "reduced", order, order_pos);
1590 : }
1591 :
1592 : /* Propagate the local information through the call graph to produce
1593 : the global information. All the nodes within a cycle will have
1594 : the same info so we collapse cycles first. Then we can do the
1595 : propagation in one pass from the leaves to the roots. */
1596 2594691 : for (i = 0; i < order_pos; i++ )
1597 : {
1598 2442009 : enum pure_const_state_e pure_const_state = IPA_CONST;
1599 2442009 : bool looping = false;
1600 2442009 : int count = 0;
1601 2442009 : node = order[i];
1602 :
1603 2442009 : if (node->alias)
1604 38146 : continue;
1605 :
1606 2403863 : if (dump_file && (dump_flags & TDF_DETAILS))
1607 5 : fprintf (dump_file, "Starting cycle\n");
1608 :
1609 : /* Find the worst state for any node in the cycle. */
1610 : w = node;
1611 4043149 : while (w && pure_const_state != IPA_NEITHER)
1612 : {
1613 2406469 : struct cgraph_edge *e;
1614 2406469 : struct cgraph_edge *ie;
1615 2406469 : int i;
1616 2406469 : struct ipa_ref *ref = NULL;
1617 :
1618 2406469 : funct_state w_l = funct_state_summaries->get_create (w);
1619 2406469 : if (dump_file && (dump_flags & TDF_DETAILS))
1620 6 : fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1621 : w->dump_name (),
1622 6 : pure_const_names[w_l->pure_const_state],
1623 6 : w_l->looping);
1624 :
1625 : /* First merge in function body properties.
1626 : We are safe to pass NULL as FROM and TO because we will take care
1627 : of possible interposition when walking callees. */
1628 2406469 : worse_state (&pure_const_state, &looping,
1629 2406469 : w_l->pure_const_state, w_l->looping,
1630 : NULL, NULL);
1631 2406469 : if (pure_const_state == IPA_NEITHER)
1632 : break;
1633 :
1634 1639286 : count++;
1635 :
1636 : /* We consider recursive cycles as possibly infinite.
1637 : This might be relaxed since infinite recursion leads to stack
1638 : overflow. */
1639 1639286 : if (count > 1)
1640 2606 : looping = true;
1641 :
1642 : /* Now walk the edges and merge in callee properties. */
1643 2539929 : for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1644 900643 : e = e->next_callee)
1645 : {
1646 1972042 : enum availability avail;
1647 1972042 : struct cgraph_node *y = e->callee->
1648 3944084 : function_or_virtual_thunk_symbol (&avail,
1649 1972042 : e->caller);
1650 1972042 : enum pure_const_state_e edge_state = IPA_CONST;
1651 1972042 : bool edge_looping = false;
1652 :
1653 1972042 : if (e->recursive_p ())
1654 5517 : looping = true;
1655 :
1656 1972042 : if (dump_file && (dump_flags & TDF_DETAILS))
1657 : {
1658 7 : fprintf (dump_file, " Call to %s",
1659 7 : e->callee->dump_name ());
1660 : }
1661 1972042 : if (avail > AVAIL_INTERPOSABLE)
1662 : {
1663 668788 : funct_state y_l = funct_state_summaries->get_create (y);
1664 :
1665 668788 : if (dump_file && (dump_flags & TDF_DETAILS))
1666 : {
1667 2 : fprintf (dump_file,
1668 : " state:%s looping:%i\n",
1669 2 : pure_const_names[y_l->pure_const_state],
1670 2 : y_l->looping);
1671 : }
1672 668788 : if (y_l->pure_const_state > IPA_PURE
1673 668788 : && e->cannot_lead_to_return_p ())
1674 : {
1675 8395 : if (dump_file && (dump_flags & TDF_DETAILS))
1676 0 : fprintf (dump_file,
1677 : " Ignoring side effects"
1678 : " -> pure, looping\n");
1679 8395 : edge_state = IPA_PURE;
1680 8395 : edge_looping = true;
1681 : }
1682 : else
1683 : {
1684 660393 : edge_state = y_l->pure_const_state;
1685 660393 : edge_looping = y_l->looping;
1686 : }
1687 : }
1688 1303254 : else if (builtin_safe_for_const_function_p (&edge_looping,
1689 : y->decl))
1690 : edge_state = IPA_CONST;
1691 : else
1692 1112876 : state_from_flags (&edge_state, &edge_looping,
1693 1112876 : flags_from_decl_or_type (y->decl),
1694 1112876 : e->cannot_lead_to_return_p ());
1695 :
1696 : /* Merge the results with what we already know. */
1697 1972042 : better_state (&edge_state, &edge_looping,
1698 : w_l->state_previously_known,
1699 1972042 : w_l->looping_previously_known);
1700 1972042 : worse_state (&pure_const_state, &looping,
1701 1972042 : edge_state, edge_looping, e->caller, e->callee);
1702 1972042 : if (pure_const_state == IPA_NEITHER)
1703 : break;
1704 : }
1705 :
1706 : /* Now process the indirect call. */
1707 1639286 : for (ie = w->indirect_calls;
1708 1640034 : ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1709 : {
1710 24240 : enum pure_const_state_e edge_state = IPA_CONST;
1711 24240 : bool edge_looping = false;
1712 :
1713 24240 : if (dump_file && (dump_flags & TDF_DETAILS))
1714 0 : fprintf (dump_file, " Indirect call");
1715 48480 : state_from_flags (&edge_state, &edge_looping,
1716 24240 : ie->indirect_info->ecf_flags,
1717 24240 : ie->cannot_lead_to_return_p ());
1718 : /* Merge the results with what we already know. */
1719 24240 : better_state (&edge_state, &edge_looping,
1720 : w_l->state_previously_known,
1721 24240 : w_l->looping_previously_known);
1722 24240 : worse_state (&pure_const_state, &looping,
1723 : edge_state, edge_looping, NULL, NULL);
1724 24240 : if (pure_const_state == IPA_NEITHER)
1725 : break;
1726 : }
1727 :
1728 : /* And finally all loads and stores. */
1729 319573 : for (i = 0; w->iterate_reference (i, ref)
1730 2608410 : && pure_const_state != IPA_NEITHER; i++)
1731 : {
1732 347710 : enum pure_const_state_e ref_state = IPA_CONST;
1733 347710 : bool ref_looping = false;
1734 347710 : switch (ref->use)
1735 : {
1736 220660 : case IPA_REF_LOAD:
1737 : /* readonly reads are safe. */
1738 220660 : if (TREE_READONLY (ref->referred->decl))
1739 : break;
1740 204809 : if (dump_file && (dump_flags & TDF_DETAILS))
1741 0 : fprintf (dump_file, " nonreadonly global var read\n");
1742 204809 : ref_state = IPA_PURE;
1743 204809 : break;
1744 89903 : case IPA_REF_STORE:
1745 89903 : if (ref->cannot_lead_to_return ())
1746 : break;
1747 28215 : ref_state = IPA_NEITHER;
1748 28215 : if (dump_file && (dump_flags & TDF_DETAILS))
1749 0 : fprintf (dump_file, " global var write\n");
1750 : break;
1751 : case IPA_REF_ADDR:
1752 : break;
1753 0 : default:
1754 0 : gcc_unreachable ();
1755 : }
1756 347710 : better_state (&ref_state, &ref_looping,
1757 : w_l->state_previously_known,
1758 347710 : w_l->looping_previously_known);
1759 347710 : worse_state (&pure_const_state, &looping,
1760 : ref_state, ref_looping, NULL, NULL);
1761 347710 : if (pure_const_state == IPA_NEITHER)
1762 : break;
1763 : }
1764 1639286 : w_info = (struct ipa_dfs_info *) w->aux;
1765 1639286 : w = w_info->next_cycle;
1766 : }
1767 2403863 : if (dump_file && (dump_flags & TDF_DETAILS))
1768 5 : fprintf (dump_file, "Result %s looping %i\n",
1769 5 : pure_const_names [pure_const_state],
1770 : looping);
1771 :
1772 : /* Find the worst state of can_free for any node in the cycle. */
1773 : bool can_free = false;
1774 : w = node;
1775 4810821 : while (w && !can_free)
1776 : {
1777 2406958 : struct cgraph_edge *e;
1778 2406958 : funct_state w_l = funct_state_summaries->get (w);
1779 :
1780 2406958 : if (w_l->can_free
1781 2227331 : || w->get_availability () == AVAIL_INTERPOSABLE
1782 4554446 : || w->indirect_calls)
1783 : can_free = true;
1784 :
1785 4192354 : for (e = w->callees; e && !can_free; e = e->next_callee)
1786 : {
1787 1785396 : enum availability avail;
1788 1785396 : struct cgraph_node *y = e->callee->
1789 3570792 : function_or_virtual_thunk_symbol (&avail,
1790 1785396 : e->caller);
1791 :
1792 1785396 : if (avail > AVAIL_INTERPOSABLE)
1793 811305 : can_free = funct_state_summaries->get (y)->can_free;
1794 : else
1795 : can_free = true;
1796 : }
1797 2406958 : w_info = (struct ipa_dfs_info *) w->aux;
1798 2406958 : w = w_info->next_cycle;
1799 : }
1800 :
1801 : /* Copy back the region's pure_const_state which is shared by
1802 : all nodes in the region. */
1803 : w = node;
1804 4828546 : while (w)
1805 : {
1806 2424683 : funct_state w_l = funct_state_summaries->get (w);
1807 2424683 : enum pure_const_state_e this_state = pure_const_state;
1808 2424683 : bool this_looping = looping;
1809 :
1810 2424683 : w_l->can_free = can_free;
1811 2424683 : w->nonfreeing_fn = !can_free;
1812 2424683 : if (!can_free && dump_file)
1813 28 : fprintf (dump_file, "Function found not to call free: %s\n",
1814 : w->dump_name ());
1815 :
1816 2424683 : if (w_l->state_previously_known != IPA_NEITHER
1817 408752 : && this_state > w_l->state_previously_known)
1818 : {
1819 1177 : if (this_state == IPA_NEITHER)
1820 50 : this_looping = w_l->looping_previously_known;
1821 : this_state = w_l->state_previously_known;
1822 : }
1823 2424683 : if (!this_looping && self_recursive_p (w))
1824 : this_looping = true;
1825 2424683 : if (!w_l->looping_previously_known)
1826 313020 : this_looping = false;
1827 :
1828 : /* All nodes within a cycle share the same info. */
1829 2424683 : w_l->pure_const_state = this_state;
1830 2424683 : w_l->looping = this_looping;
1831 :
1832 : /* Inline clones share declaration with their offline copies;
1833 : do not modify their declarations since the offline copy may
1834 : be different. */
1835 2424683 : if (!w->inlined_to)
1836 1048824 : switch (this_state)
1837 : {
1838 163430 : case IPA_CONST:
1839 163430 : remove_p |= ipa_make_function_const (w, this_looping, false);
1840 163430 : break;
1841 :
1842 99564 : case IPA_PURE:
1843 99564 : remove_p |= ipa_make_function_pure (w, this_looping, false);
1844 99564 : break;
1845 :
1846 : default:
1847 : break;
1848 : }
1849 2424683 : w_info = (struct ipa_dfs_info *) w->aux;
1850 2424683 : w = w_info->next_cycle;
1851 : }
1852 : }
1853 :
1854 152682 : ipa_free_postorder_info ();
1855 152682 : free (order);
1856 152682 : return remove_p;
1857 : }
1858 :
1859 : /* Produce transitive closure over the callgraph and compute nothrow
1860 : attributes. */
1861 :
1862 : static void
1863 152682 : propagate_nothrow (void)
1864 : {
1865 152682 : struct cgraph_node *node;
1866 152682 : struct cgraph_node *w;
1867 152682 : struct cgraph_node **order =
1868 152682 : XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1869 152682 : int order_pos;
1870 152682 : int i;
1871 152682 : struct ipa_dfs_info * w_info;
1872 :
1873 152682 : order_pos = ipa_reduced_postorder (order, true,
1874 : ignore_edge_for_nothrow);
1875 152682 : if (dump_file)
1876 : {
1877 29 : cgraph_node::dump_cgraph (dump_file);
1878 29 : ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1879 : }
1880 :
1881 : /* Propagate the local information through the call graph to produce
1882 : the global information. All the nodes within a cycle will have
1883 : the same info so we collapse cycles first. Then we can do the
1884 : propagation in one pass from the leaves to the roots. */
1885 2611238 : for (i = 0; i < order_pos; i++ )
1886 : {
1887 2458556 : bool can_throw = false;
1888 2458556 : node = order[i];
1889 :
1890 2458556 : if (node->alias)
1891 38146 : continue;
1892 :
1893 : /* Find the worst state for any node in the cycle. */
1894 : w = node;
1895 4840985 : while (w && !can_throw)
1896 : {
1897 2420575 : struct cgraph_edge *e, *ie;
1898 :
1899 2420575 : if (!TREE_NOTHROW (w->decl))
1900 : {
1901 949323 : funct_state w_l = funct_state_summaries->get_create (w);
1902 :
1903 949323 : if (w_l->can_throw
1904 949323 : || w->get_availability () == AVAIL_INTERPOSABLE)
1905 : can_throw = true;
1906 :
1907 1826930 : for (e = w->callees; e && !can_throw; e = e->next_callee)
1908 : {
1909 877607 : enum availability avail;
1910 :
1911 877607 : if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1912 258233 : continue;
1913 :
1914 619374 : struct cgraph_node *y = e->callee->
1915 1238748 : function_or_virtual_thunk_symbol (&avail,
1916 619374 : e->caller);
1917 :
1918 : /* We can use info about the callee only if we know it
1919 : cannot be interposed.
1920 : When callee is compiled with non-call exceptions we also
1921 : must check that the declaration is bound to current
1922 : body as other semantically equivalent body may still
1923 : throw. */
1924 619374 : if (avail <= AVAIL_INTERPOSABLE
1925 619374 : || (!TREE_NOTHROW (y->decl)
1926 329682 : && (funct_state_summaries->get_create (y)->can_throw
1927 5698 : || (opt_for_fn (y->decl, flag_non_call_exceptions)
1928 548 : && !e->callee->binds_to_current_def_p (w)))))
1929 : can_throw = true;
1930 : }
1931 962564 : for (ie = w->indirect_calls; ie && !can_throw;
1932 13241 : ie = ie->next_callee)
1933 13241 : if (ie->can_throw_external
1934 11759 : && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1935 13241 : can_throw = true;
1936 : }
1937 2420575 : w_info = (struct ipa_dfs_info *) w->aux;
1938 2420575 : w = w_info->next_cycle;
1939 : }
1940 :
1941 : /* Copy back the region's pure_const_state which is shared by
1942 : all nodes in the region. */
1943 : w = node;
1944 4845093 : while (w)
1945 : {
1946 2424683 : funct_state w_l = funct_state_summaries->get_create (w);
1947 2424683 : if (!can_throw && !TREE_NOTHROW (w->decl))
1948 : {
1949 : /* Inline clones share declaration with their offline copies;
1950 : do not modify their declarations since the offline copy may
1951 : be different. */
1952 18475 : if (!w->inlined_to)
1953 : {
1954 3036 : w->set_nothrow_flag (true);
1955 3036 : if (dump_file)
1956 0 : fprintf (dump_file, "Function found to be nothrow: %s\n",
1957 : w->dump_name ());
1958 : }
1959 : }
1960 934912 : else if (can_throw && !TREE_NOTHROW (w->decl))
1961 934912 : w_l->can_throw = true;
1962 2424683 : w_info = (struct ipa_dfs_info *) w->aux;
1963 2424683 : w = w_info->next_cycle;
1964 : }
1965 : }
1966 :
1967 152682 : ipa_free_postorder_info ();
1968 152682 : free (order);
1969 152682 : }
1970 :
1971 : /* Debugging function to dump state of malloc lattice. */
1972 :
1973 : DEBUG_FUNCTION
1974 : static void
1975 305364 : dump_malloc_lattice (FILE *dump_file, const char *s)
1976 : {
1977 305364 : if (!dump_file)
1978 : return;
1979 :
1980 58 : fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1981 58 : cgraph_node *node;
1982 290 : FOR_EACH_FUNCTION (node)
1983 : {
1984 232 : funct_state fs = funct_state_summaries->get (node);
1985 232 : if (fs)
1986 156 : fprintf (dump_file, "%s: %s\n", node->dump_name (),
1987 156 : malloc_state_names[fs->malloc_state]);
1988 : }
1989 : }
1990 :
1991 : /* Propagate malloc attribute across the callgraph. */
1992 :
1993 : static void
1994 152682 : propagate_malloc (void)
1995 : {
1996 152682 : cgraph_node *node;
1997 4041454 : FOR_EACH_FUNCTION (node)
1998 : {
1999 3888772 : if (DECL_IS_MALLOC (node->decl))
2000 89224 : if (!funct_state_summaries->exists (node))
2001 : {
2002 23655 : funct_state fs = funct_state_summaries->get_create (node);
2003 23655 : fs->malloc_state = STATE_MALLOC;
2004 : }
2005 : }
2006 :
2007 152682 : dump_malloc_lattice (dump_file, "Initial");
2008 152682 : struct cgraph_node **order
2009 152682 : = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
2010 152682 : int order_pos = ipa_reverse_postorder (order);
2011 152682 : bool changed = true;
2012 :
2013 459859 : while (changed)
2014 : {
2015 154495 : changed = false;
2016 : /* Walk in postorder. */
2017 4720084 : for (int i = order_pos - 1; i >= 0; --i)
2018 : {
2019 4565589 : cgraph_node *node = order[i];
2020 6174659 : if (node->alias
2021 4521418 : || !node->definition
2022 7522108 : || !funct_state_summaries->exists (node))
2023 4476642 : continue;
2024 :
2025 2956519 : funct_state l = funct_state_summaries->get (node);
2026 :
2027 : /* FIXME: add support for indirect-calls. */
2028 2956519 : if (node->indirect_calls)
2029 : {
2030 142493 : l->malloc_state = STATE_MALLOC_BOTTOM;
2031 142493 : continue;
2032 : }
2033 :
2034 2814026 : if (node->get_availability () <= AVAIL_INTERPOSABLE)
2035 : {
2036 93985 : l->malloc_state = STATE_MALLOC_BOTTOM;
2037 93985 : continue;
2038 : }
2039 :
2040 2720041 : if (l->malloc_state == STATE_MALLOC_BOTTOM)
2041 2631094 : continue;
2042 :
2043 88947 : auto_vec<cgraph_node *, 16> callees;
2044 391632 : for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2045 : {
2046 302685 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2047 302685 : if (es && es->is_return_callee_uncaptured)
2048 13429 : callees.safe_push (cs->callee);
2049 : }
2050 :
2051 88947 : malloc_state_e new_state = l->malloc_state;
2052 102376 : for (unsigned j = 0; j < callees.length (); j++)
2053 : {
2054 13429 : cgraph_node *callee = callees[j];
2055 13429 : if (!funct_state_summaries->exists (node))
2056 : {
2057 : new_state = STATE_MALLOC_BOTTOM;
2058 : break;
2059 : }
2060 13429 : malloc_state_e callee_state
2061 13429 : = funct_state_summaries->get_create (callee)->malloc_state;
2062 13429 : if (new_state < callee_state)
2063 11320 : new_state = callee_state;
2064 : }
2065 88947 : if (new_state != l->malloc_state)
2066 : {
2067 11314 : changed = true;
2068 11314 : l->malloc_state = new_state;
2069 : }
2070 88947 : }
2071 : }
2072 :
2073 2615515 : FOR_EACH_DEFINED_FUNCTION (node)
2074 2462833 : if (funct_state_summaries->exists (node))
2075 : {
2076 2450766 : funct_state l = funct_state_summaries->get (node);
2077 2450766 : if (!node->alias
2078 2424683 : && l->malloc_state == STATE_MALLOC
2079 57051 : && !node->inlined_to
2080 2451322 : && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2081 : {
2082 556 : if (dump_file && (dump_flags & TDF_DETAILS))
2083 6 : fprintf (dump_file, "Function %s found to be malloc\n",
2084 : node->dump_name ());
2085 :
2086 556 : bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2087 556 : node->set_malloc_flag (true);
2088 556 : if (!malloc_decl_p && warn_suggest_attribute_malloc)
2089 0 : warn_function_malloc (node->decl);
2090 : }
2091 : }
2092 :
2093 152682 : dump_malloc_lattice (dump_file, "after propagation");
2094 152682 : ipa_free_postorder_info ();
2095 152682 : free (order);
2096 152682 : }
2097 :
2098 : /* Produce the global information by preforming a transitive closure
2099 : on the local information that was produced by generate_summary. */
2100 :
2101 : unsigned int
2102 152682 : pass_ipa_pure_const::
2103 : execute (function *)
2104 : {
2105 152682 : bool remove_p;
2106 :
2107 : /* Nothrow makes more function to not lead to return and improve
2108 : later analysis. */
2109 152682 : propagate_nothrow ();
2110 152682 : propagate_malloc ();
2111 152682 : remove_p = propagate_pure_const ();
2112 :
2113 152682 : delete funct_state_summaries;
2114 152682 : return remove_p ? TODO_remove_functions : 0;
2115 : }
2116 :
2117 : static bool
2118 4071733 : gate_pure_const (void)
2119 : {
2120 592981 : return flag_ipa_pure_const || in_lto_p;
2121 : }
2122 :
2123 288775 : pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2124 : : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2125 : pure_const_generate_summary, /* generate_summary */
2126 : pure_const_write_summary, /* write_summary */
2127 : pure_const_read_summary, /* read_summary */
2128 : NULL, /* write_optimization_summary */
2129 : NULL, /* read_optimization_summary */
2130 : NULL, /* stmt_fixup */
2131 : 0, /* function_transform_todo_flags_start */
2132 : NULL, /* function_transform */
2133 : NULL), /* variable_transform */
2134 288775 : init_p (false) {}
2135 :
2136 : ipa_opt_pass_d *
2137 288775 : make_pass_ipa_pure_const (gcc::context *ctxt)
2138 : {
2139 288775 : return new pass_ipa_pure_const (ctxt);
2140 : }
2141 :
2142 : /* Simple local pass for pure const discovery reusing the analysis from
2143 : ipa_pure_const. This pass is effective when executed together with
2144 : other optimization passes in early optimization pass queue. */
2145 :
2146 : namespace {
2147 :
2148 : const pass_data pass_data_local_pure_const =
2149 : {
2150 : GIMPLE_PASS, /* type */
2151 : "local-pure-const", /* name */
2152 : OPTGROUP_NONE, /* optinfo_flags */
2153 : TV_IPA_PURE_CONST, /* tv_id */
2154 : 0, /* properties_required */
2155 : 0, /* properties_provided */
2156 : 0, /* properties_destroyed */
2157 : 0, /* todo_flags_start */
2158 : 0, /* todo_flags_finish */
2159 : };
2160 :
2161 : class pass_local_pure_const : public gimple_opt_pass
2162 : {
2163 : public:
2164 577550 : pass_local_pure_const (gcc::context *ctxt)
2165 1155100 : : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2166 : {}
2167 :
2168 : /* opt_pass methods: */
2169 288775 : opt_pass * clone () final override
2170 : {
2171 288775 : return new pass_local_pure_const (m_ctxt);
2172 : }
2173 3479004 : bool gate (function *) final override { return gate_pure_const (); }
2174 : unsigned int execute (function *) final override;
2175 :
2176 : }; // class pass_local_pure_const
2177 :
2178 : unsigned int
2179 3478674 : pass_local_pure_const::execute (function *fun)
2180 : {
2181 3478674 : bool changed = false;
2182 3478674 : funct_state l;
2183 3478674 : bool skip;
2184 3478674 : struct cgraph_node *node;
2185 :
2186 3478674 : node = cgraph_node::get (current_function_decl);
2187 3478674 : skip = skip_function_for_local_pure_const (node);
2188 :
2189 3478674 : if (!warn_suggest_attribute_const
2190 3478651 : && !warn_suggest_attribute_pure
2191 3478630 : && skip)
2192 : return 0;
2193 :
2194 3305928 : l = analyze_function (node, false);
2195 :
2196 : /* Do NORETURN discovery. */
2197 3305928 : if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2198 6583498 : && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2199 : {
2200 26652 : warn_function_noreturn (fun->decl);
2201 26652 : if (dump_file)
2202 1 : fprintf (dump_file, "Function found to be noreturn: %s\n",
2203 : current_function_name ());
2204 :
2205 : /* Update declaration and reduce profile to executed once. */
2206 26652 : if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
2207 : changed = true;
2208 26652 : if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2209 12306 : node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2210 : }
2211 :
2212 3305928 : switch (l->pure_const_state)
2213 : {
2214 656436 : case IPA_CONST:
2215 1312872 : changed |= ipa_make_function_const
2216 656436 : (cgraph_node::get (current_function_decl), l->looping, true);
2217 656436 : break;
2218 :
2219 428946 : case IPA_PURE:
2220 857892 : changed |= ipa_make_function_pure
2221 428946 : (cgraph_node::get (current_function_decl), l->looping, true);
2222 428946 : break;
2223 :
2224 : default:
2225 : break;
2226 : }
2227 3305928 : if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2228 : {
2229 20592 : node->set_nothrow_flag (true);
2230 20592 : changed = true;
2231 20592 : if (dump_file)
2232 2 : fprintf (dump_file, "Function found to be nothrow: %s\n",
2233 : current_function_name ());
2234 : }
2235 :
2236 3305928 : if (l->malloc_state == STATE_MALLOC
2237 3305928 : && !DECL_IS_MALLOC (current_function_decl))
2238 : {
2239 40132 : node->set_malloc_flag (true);
2240 40132 : if (warn_suggest_attribute_malloc)
2241 3 : warn_function_malloc (node->decl);
2242 40132 : changed = true;
2243 40132 : if (dump_file)
2244 2 : fprintf (dump_file, "Function found to be malloc: %s\n",
2245 : node->dump_name ());
2246 : }
2247 :
2248 3305928 : free (l);
2249 3305928 : if (changed)
2250 910796 : return execute_fixup_cfg ();
2251 : else
2252 : return 0;
2253 : }
2254 :
2255 : } // anon namespace
2256 :
2257 : gimple_opt_pass *
2258 288775 : make_pass_local_pure_const (gcc::context *ctxt)
2259 : {
2260 288775 : return new pass_local_pure_const (ctxt);
2261 : }
2262 :
2263 : /* Emit noreturn warnings. */
2264 :
2265 : namespace {
2266 :
2267 : const pass_data pass_data_warn_function_noreturn =
2268 : {
2269 : GIMPLE_PASS, /* type */
2270 : "*warn_function_noreturn", /* name */
2271 : OPTGROUP_NONE, /* optinfo_flags */
2272 : TV_NONE, /* tv_id */
2273 : PROP_cfg, /* properties_required */
2274 : 0, /* properties_provided */
2275 : 0, /* properties_destroyed */
2276 : 0, /* todo_flags_start */
2277 : 0, /* todo_flags_finish */
2278 : };
2279 :
2280 : class pass_warn_function_noreturn : public gimple_opt_pass
2281 : {
2282 : public:
2283 288775 : pass_warn_function_noreturn (gcc::context *ctxt)
2284 577550 : : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2285 : {}
2286 :
2287 : /* opt_pass methods: */
2288 1480905 : bool gate (function *) final override
2289 : {
2290 1480905 : return warn_suggest_attribute_noreturn;
2291 : }
2292 29 : unsigned int execute (function *fun) final override
2293 : {
2294 29 : if (!TREE_THIS_VOLATILE (current_function_decl)
2295 29 : && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2296 4 : warn_function_noreturn (current_function_decl);
2297 29 : return 0;
2298 : }
2299 :
2300 : }; // class pass_warn_function_noreturn
2301 :
2302 : } // anon namespace
2303 :
2304 : gimple_opt_pass *
2305 288775 : make_pass_warn_function_noreturn (gcc::context *ctxt)
2306 : {
2307 288775 : return new pass_warn_function_noreturn (ctxt);
2308 : }
2309 :
2310 : /* Simple local pass for nothrow discovery reusing the analysis from
2311 : ipa_pure_const. This pass is effective when executed together with
2312 : other optimization passes in early optimization pass queue. */
2313 :
2314 : namespace {
2315 :
2316 : const pass_data pass_data_nothrow =
2317 : {
2318 : GIMPLE_PASS, /* type */
2319 : "nothrow", /* name */
2320 : OPTGROUP_NONE, /* optinfo_flags */
2321 : TV_IPA_PURE_CONST, /* tv_id */
2322 : 0, /* properties_required */
2323 : 0, /* properties_provided */
2324 : 0, /* properties_destroyed */
2325 : 0, /* todo_flags_start */
2326 : 0, /* todo_flags_finish */
2327 : };
2328 :
2329 : class pass_nothrow : public gimple_opt_pass
2330 : {
2331 : public:
2332 288775 : pass_nothrow (gcc::context *ctxt)
2333 577550 : : gimple_opt_pass (pass_data_nothrow, ctxt)
2334 : {}
2335 :
2336 : /* opt_pass methods: */
2337 0 : opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
2338 2878910 : bool gate (function *) final override { return optimize; }
2339 : unsigned int execute (function *) final override;
2340 :
2341 : }; // class pass_nothrow
2342 :
2343 : unsigned int
2344 2435665 : pass_nothrow::execute (function *)
2345 : {
2346 2435665 : struct cgraph_node *node;
2347 2435665 : basic_block this_block;
2348 :
2349 2435665 : if (TREE_NOTHROW (current_function_decl))
2350 : return 0;
2351 :
2352 1528093 : node = cgraph_node::get (current_function_decl);
2353 :
2354 : /* We run during lowering, we cannot really use availability yet. */
2355 1528093 : if (cgraph_node::get (current_function_decl)->get_availability ()
2356 : <= AVAIL_INTERPOSABLE)
2357 : {
2358 83676 : if (dump_file)
2359 0 : fprintf (dump_file, "Function is interposable;"
2360 : " not analyzing.\n");
2361 83676 : return true;
2362 : }
2363 :
2364 11092983 : FOR_EACH_BB_FN (this_block, cfun)
2365 : {
2366 20496180 : for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2367 49894584 : !gsi_end_p (gsi);
2368 39646494 : gsi_next (&gsi))
2369 40246018 : if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2370 : {
2371 601007 : if (is_gimple_call (gsi_stmt (gsi)))
2372 : {
2373 382314 : tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2374 382314 : if (callee_t && recursive_call_p (current_function_decl,
2375 : callee_t))
2376 1483 : continue;
2377 : }
2378 :
2379 599524 : if (dump_file)
2380 : {
2381 0 : fprintf (dump_file, "Statement can throw: ");
2382 0 : print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2383 : }
2384 599524 : return 0;
2385 : }
2386 : }
2387 :
2388 844893 : node->set_nothrow_flag (true);
2389 :
2390 844893 : bool cfg_changed = false;
2391 844893 : if (self_recursive_p (node))
2392 30839 : FOR_EACH_BB_FN (this_block, cfun)
2393 83253 : if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
2394 : {
2395 2093 : tree callee_t = gimple_call_fndecl (g);
2396 2093 : if (callee_t
2397 1978 : && recursive_call_p (current_function_decl, callee_t)
2398 571 : && maybe_clean_eh_stmt (g)
2399 2096 : && gimple_purge_dead_eh_edges (this_block))
2400 : cfg_changed = true;
2401 : }
2402 :
2403 844893 : if (dump_file)
2404 33 : fprintf (dump_file, "Function found to be nothrow: %s\n",
2405 : current_function_name ());
2406 844893 : return cfg_changed ? TODO_cleanup_cfg : 0;
2407 : }
2408 :
2409 : } // anon namespace
2410 :
2411 : gimple_opt_pass *
2412 288775 : make_pass_nothrow (gcc::context *ctxt)
2413 : {
2414 288775 : return new pass_nothrow (ctxt);
2415 : }
|