Line data Source code
1 : /* Callgraph based analysis of static variables.
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 : Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : /* This file marks functions as being either const (TREE_READONLY) or
22 : pure (DECL_PURE_P). It can also set a variant of these that
23 : are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24 :
25 : This must be run after inlining decisions have been made since
26 : otherwise, the local sets will not contain information that is
27 : consistent with post inlined state. The global sets are not prone
28 : to this problem since they are by definition transitive. */
29 :
30 : /* The code in this module is called by the ipa pass manager. It
31 : should be one of the later passes since its information is used by
32 : the rest of the compilation. */
33 :
34 : #include "config.h"
35 : #include "system.h"
36 : #include "coretypes.h"
37 : #include "backend.h"
38 : #include "target.h"
39 : #include "tree.h"
40 : #include "gimple.h"
41 : #include "tree-pass.h"
42 : #include "tree-streamer.h"
43 : #include "cgraph.h"
44 : #include "diagnostic.h"
45 : #include "calls.h"
46 : #include "cfganal.h"
47 : #include "tree-eh.h"
48 : #include "gimple-iterator.h"
49 : #include "gimple-walk.h"
50 : #include "tree-cfg.h"
51 : #include "tree-ssa-loop-niter.h"
52 : #include "langhooks.h"
53 : #include "ipa-utils.h"
54 : #include "gimple-pretty-print.h"
55 : #include "cfgloop.h"
56 : #include "tree-scalar-evolution.h"
57 : #include "intl.h"
58 : #include "opts.h"
59 : #include "ssa.h"
60 : #include "alloc-pool.h"
61 : #include "symbol-summary.h"
62 : #include "sreal.h"
63 : #include "ipa-cp.h"
64 : #include "ipa-prop.h"
65 : #include "ipa-fnsummary.h"
66 : #include "symtab-thunks.h"
67 : #include "dbgcnt.h"
68 : #include "gcc-urlifier.h"
69 :
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Printable names for the lattice above, indexed by pure_const_state_e.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Lattice used while discovering functions that can get the malloc
   attribute.  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Printable names for the lattice above, indexed by malloc_state_e.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  Every member is default-initialized to the most pessimistic
   value of its lattice; the analysis only ever improves on that.

   The previous hand-written default and copy constructors merely
   replicated memberwise initialization/copying, so they are replaced
   by in-class initializers and the implicitly-defaulted special
   members (rule of zero).  */
class funct_state_d
{
public:
  /* See above.  */
  enum pure_const_state_e pure_const_state = IPA_NEITHER;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known = IPA_NEITHER;
  bool looping_previously_known = true;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping = true;

  bool can_throw = true;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free = true;

  enum malloc_state_e malloc_state = STATE_MALLOC_BOTTOM;
};

typedef class funct_state_d * funct_state;
131 :
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  /* Summary hooks; bodies are defined later in this file.  INSERT runs
     when a node first gets a summary, DUPLICATE when DST_NODE is cloned
     from SRC_NODE.  */
  void insert (cgraph_node *, funct_state_d *state) final override;
  void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
		  funct_state_d *src_data,
		  funct_state_d *dst_data) final override;
};

/* The single per-compilation instance of the summary; presumably
   allocated lazily by the pass (allocation site not in this chunk).  */
static funct_state_summary_t *funct_state_summaries = NULL;

/* Forward declaration; used by the pass gate below.  */
static bool gate_pure_const (void);
152 :
namespace {

/* Pass-manager metadata for the IPA pure/const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) final override { return gate_pure_const (); }
  unsigned int execute (function *fun) final override;

  void register_hooks (void);

private:
  /* NOTE(review): presumably tracks whether register_hooks has run;
     confirm against the definitions elsewhere in this file.  */
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace
184 :
185 : /* Try to guess if function body will always be visible to compiler
186 : when compiling the call and whether compiler will be able
187 : to propagate the information by itself. */
188 :
189 : static bool
190 26 : function_always_visible_to_compiler_p (tree decl)
191 : {
192 22 : return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
193 48 : || DECL_COMDAT (decl));
194 : }
195 :
196 : /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
197 : is true if the function is known to be finite. The diagnostic is
198 : controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
199 : OPTION, this function may initialize it and it is always returned
200 : by the function. */
201 :
202 : static hash_set<tree> *
203 1253883 : suggest_attribute (diagnostics::option_id option, tree decl, bool known_finite,
204 : hash_set<tree> *warned_about,
205 : const char * attrib_name)
206 : {
207 1253883 : if (!option_enabled (option.m_idx, lang_hooks.option_lang_mask (),
208 : &global_options))
209 : return warned_about;
210 30 : if (TREE_THIS_VOLATILE (decl)
211 30 : || (known_finite && function_always_visible_to_compiler_p (decl)))
212 : return warned_about;
213 :
214 26 : if (!warned_about)
215 14 : warned_about = new hash_set<tree>;
216 26 : if (warned_about->contains (decl))
217 : return warned_about;
218 26 : warned_about->add (decl);
219 26 : auto_urlify_attributes sentinel;
220 30 : warning_at (DECL_SOURCE_LOCATION (decl),
221 : option,
222 : known_finite
223 : ? G_("function might be candidate for attribute %qs")
224 : : G_("function might be candidate for attribute %qs"
225 : " if it is known to return normally"), attrib_name);
226 26 : return warned_about;
227 26 : }
228 :
229 : /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
230 : is true if the function is known to be finite. */
231 :
232 : static void
233 364921 : warn_function_pure (tree decl, bool known_finite)
234 : {
235 : /* Declaring a void function pure makes no sense and is diagnosed
236 : by -Wattributes because calling it would have no effect. */
237 364921 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
238 : return;
239 :
240 335820 : static hash_set<tree> *warned_about;
241 335820 : warned_about
242 335820 : = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
243 : known_finite, warned_about, "pure");
244 : }
245 :
246 : /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
247 : is true if the function is known to be finite. */
248 :
249 : static void
250 913065 : warn_function_const (tree decl, bool known_finite)
251 : {
252 : /* Declaring a void function const makes no sense is diagnosed
253 : by -Wattributes because calling it would have no effect. */
254 913065 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
255 : return;
256 :
257 670985 : static hash_set<tree> *warned_about;
258 670985 : warned_about
259 670985 : = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
260 : known_finite, warned_about, "const");
261 : }
262 :
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  /* KNOWN_FINITE is irrelevant for malloc; pass true unconditionally.  */
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}
273 :
274 : /* Emit suggestion about __attribute__((noreturn)) for DECL. */
275 :
276 : static void
277 26843 : warn_function_noreturn (tree decl)
278 : {
279 26843 : tree original_decl = decl;
280 :
281 26843 : static hash_set<tree> *warned_about;
282 26843 : if (!lang_hooks.missing_noreturn_ok_p (decl)
283 26843 : && targetm.warn_func_return (decl))
284 14555 : warned_about
285 14555 : = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
286 : true, warned_about, "noreturn");
287 26843 : }
288 :
289 : void
290 9091 : warn_function_cold (tree decl)
291 : {
292 9091 : tree original_decl = decl;
293 :
294 9091 : static hash_set<tree> *warned_about;
295 9091 : warned_about
296 9091 : = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
297 : true, warned_about, "cold");
298 9091 : }
299 :
/* Emit suggestion about __attribute__((returns_nonnull)) for DECL.  */

void
warn_function_returns_nonnull (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull, decl,
			 true, warned_about, "returns_nonnull");
}
308 :
309 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
310 : variable T is legal in a function that is either pure or const. */
311 :
312 : static inline void
313 20125789 : check_decl (funct_state local,
314 : tree t, bool checking_write, bool ipa)
315 : {
316 : /* Do not want to do anything with volatile except mark any
317 : function that uses one to be not const or pure. */
318 20125789 : if (TREE_THIS_VOLATILE (t))
319 : {
320 1446829 : local->pure_const_state = IPA_NEITHER;
321 1446829 : if (dump_file)
322 30 : fprintf (dump_file, " Volatile operand is not const/pure\n");
323 1446829 : return;
324 : }
325 :
326 : /* Do not care about a local automatic that is not static. */
327 18678960 : if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
328 : return;
329 :
330 : /* If the variable has the "used" attribute, treat it as if it had a
331 : been touched by the devil. */
332 3559330 : if (DECL_PRESERVE_P (t))
333 : {
334 3492 : local->pure_const_state = IPA_NEITHER;
335 3492 : if (dump_file)
336 0 : fprintf (dump_file, " Used static/global variable is not const/pure\n");
337 3492 : return;
338 : }
339 :
340 : /* In IPA mode we are not interested in checking actual loads and stores;
341 : they will be processed at propagation time using ipa_ref. */
342 3555838 : if (ipa)
343 : return;
344 :
345 : /* Since we have dealt with the locals and params cases above, if we
346 : are CHECKING_WRITE, this cannot be a pure or constant
347 : function. */
348 2293886 : if (checking_write)
349 : {
350 832475 : local->pure_const_state = IPA_NEITHER;
351 832475 : if (dump_file)
352 1 : fprintf (dump_file, " static/global memory write is not const/pure\n");
353 832475 : return;
354 : }
355 :
356 1461411 : if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
357 : {
358 : /* Readonly reads are safe. */
359 840905 : if (TREE_READONLY (t))
360 : return; /* Read of a constant, do not change the function state. */
361 : else
362 : {
363 836878 : if (dump_file)
364 0 : fprintf (dump_file, " global memory read is not const\n");
365 : /* Just a regular read. */
366 836878 : if (local->pure_const_state == IPA_CONST)
367 167804 : local->pure_const_state = IPA_PURE;
368 : }
369 : }
370 : else
371 : {
372 : /* Compilation level statics can be read if they are readonly
373 : variables. */
374 620506 : if (TREE_READONLY (t))
375 : return;
376 :
377 590622 : if (dump_file)
378 1 : fprintf (dump_file, " static memory read is not const\n");
379 : /* Just a regular read. */
380 590622 : if (local->pure_const_state == IPA_CONST)
381 32232 : local->pure_const_state = IPA_PURE;
382 : }
383 : }
384 :
385 :
386 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
387 : variable T is legal in a function that is either pure or const. */
388 :
389 : static inline void
390 18608383 : check_op (funct_state local, tree t, bool checking_write)
391 : {
392 18608383 : t = get_base_address (t);
393 18608383 : if (t && TREE_THIS_VOLATILE (t))
394 : {
395 20637 : local->pure_const_state = IPA_NEITHER;
396 20637 : if (dump_file)
397 2 : fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
398 20637 : return;
399 : }
400 18587746 : else if (refs_local_or_readonly_memory_p (t))
401 : {
402 4803691 : if (dump_file)
403 10 : fprintf (dump_file, " Indirect ref to local or readonly "
404 : "memory is OK\n");
405 4803691 : return;
406 : }
407 13784055 : else if (checking_write)
408 : {
409 4759554 : local->pure_const_state = IPA_NEITHER;
410 4759554 : if (dump_file)
411 64 : fprintf (dump_file, " Indirect ref write is not const/pure\n");
412 4759554 : return;
413 : }
414 : else
415 : {
416 9024501 : if (dump_file)
417 175 : fprintf (dump_file, " Indirect ref read is not const\n");
418 9024501 : if (local->pure_const_state == IPA_CONST)
419 1348516 : local->pure_const_state = IPA_PURE;
420 : }
421 : }
422 :
423 : /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
424 :
425 : static void
426 15740991 : state_from_flags (enum pure_const_state_e *state, bool *looping,
427 : int flags, bool cannot_lead_to_return)
428 : {
429 15740991 : *looping = false;
430 15740991 : if (flags & ECF_LOOPING_CONST_OR_PURE)
431 : {
432 202303 : *looping = true;
433 202303 : if (dump_file && (dump_flags & TDF_DETAILS))
434 0 : fprintf (dump_file, " looping\n");
435 : }
436 15740991 : if (flags & ECF_CONST)
437 : {
438 1184159 : *state = IPA_CONST;
439 1184159 : if (dump_file && (dump_flags & TDF_DETAILS))
440 3 : fprintf (dump_file, " const\n");
441 : }
442 14556832 : else if (flags & ECF_PURE)
443 : {
444 1197904 : *state = IPA_PURE;
445 1197904 : if (dump_file && (dump_flags & TDF_DETAILS))
446 3 : fprintf (dump_file, " pure\n");
447 : }
448 13358928 : else if (cannot_lead_to_return)
449 : {
450 964623 : *state = IPA_PURE;
451 964623 : *looping = true;
452 964623 : if (dump_file && (dump_flags & TDF_DETAILS))
453 1 : fprintf (dump_file, " ignoring side effects->pure looping\n");
454 : }
455 : else
456 : {
457 12394305 : if (dump_file && (dump_flags & TDF_DETAILS))
458 42 : fprintf (dump_file, " neither\n");
459 12394305 : *state = IPA_NEITHER;
460 12394305 : *looping = true;
461 : }
462 15740991 : }
463 :
464 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
465 : into STATE and LOOPING better of the two variants.
466 : Be sure to merge looping correctly. IPA_NEITHER functions
467 : have looping 0 even if they don't have to return. */
468 :
469 : static inline void
470 6917064 : better_state (enum pure_const_state_e *state, bool *looping,
471 : enum pure_const_state_e state2, bool looping2)
472 : {
473 6917064 : if (state2 < *state)
474 : {
475 36997 : if (*state == IPA_NEITHER)
476 35474 : *looping = looping2;
477 : else
478 1523 : *looping = MIN (*looping, looping2);
479 36997 : *state = state2;
480 : }
481 6880067 : else if (state2 != IPA_NEITHER)
482 1619953 : *looping = MIN (*looping, looping2);
483 6917064 : }
484 :
485 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
486 : into STATE and LOOPING worse of the two variants.
487 : N is the actual node called. */
488 :
489 : static inline void
490 14911681 : worse_state (enum pure_const_state_e *state, bool *looping,
491 : enum pure_const_state_e state2, bool looping2,
492 : struct symtab_node *from,
493 : struct symtab_node *to)
494 : {
495 : /* Consider function:
496 :
497 : bool a(int *p)
498 : {
499 : return *p==*p;
500 : }
501 :
502 : During early optimization we will turn this into:
503 :
504 : bool a(int *p)
505 : {
506 : return true;
507 : }
508 :
509 : Now if this function will be detected as CONST however when interposed it
510 : may end up being just pure. We always must assume the worst scenario here.
511 : */
512 14911681 : if (*state == IPA_CONST && state2 == IPA_CONST
513 14911681 : && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
514 : {
515 3853 : if (dump_file && (dump_flags & TDF_DETAILS))
516 0 : fprintf (dump_file, "Dropping state to PURE because call to %s may not "
517 : "bind to current def.\n", to->dump_name ());
518 : state2 = IPA_PURE;
519 : }
520 14911681 : *state = MAX (*state, state2);
521 14911681 : *looping = MAX (*looping, looping2);
522 14911681 : }
523 :
/* Recognize special cases of builtins that are by themselves not const
   but function using them is.
   Sets *LOOPING and returns true for the recognized builtins;
   returns false otherwise.  */
bool
builtin_safe_for_const_function_p (bool *looping, tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* These builtins do not prevent the caller from being const.  */
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	*looping = false;
	return true;
      /* Prefetch is const-safe too, but is reported as looping.  */
      case BUILT_IN_PREFETCH:
	*looping = true;
	return true;
      default:
	break;
      }
  return false;
}
558 :
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  The CALL_EXPR node for
   the entire call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      /* With non-call exceptions the evaluation of the call's operands
	 themselves may throw; account for each such operand.  */
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      bool call_looping;

      /* A builtin that may free memory taints the whole caller.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (builtin_safe_for_const_function_p (&call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       IPA_CONST, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, " setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       " longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, " callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " checking flags for call:");
      /* A noreturn+nothrow call (or noreturn without exceptions enabled)
	 cannot lead back to a return of the caller.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
697 :
698 : /* Wrapper around check_decl for loads in local more. */
699 :
700 : static bool
701 12534957 : check_load (gimple *, tree op, tree, void *data)
702 : {
703 12534957 : if (DECL_P (op))
704 5230579 : check_decl ((funct_state)data, op, false, false);
705 : else
706 7304378 : check_op ((funct_state)data, op, false);
707 12534957 : return false;
708 : }
709 :
710 : /* Wrapper around check_decl for stores in local more. */
711 :
712 : static bool
713 13456221 : check_store (gimple *, tree op, tree, void *data)
714 : {
715 13456221 : if (DECL_P (op))
716 7800314 : check_decl ((funct_state)data, op, true, false);
717 : else
718 5655907 : check_op ((funct_state)data, op, true);
719 13456221 : return false;
720 : }
721 :
722 : /* Wrapper around check_decl for loads in ipa mode. */
723 :
724 : static bool
725 6253633 : check_ipa_load (gimple *, tree op, tree, void *data)
726 : {
727 6253633 : if (DECL_P (op))
728 2984515 : check_decl ((funct_state)data, op, false, true);
729 : else
730 3269118 : check_op ((funct_state)data, op, false);
731 6253633 : return false;
732 : }
733 :
734 : /* Wrapper around check_decl for stores in ipa mode. */
735 :
736 : static bool
737 6489361 : check_ipa_store (gimple *, tree op, tree, void *data)
738 : {
739 6489361 : if (DECL_P (op))
740 4110381 : check_decl ((funct_state)data, op, true, true);
741 : else
742 2378980 : check_op ((funct_state)data, op, true);
743 6489361 : return false;
744 : }
745 :
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has; record them in LOCAL.  IPA selects between local and
   IPA analysis mode.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug statements never affect purity.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt) && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Throwing of calls is accounted for in check_call below; handle all
     other potentially-throwing statements here.  */
  if (gimple_code (stmt) != GIMPLE_CALL && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
841 :
842 : /* Check that RETVAL is used only in STMT and in comparisons against 0.
843 : RETVAL is return value of the function and STMT is return stmt. */
844 :
845 : static bool
846 476965 : check_retval_uses (tree retval, gimple *stmt)
847 : {
848 476965 : imm_use_iterator use_iter;
849 476965 : gimple *use_stmt;
850 :
851 1504398 : FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
852 692437 : if (gcond *cond = dyn_cast<gcond *> (use_stmt))
853 : {
854 12759 : tree op2 = gimple_cond_rhs (cond);
855 12759 : if (!integer_zerop (op2))
856 : return false;
857 : }
858 679678 : else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
859 : {
860 93649 : enum tree_code code = gimple_assign_rhs_code (ga);
861 93649 : if (TREE_CODE_CLASS (code) != tcc_comparison)
862 : return false;
863 2566 : if (!integer_zerop (gimple_assign_rhs2 (ga)))
864 : return false;
865 : }
866 586029 : else if (is_gimple_debug (use_stmt))
867 : ;
868 470456 : else if (use_stmt != stmt)
869 141969 : return false;
870 :
871 334996 : return true;
872 : }
873 :
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute.  Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or gassign)
      and return_stmt (and likewise a phi arg has immediate use only within comparison
      or the phi stmt).  */

/* Dump REASON why NODE is not a malloc candidate (under -fdump details)
   and return false from the enclosing function.  Relies on a local
   `node' being in scope at the expansion site.  */

#define DUMP_AND_RETURN(reason) \
{ \
  if (dump_file && (dump_flags & TDF_DETAILS)) \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason)); \
  return false; \
}
892 :
/* Worker for malloc_candidate_p.  Check that the value RETVAL, returned
   from RET_STMT in FUN, is always a freshly allocated, uncaptured
   pointer: its definition is either a call (to a DECL_IS_MALLOC callee
   in non-IPA mode) or a phi whose non-zero arguments recursively satisfy
   the same property, and all its uses are the return/phi itself or
   comparisons against zero.  VISITED guards against phi cycles; IPA
   selects whether callee malloc-ness is checked here or deferred to the
   IPA propagation.  Returns false as soon as any condition fails.  */

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  /* Already-visited SSA names were checked on a previous path; treating
     them as OK here breaks cycles among nested phis.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      /* Indirect call: we cannot reason about the callee at all.  */
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      /* Record for the IPA propagator that the callee's return value
	 does not escape through this caller.  */
      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  /* NULL arguments are fine: returning NULL is compatible with
	     the malloc attribute.  */
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  /* Nested phi: recurse; VISITED keeps this terminating.  */
	  if (is_a<gphi *> (arg_def))
	    {
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		    DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is a not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  /* Same escape bookkeeping as the direct-call case above.  */
	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      /* A function that always returns NULL gains nothing from the
	 malloc attribute; reject it.  */
      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
977 :
/* Entry point of the malloc-candidate analysis (criteria documented in
   the big comment above DUMP_AND_RETURN): check every return statement
   of FUN via malloc_candidate_p_1.  IPA selects whether callee
   malloc-ness is deferred to IPA propagation.  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  /* With no exit edges there is no return value to analyze; and without
     -fdelete-null-pointer-checks, comparisons against 0 are meaningful
     and the uses check below would be unsound.  */
  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  /* Every predecessor of the exit block must end in a return whose
     value passes the candidate check.  */
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}
1016 :
1017 : #undef DUMP_AND_RETURN
1018 :
1019 : /* Return true if function is known to be finite. */
1020 :
1021 : bool
1022 4237368 : finite_function_p ()
1023 : {
1024 : /* Const functions cannot have back edges (an
1025 : indication of possible infinite loop side
1026 : effect. */
1027 4237368 : bool finite = true;
1028 4237368 : if (mark_dfs_back_edges ())
1029 : {
1030 : /* Preheaders are needed for SCEV to work.
1031 : Simple latches and recorded exits improve chances that loop will
1032 : proved to be finite in testcases such as in loop-15.c
1033 : and loop-24.c */
1034 428587 : loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1035 : | LOOPS_HAVE_SIMPLE_LATCHES
1036 : | LOOPS_HAVE_RECORDED_EXITS);
1037 428587 : if (dump_file && (dump_flags & TDF_DETAILS))
1038 0 : flow_loops_dump (dump_file, NULL, 0);
1039 428587 : if (mark_irreducible_loops ())
1040 : {
1041 2065 : if (dump_file)
1042 0 : fprintf (dump_file, " has irreducible loops\n");
1043 : finite = false;
1044 : }
1045 : else
1046 : {
1047 426522 : scev_initialize ();
1048 2121681 : for (auto loop : loops_list (cfun, 0))
1049 917746 : if (!finite_loop_p (loop))
1050 : {
1051 75631 : if (dump_file)
1052 1 : fprintf (dump_file, " cannot prove finiteness of "
1053 : "loop %i\n", loop->num);
1054 : finite =false;
1055 : break;
1056 426522 : }
1057 426522 : scev_finalize ();
1058 : }
1059 428587 : loop_optimizer_finalize ();
1060 : }
1061 4237368 : return finite;
1062 : }
1063 :
1064 : /* This is the main routine for finding the reference patterns for
1065 : global variables within a function FN. */
1066 :
/* Analyze the body of FN and return a freshly allocated funct_state
   describing its local pure/const, looping, throwing, can-free and
   malloc properties.  IPA is true when run as part of the IPA pass
   (affects thunk handling, check_stmt and malloc candidate mode).
   The caller owns the returned state and must free it.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start from the most optimistic local state; check_stmt only ever
     degrades it.  */
  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  /* Seed the "previously known" state from declaration flags
     (e.g. existing const/pure/noreturn attributes).  */
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      /* Virtual-offset thunks read the vtable, so they are never
	 const.  */
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->dump_name ());
    }

  /* Statement walking below needs FN's body to be the current
     function; matched by pop_cfun before every return path.  */
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  /* NULL memory accesses terminates BB.  These accesses are known
	     to trip undefined behaviour.  gimple-ssa-isolate-paths turns them
	     to volatile accesses and adds builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, " NULL memory access; terminating BB%s\n",
			 flag_non_call_exceptions ? "; looping" : "");
	      if (flag_non_call_exceptions)
		{
		  l->looping = true;
		  if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
		    {
		      if (dump_file)
			fprintf (dump_file, " can throw externally\n");
		      l->can_throw = true;
		    }
		}
	      break;
	    }
	  check_stmt (&gsi, l, ipa);
	  /* Once every tracked property has hit bottom, further
	     statements cannot change anything; stop scanning.  */
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  /* A function with loops we cannot prove finite may loop forever,
     which is a side effect for const/pure purposes.  */
  if (l->pure_const_state != IPA_NEITHER
      && !l->looping
      && !finite_function_p ())
    l->looping = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  /* Trust user-declared attributes over what the body analysis
     found.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  /* Compute the local malloc state: declared attribute wins; otherwise
     in IPA mode record an optimistic TOP for propagation, else decide
     now from the body alone.  */
  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
1184 :
1185 : void
1186 19196 : funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1187 : {
1188 : /* There are some shared nodes, in particular the initializers on
1189 : static declarations. We do not need to scan them more than once
1190 : since all we would be interested in are the addressof
1191 : operations. */
1192 19196 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1193 : {
1194 19195 : funct_state_d *a = analyze_function (node, true);
1195 19195 : new (state) funct_state_d (*a);
1196 19195 : free (a);
1197 : }
1198 : else
1199 : /* Do not keep stale summaries. */
1200 1 : funct_state_summaries->remove (node);
1201 19196 : }
1202 :
1203 : /* Called when new clone is inserted to callgraph late. */
1204 :
1205 : void
1206 1170737 : funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1207 : funct_state_d *src_data,
1208 : funct_state_d *dst_data)
1209 : {
1210 1170737 : new (dst_data) funct_state_d (*src_data);
1211 1170737 : if (dst_data->malloc_state == STATE_MALLOC
1212 1170737 : && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1213 11 : dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1214 1170737 : }
1215 :
1216 :
1217 : void
1218 160807 : pass_ipa_pure_const::
1219 : register_hooks (void)
1220 : {
1221 160807 : if (init_p)
1222 : return;
1223 :
1224 160807 : init_p = true;
1225 :
1226 160807 : funct_state_summaries = new funct_state_summary_t (symtab);
1227 : }
1228 :
1229 :
1230 : /* Analyze each function in the cgraph to see if it is locally PURE or
1231 : CONST. */
1232 :
1233 : static void
1234 148605 : pure_const_generate_summary (void)
1235 : {
1236 148605 : struct cgraph_node *node;
1237 :
1238 148605 : pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1239 148605 : pass->register_hooks ();
1240 :
1241 : /* Process all of the functions.
1242 :
1243 : We process AVAIL_INTERPOSABLE functions. We cannot use the results
1244 : by default, but the info can be used at LTO with -fwhole-program or
1245 : when function got cloned and the clone is AVAILABLE. */
1246 :
1247 1546826 : FOR_EACH_DEFINED_FUNCTION (node)
1248 1398221 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1249 : {
1250 1397976 : funct_state_d *a = analyze_function (node, true);
1251 1397976 : new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1252 1397976 : free (a);
1253 : }
1254 148605 : }
1255 :
1256 :
1257 : /* Serialize the ipa info for lto. */
1258 :
/* Stream out the pure/const summaries of all defined functions in the
   current LTO partition: first a count, then per function a node
   reference followed by a bitpack of the state flags.  The pack order
   here must be mirrored exactly by pure_const_read_summary.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count the records so the reader knows how many to
     expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
1313 :
1314 :
1315 : /* Deserialize the ipa info for lto. */
1316 :
/* Stream in the pure/const summaries written by
   pure_const_write_summary: for each input file, read the record count
   and then per function a node reference plus a bitpack of flags, in
   exactly the order the writer packed them.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* Files compiled without this pass enabled have no section.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, " previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file," function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file," function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file," function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file," function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
1402 :
1403 : /* We only propagate across edges that can throw externally and their callee
1404 : is not interposable. */
1405 :
1406 : static bool
1407 7344640 : ignore_edge_for_nothrow (struct cgraph_edge *e)
1408 : {
1409 7344640 : if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1410 : return true;
1411 :
1412 2141744 : enum availability avail;
1413 2141744 : cgraph_node *ultimate_target
1414 2141744 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1415 2141744 : if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1416 : return true;
1417 758697 : return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1418 206494 : && !e->callee->binds_to_current_def_p (e->caller))
1419 758663 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1420 1516257 : || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1421 : }
1422 :
1423 : /* Return true if NODE is self recursive function.
1424 : Indirectly recursive functions appears as non-trivial strongly
1425 : connected components, so we need to care about self recursion
1426 : only. */
1427 :
1428 : static bool
1429 2080352 : self_recursive_p (struct cgraph_node *node)
1430 : {
1431 2080352 : struct cgraph_edge *e;
1432 8147896 : for (e = node->callees; e; e = e->next_callee)
1433 6071458 : if (e->callee->function_symbol () == node)
1434 : return true;
1435 : return false;
1436 : }
1437 :
1438 : /* Return true if N is cdtor that is not const or pure. In this case we may
1439 : need to remove unreachable function if it is marked const/pure. */
1440 :
1441 : static bool
1442 51259 : cdtor_p (cgraph_node *n, void *)
1443 : {
1444 51259 : if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1445 3 : return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1446 3 : || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1447 : return false;
1448 : }
1449 :
1450 : /* Skip edges from and to nodes without ipa_pure_const enabled.
1451 : Ignore not available symbols. */
1452 :
1453 : static bool
1454 7344640 : ignore_edge_for_pure_const (struct cgraph_edge *e)
1455 : {
1456 7344640 : enum availability avail;
1457 7344640 : cgraph_node *ultimate_target
1458 7344640 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1459 :
1460 7344640 : return (avail <= AVAIL_INTERPOSABLE
1461 2572913 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1462 9908842 : || !opt_for_fn (ultimate_target->decl,
1463 7344640 : flag_ipa_pure_const));
1464 : }
1465 :
1466 : /* Return true if function should be skipped for local pure const analysis. */
1467 :
1468 : static bool
1469 4668106 : skip_function_for_local_pure_const (struct cgraph_node *node)
1470 : {
1471 : /* Because we do not schedule pass_fixup_cfg over whole program after early
1472 : optimizations we must not promote functions that are called by already
1473 : processed functions. */
1474 :
1475 4668106 : if (function_called_by_processed_nodes_p ())
1476 : {
1477 3384 : if (dump_file)
1478 1 : fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1479 3384 : return true;
1480 : }
1481 : /* Save some work and do not analyze functions which are interposable and
1482 : do not have any non-interposable aliases. */
1483 4664722 : if (node->get_availability () <= AVAIL_INTERPOSABLE
1484 207514 : && !flag_lto
1485 4864963 : && !node->has_aliases_p ())
1486 : {
1487 190697 : if (dump_file)
1488 0 : fprintf (dump_file,
1489 : "Function is interposable; not analyzing.\n");
1490 190697 : return true;
1491 : }
1492 : return false;
1493 : }
1494 :
/* Make function const and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable ctors.  */
1498 :
bool
ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  /* Nothing to do when the declaration is already at least as strong
     as what we would set.  */
  if (TREE_READONLY (node->decl)
      && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
    return false;
  warn_function_const (node->decl, !looping);
  /* In local mode, promoting a function that earlier-processed callers
     already saw would be unsafe; see
     skip_function_for_local_pure_const.  */
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %sconst: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  /* A non-looping const cdtor becomes removable; remember whether any
     such cdtor exists before changing the flags.  */
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_const_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %sconst: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      /* Locally: report the change.  Globally: report whether a
	 now-removable cdtor may exist.  */
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
1531 :
/* Make function pure and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable ctors.  */
1535 :
bool
ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  /* Nothing to do when the declaration is already const, or already at
     least as strongly pure as what we would set.  */
  if (TREE_READONLY (node->decl)
      || (DECL_PURE_P (node->decl)
	  && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
    return false;
  warn_function_pure (node->decl, !looping);
  /* In local mode, promoting a function that earlier-processed callers
     already saw would be unsafe; see
     skip_function_for_local_pure_const.  */
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %spure: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  /* A non-looping pure cdtor becomes removable; remember whether any
     such cdtor exists before changing the flags.  */
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_pure_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %spure: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      /* Locally: report the change.  Globally: report whether a
	 now-removable cdtor may exist.  */
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
1569 :
1570 : /* Produce transitive closure over the callgraph and compute pure/const
1571 : attributes. */
1572 :
1573 : static bool
1574 151619 : propagate_pure_const (void)
1575 : {
1576 151619 : struct cgraph_node *node;
1577 151619 : struct cgraph_node *w;
1578 151619 : struct cgraph_node **order =
1579 151619 : XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1580 151619 : int order_pos;
1581 151619 : int i;
1582 151619 : struct ipa_dfs_info * w_info;
1583 151619 : bool remove_p = false;
1584 :
1585 151619 : order_pos = ipa_reduced_postorder (order, true,
1586 : ignore_edge_for_pure_const);
1587 151619 : if (dump_file)
1588 : {
1589 29 : cgraph_node::dump_cgraph (dump_file);
1590 29 : ipa_print_order (dump_file, "reduced", order, order_pos);
1591 : }
1592 :
1593 : /* Propagate the local information through the call graph to produce
1594 : the global information. All the nodes within a cycle will have
1595 : the same info so we collapse cycles first. Then we can do the
1596 : propagation in one pass from the leaves to the roots. */
1597 2540268 : for (i = 0; i < order_pos; i++ )
1598 : {
1599 2388649 : enum pure_const_state_e pure_const_state = IPA_CONST;
1600 2388649 : bool looping = false;
1601 2388649 : int count = 0;
1602 2388649 : node = order[i];
1603 :
1604 2388649 : if (node->alias)
1605 37475 : continue;
1606 :
1607 2351174 : if (dump_file && (dump_flags & TDF_DETAILS))
1608 5 : fprintf (dump_file, "Starting cycle\n");
1609 :
1610 : /* Find the worst state for any node in the cycle. */
1611 : w = node;
1612 3933068 : while (w && pure_const_state != IPA_NEITHER)
1613 : {
1614 2353748 : struct cgraph_edge *e;
1615 2353748 : struct cgraph_edge *ie;
1616 2353748 : int i;
1617 2353748 : struct ipa_ref *ref = NULL;
1618 :
1619 2353748 : funct_state w_l = funct_state_summaries->get_create (w);
1620 2353748 : if (dump_file && (dump_flags & TDF_DETAILS))
1621 6 : fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1622 : w->dump_name (),
1623 6 : pure_const_names[w_l->pure_const_state],
1624 6 : w_l->looping);
1625 :
1626 : /* First merge in function body properties.
1627 : We are safe to pass NULL as FROM and TO because we will take care
1628 : of possible interposition when walking callees. */
1629 2353748 : worse_state (&pure_const_state, &looping,
1630 2353748 : w_l->pure_const_state, w_l->looping,
1631 : NULL, NULL);
1632 2353748 : if (pure_const_state == IPA_NEITHER)
1633 : break;
1634 :
1635 1581894 : count++;
1636 :
1637 : /* We consider recursive cycles as possibly infinite.
1638 : This might be relaxed since infinite recursion leads to stack
1639 : overflow. */
1640 1581894 : if (count > 1)
1641 2574 : looping = true;
1642 :
1643 : /* Now walk the edges and merge in callee properties. */
1644 2419813 : for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1645 837919 : e = e->next_callee)
1646 : {
1647 1901057 : enum availability avail;
1648 1901057 : struct cgraph_node *y = e->callee->
1649 3802114 : function_or_virtual_thunk_symbol (&avail,
1650 1901057 : e->caller);
1651 1901057 : enum pure_const_state_e edge_state = IPA_CONST;
1652 1901057 : bool edge_looping = false;
1653 :
1654 1901057 : if (e->recursive_p ())
1655 5477 : looping = true;
1656 :
1657 1901057 : if (dump_file && (dump_flags & TDF_DETAILS))
1658 : {
1659 7 : fprintf (dump_file, " Call to %s",
1660 7 : e->callee->dump_name ());
1661 : }
1662 1901057 : if (avail > AVAIL_INTERPOSABLE)
1663 : {
1664 634643 : funct_state y_l = funct_state_summaries->get_create (y);
1665 :
1666 634643 : if (dump_file && (dump_flags & TDF_DETAILS))
1667 : {
1668 2 : fprintf (dump_file,
1669 : " state:%s looping:%i\n",
1670 2 : pure_const_names[y_l->pure_const_state],
1671 2 : y_l->looping);
1672 : }
1673 634643 : if (y_l->pure_const_state > IPA_PURE
1674 634643 : && e->cannot_lead_to_return_p ())
1675 : {
1676 8410 : if (dump_file && (dump_flags & TDF_DETAILS))
1677 0 : fprintf (dump_file,
1678 : " Ignoring side effects"
1679 : " -> pure, looping\n");
1680 8410 : edge_state = IPA_PURE;
1681 8410 : edge_looping = true;
1682 : }
1683 : else
1684 : {
1685 626233 : edge_state = y_l->pure_const_state;
1686 626233 : edge_looping = y_l->looping;
1687 : }
1688 : }
1689 1266414 : else if (builtin_safe_for_const_function_p (&edge_looping,
1690 : y->decl))
1691 : edge_state = IPA_CONST;
1692 : else
1693 1088423 : state_from_flags (&edge_state, &edge_looping,
1694 1088423 : flags_from_decl_or_type (y->decl),
1695 1088423 : e->cannot_lead_to_return_p ());
1696 :
1697 : /* Merge the results with what we already know. */
1698 1901057 : better_state (&edge_state, &edge_looping,
1699 : w_l->state_previously_known,
1700 1901057 : w_l->looping_previously_known);
1701 1901057 : worse_state (&pure_const_state, &looping,
1702 1901057 : edge_state, edge_looping, e->caller, e->callee);
1703 1901057 : if (pure_const_state == IPA_NEITHER)
1704 : break;
1705 : }
1706 :
1707 : /* Now process the indirect call. */
1708 1581894 : for (ie = w->indirect_calls;
1709 1582642 : ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1710 : {
1711 26805 : enum pure_const_state_e edge_state = IPA_CONST;
1712 26805 : bool edge_looping = false;
1713 :
1714 26805 : if (dump_file && (dump_flags & TDF_DETAILS))
1715 0 : fprintf (dump_file, " Indirect call");
1716 53610 : state_from_flags (&edge_state, &edge_looping,
1717 26805 : ie->indirect_info->ecf_flags,
1718 26805 : ie->cannot_lead_to_return_p ());
1719 : /* Merge the results with what we already know. */
1720 26805 : better_state (&edge_state, &edge_looping,
1721 : w_l->state_previously_known,
1722 26805 : w_l->looping_previously_known);
1723 26805 : worse_state (&pure_const_state, &looping,
1724 : edge_state, edge_looping, NULL, NULL);
1725 26805 : if (pure_const_state == IPA_NEITHER)
1726 : break;
1727 : }
1728 :
1729 : /* And finally all loads and stores. */
1730 319914 : for (i = 0; w->iterate_reference (i, ref)
1731 2549464 : && pure_const_state != IPA_NEITHER; i++)
1732 : {
1733 348039 : enum pure_const_state_e ref_state = IPA_CONST;
1734 348039 : bool ref_looping = false;
1735 348039 : switch (ref->use)
1736 : {
1737 220514 : case IPA_REF_LOAD:
1738 : /* readonly reads are safe. */
1739 220514 : if (TREE_READONLY (ref->referred->decl))
1740 : break;
1741 204687 : if (dump_file && (dump_flags & TDF_DETAILS))
1742 0 : fprintf (dump_file, " nonreadonly global var read\n");
1743 204687 : ref_state = IPA_PURE;
1744 204687 : break;
1745 89891 : case IPA_REF_STORE:
1746 89891 : if (ref->cannot_lead_to_return ())
1747 : break;
1748 28203 : ref_state = IPA_NEITHER;
1749 28203 : if (dump_file && (dump_flags & TDF_DETAILS))
1750 0 : fprintf (dump_file, " global var write\n");
1751 : break;
1752 : case IPA_REF_ADDR:
1753 : break;
1754 0 : default:
1755 0 : gcc_unreachable ();
1756 : }
1757 348039 : better_state (&ref_state, &ref_looping,
1758 : w_l->state_previously_known,
1759 348039 : w_l->looping_previously_known);
1760 348039 : worse_state (&pure_const_state, &looping,
1761 : ref_state, ref_looping, NULL, NULL);
1762 348039 : if (pure_const_state == IPA_NEITHER)
1763 : break;
1764 : }
1765 1581894 : w_info = (struct ipa_dfs_info *) w->aux;
1766 1581894 : w = w_info->next_cycle;
1767 : }
1768 2351174 : if (dump_file && (dump_flags & TDF_DETAILS))
1769 5 : fprintf (dump_file, "Result %s looping %i\n",
1770 5 : pure_const_names [pure_const_state],
1771 : looping);
1772 :
1773 : /* Find the worst state of can_free for any node in the cycle. */
1774 : bool can_free = false;
1775 : w = node;
1776 4705403 : while (w && !can_free)
1777 : {
1778 2354229 : struct cgraph_edge *e;
1779 2354229 : funct_state w_l = funct_state_summaries->get (w);
1780 :
1781 2354229 : if (w_l->can_free
1782 2175635 : || w->get_availability () == AVAIL_INTERPOSABLE
1783 4450315 : || w->indirect_calls)
1784 : can_free = true;
1785 :
1786 4083101 : for (e = w->callees; e && !can_free; e = e->next_callee)
1787 : {
1788 1728872 : enum availability avail;
1789 1728872 : struct cgraph_node *y = e->callee->
1790 3457744 : function_or_virtual_thunk_symbol (&avail,
1791 1728872 : e->caller);
1792 :
1793 1728872 : if (avail > AVAIL_INTERPOSABLE)
1794 779600 : can_free = funct_state_summaries->get (y)->can_free;
1795 : else
1796 : can_free = true;
1797 : }
1798 2354229 : w_info = (struct ipa_dfs_info *) w->aux;
1799 2354229 : w = w_info->next_cycle;
1800 : }
1801 :
1802 : /* Copy back the region's pure_const_state which is shared by
1803 : all nodes in the region. */
1804 : w = node;
1805 4722711 : while (w)
1806 : {
1807 2371537 : funct_state w_l = funct_state_summaries->get (w);
1808 2371537 : enum pure_const_state_e this_state = pure_const_state;
1809 2371537 : bool this_looping = looping;
1810 :
1811 2371537 : w_l->can_free = can_free;
1812 2371537 : w->nonfreeing_fn = !can_free;
1813 2371537 : if (!can_free && dump_file)
1814 28 : fprintf (dump_file, "Function found not to call free: %s\n",
1815 : w->dump_name ());
1816 :
1817 2371537 : if (w_l->state_previously_known != IPA_NEITHER
1818 411948 : && this_state > w_l->state_previously_known)
1819 : {
1820 1177 : if (this_state == IPA_NEITHER)
1821 50 : this_looping = w_l->looping_previously_known;
1822 : this_state = w_l->state_previously_known;
1823 : }
1824 2371537 : if (!this_looping && self_recursive_p (w))
1825 : this_looping = true;
1826 2371537 : if (!w_l->looping_previously_known)
1827 316315 : this_looping = false;
1828 :
1829 : /* All nodes within a cycle share the same info. */
1830 2371537 : w_l->pure_const_state = this_state;
1831 2371537 : w_l->looping = this_looping;
1832 :
1833 : /* Inline clones share declaration with their offline copies;
1834 : do not modify their declarations since the offline copy may
1835 : be different. */
1836 2371537 : if (!w->inlined_to)
1837 1046775 : switch (this_state)
1838 : {
1839 162767 : case IPA_CONST:
1840 162767 : remove_p |= ipa_make_function_const (w, this_looping, false);
1841 162767 : break;
1842 :
1843 98753 : case IPA_PURE:
1844 98753 : remove_p |= ipa_make_function_pure (w, this_looping, false);
1845 98753 : break;
1846 :
1847 : default:
1848 : break;
1849 : }
1850 2371537 : w_info = (struct ipa_dfs_info *) w->aux;
1851 2371537 : w = w_info->next_cycle;
1852 : }
1853 : }
1854 :
1855 151619 : ipa_free_postorder_info ();
1856 151619 : free (order);
1857 151619 : return remove_p;
1858 : }
1859 :
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.

   Works on the strongly connected components of the callgraph (as
   computed by ipa_reduced_postorder): every function in a cycle must
   share the same nothrow property, so the worst state found anywhere
   in a cycle is copied back to all of its members.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Collapse cycles first; edges irrelevant for nothrow propagation
     (per ignore_edge_for_nothrow) are not considered when forming
     the reduced postorder.  */
  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      /* An interposable body may be replaced by one that
		 throws; assume the worst.  */
	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  /* Call sites that cannot throw externally, or callees
		     already known nothrow, cannot change the result.  */
		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      /* Indirect calls: unless the call site itself is marked
		 ECF_NOTHROW, anything could be reached.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->dump_name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1971 :
1972 : /* Debugging function to dump state of malloc lattice. */
1973 :
1974 : DEBUG_FUNCTION
1975 : static void
1976 303238 : dump_malloc_lattice (FILE *dump_file, const char *s)
1977 : {
1978 303238 : if (!dump_file)
1979 : return;
1980 :
1981 58 : fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1982 58 : cgraph_node *node;
1983 290 : FOR_EACH_FUNCTION (node)
1984 : {
1985 232 : funct_state fs = funct_state_summaries->get (node);
1986 232 : if (fs)
1987 156 : fprintf (dump_file, "%s: %s\n", node->dump_name (),
1988 156 : malloc_state_names[fs->malloc_state]);
1989 : }
1990 : }
1991 :
1992 : /* Propagate malloc attribute across the callgraph. */
1993 :
1994 : static void
1995 151619 : propagate_malloc (void)
1996 : {
1997 151619 : cgraph_node *node;
1998 3974324 : FOR_EACH_FUNCTION (node)
1999 : {
2000 3822705 : if (DECL_IS_MALLOC (node->decl))
2001 84644 : if (!funct_state_summaries->exists (node))
2002 : {
2003 23359 : funct_state fs = funct_state_summaries->get_create (node);
2004 23359 : fs->malloc_state = STATE_MALLOC;
2005 : }
2006 : }
2007 :
2008 151619 : dump_malloc_lattice (dump_file, "Initial");
2009 151619 : struct cgraph_node **order
2010 151619 : = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
2011 151619 : int order_pos = ipa_reverse_postorder (order);
2012 151619 : bool changed = true;
2013 :
2014 456676 : while (changed)
2015 : {
2016 153438 : changed = false;
2017 : /* Walk in postorder. */
2018 4704145 : for (int i = order_pos - 1; i >= 0; --i)
2019 : {
2020 4550707 : cgraph_node *node = order[i];
2021 6150123 : if (node->alias
2022 4507325 : || !node->definition
2023 7501998 : || !funct_state_summaries->exists (node))
2024 4465202 : continue;
2025 :
2026 2951291 : funct_state l = funct_state_summaries->get (node);
2027 :
2028 : /* FIXME: add support for indirect-calls. */
2029 2951291 : if (node->indirect_calls)
2030 : {
2031 149017 : l->malloc_state = STATE_MALLOC_BOTTOM;
2032 149017 : continue;
2033 : }
2034 :
2035 2802274 : if (node->get_availability () <= AVAIL_INTERPOSABLE)
2036 : {
2037 93190 : l->malloc_state = STATE_MALLOC_BOTTOM;
2038 93190 : continue;
2039 : }
2040 :
2041 2709084 : if (l->malloc_state == STATE_MALLOC_BOTTOM)
2042 2623579 : continue;
2043 :
2044 85505 : auto_vec<cgraph_node *, 16> callees;
2045 373600 : for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2046 : {
2047 288095 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2048 288095 : if (es && es->is_return_callee_uncaptured)
2049 13331 : callees.safe_push (cs->callee);
2050 : }
2051 :
2052 85505 : malloc_state_e new_state = l->malloc_state;
2053 98836 : for (unsigned j = 0; j < callees.length (); j++)
2054 : {
2055 13331 : cgraph_node *callee = callees[j];
2056 13331 : if (!funct_state_summaries->exists (node))
2057 : {
2058 : new_state = STATE_MALLOC_BOTTOM;
2059 : break;
2060 : }
2061 13331 : malloc_state_e callee_state
2062 13331 : = funct_state_summaries->get_create (callee)->malloc_state;
2063 13331 : if (new_state < callee_state)
2064 11231 : new_state = callee_state;
2065 : }
2066 85505 : if (new_state != l->malloc_state)
2067 : {
2068 11225 : changed = true;
2069 11225 : l->malloc_state = new_state;
2070 : }
2071 85505 : }
2072 : }
2073 :
2074 2560635 : FOR_EACH_DEFINED_FUNCTION (node)
2075 2409016 : if (funct_state_summaries->exists (node))
2076 : {
2077 2397070 : funct_state l = funct_state_summaries->get (node);
2078 2397070 : if (!node->alias
2079 2371537 : && l->malloc_state == STATE_MALLOC
2080 52994 : && !node->inlined_to
2081 2397499 : && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2082 : {
2083 429 : if (dump_file && (dump_flags & TDF_DETAILS))
2084 6 : fprintf (dump_file, "Function %s found to be malloc\n",
2085 : node->dump_name ());
2086 :
2087 429 : bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2088 429 : node->set_malloc_flag (true);
2089 429 : if (!malloc_decl_p && warn_suggest_attribute_malloc)
2090 0 : warn_function_malloc (node->decl);
2091 : }
2092 : }
2093 :
2094 151619 : dump_malloc_lattice (dump_file, "after propagation");
2095 151619 : ipa_free_postorder_info ();
2096 151619 : free (order);
2097 151619 : }
2098 :
2099 : /* Produce the global information by preforming a transitive closure
2100 : on the local information that was produced by generate_summary. */
2101 :
2102 : unsigned int
2103 151619 : pass_ipa_pure_const::
2104 : execute (function *)
2105 : {
2106 151619 : bool remove_p;
2107 :
2108 : /* Nothrow makes more function to not lead to return and improve
2109 : later analysis. */
2110 151619 : propagate_nothrow ();
2111 151619 : propagate_malloc ();
2112 151619 : remove_p = propagate_pure_const ();
2113 :
2114 151619 : delete funct_state_summaries;
2115 151619 : return remove_p ? TODO_remove_functions : 0;
2116 : }
2117 :
2118 : static bool
2119 4040667 : gate_pure_const (void)
2120 : {
2121 587007 : return flag_ipa_pure_const || in_lto_p;
2122 : }
2123 :
/* Construct the IPA pure/const pass.  The summary generation and LTO
   read/write hooks are wired here; the pass has no per-function
   transform phase and no optimization-summary streaming.  */
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p (false) {}
2136 :
2137 : ipa_opt_pass_d *
2138 285722 : make_pass_ipa_pure_const (gcc::context *ctxt)
2139 : {
2140 285722 : return new pass_ipa_pure_const (ctxt);
2141 : }
2142 :
2143 : /* Simple local pass for pure const discovery reusing the analysis from
2144 : ipa_pure_const. This pass is effective when executed together with
2145 : other optimization passes in early optimization pass queue. */
2146 :
2147 : namespace {
2148 :
/* Pass metadata for the local (per-function) pure/const discovery
   pass; it is timed under TV_IPA_PURE_CONST together with the IPA
   variant and requires/provides no CFG properties.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2161 :
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloned so the pass can appear more than once in the pass queue.  */
  opt_pass * clone () final override
  {
     return new pass_local_pure_const (m_ctxt);
  }
  /* Same gate as the IPA pass: -fipa-pure-const or LTO.  */
  bool gate (function *) final override { return gate_pure_const (); }
  unsigned int execute (function *) final override;

}; // class pass_local_pure_const
2178 :
/* Run local pure/const discovery for FUN.  Reuses analyze_function
   from the IPA analysis and applies the results immediately to the
   current function; also performs NORETURN, nothrow and malloc
   discovery as byproducts.  Returns fixup-cfg's TODO flags when
   anything changed, 0 otherwise.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Even when the optimization itself is skipped, still analyze the
     body if attribute-suggestion warnings were requested.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
	changed = true;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
    }

  /* Apply the const/pure lattice result to the declaration.  */
  switch (l->pure_const_state)
    {
    case IPA_CONST:
      changed |= ipa_make_function_const
		   (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    case IPA_PURE:
      changed |= ipa_make_function_pure
		   (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->dump_name ());
    }

  /* The state from analyze_function is heap-allocated here (the local
     pass keeps no summaries); release it.  */
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
2255 :
2256 : } // anon namespace
2257 :
2258 : gimple_opt_pass *
2259 285722 : make_pass_local_pure_const (gcc::context *ctxt)
2260 : {
2261 285722 : return new pass_local_pure_const (ctxt);
2262 : }
2263 :
2264 : /* Emit noreturn warnings. */
2265 :
2266 : namespace {
2267 :
/* Pass metadata for the noreturn-suggestion warning pass; it only
   emits diagnostics, so it needs a CFG but provides nothing.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2280 :
2281 : class pass_warn_function_noreturn : public gimple_opt_pass
2282 : {
2283 : public:
2284 285722 : pass_warn_function_noreturn (gcc::context *ctxt)
2285 571444 : : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2286 : {}
2287 :
2288 : /* opt_pass methods: */
2289 1472150 : bool gate (function *) final override
2290 : {
2291 1472150 : return warn_suggest_attribute_noreturn;
2292 : }
2293 29 : unsigned int execute (function *fun) final override
2294 : {
2295 29 : if (!TREE_THIS_VOLATILE (current_function_decl)
2296 29 : && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2297 4 : warn_function_noreturn (current_function_decl);
2298 29 : return 0;
2299 : }
2300 :
2301 : }; // class pass_warn_function_noreturn
2302 :
2303 : } // anon namespace
2304 :
2305 : gimple_opt_pass *
2306 285722 : make_pass_warn_function_noreturn (gcc::context *ctxt)
2307 : {
2308 285722 : return new pass_warn_function_noreturn (ctxt);
2309 : }
2310 :
2311 : /* Simple local pass for nothrow discovery reusing the analysis from
2312 : ipa_pure_const. This pass is effective when executed together with
2313 : other optimization passes in early optimization pass queue. */
2314 :
2315 : namespace {
2316 :
/* Pass metadata for the local nothrow discovery pass; timed under
   TV_IPA_PURE_CONST like the rest of this file's passes.  */
const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2329 :
/* Local nothrow discovery pass; scans the current function's
   statements for external throwers and sets TREE_NOTHROW when there
   are none.  */
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
  /* Runs whenever we optimize; there is no dedicated flag.  */
  bool gate (function *) final override { return optimize; }
  unsigned int execute (function *) final override;

}; // class pass_nothrow
2343 :
/* Mark the current function nothrow when no statement in it can
   throw externally.  Recursive self-calls are excused during the scan
   (the function cannot throw "through itself"), and once the flag is
   set their now-dead EH edges are purged.  */

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering, we cannot really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      /* NOTE(review): returning `true' (== 1) here looks suspicious --
	 the return value of execute is a TODO flag mask and every
	 other exit returns 0 or an explicit TODO_* flag; confirm this
	 was not meant to be `return 0;'.  */
      return true;
    }

  /* Scan every statement; bail out (returning 0, function not marked)
     at the first non-self-recursive statement that can throw
     externally.  */
  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
	  {
	    /* Self-recursive calls are not counted as throwers.  */
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  /* Now that the function is nothrow, EH edges out of recursive calls
     at block ends are dead and can be removed.  */
  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
	{
	  tree callee_t = gimple_call_fndecl (g);
	  if (callee_t
	      && recursive_call_p (current_function_decl, callee_t)
	      && maybe_clean_eh_stmt (g)
	      && gimple_purge_dead_eh_edges (this_block))
	    cfg_changed = true;
	}

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}
2409 :
2410 : } // anon namespace
2411 :
2412 : gimple_opt_pass *
2413 285722 : make_pass_nothrow (gcc::context *ctxt)
2414 : {
2415 285722 : return new pass_nothrow (ctxt);
2416 : }
|