Line data Source code
1 : /* Callgraph based analysis of static variables.
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 : Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : /* This file marks functions as being either const (TREE_READONLY) or
22 : pure (DECL_PURE_P). It can also set a variant of these that
23 : are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24 :
25 : This must be run after inlining decisions have been made since
26 : otherwise, the local sets will not contain information that is
27 : consistent with post inlined state. The global sets are not prone
28 : to this problem since they are by definition transitive. */
29 :
30 : /* The code in this module is called by the ipa pass manager. It
31 : should be one of the later passes since it's information is used by
32 : the rest of the compilation. */
33 :
34 : #include "config.h"
35 : #include "system.h"
36 : #include "coretypes.h"
37 : #include "backend.h"
38 : #include "target.h"
39 : #include "tree.h"
40 : #include "gimple.h"
41 : #include "tree-pass.h"
42 : #include "tree-streamer.h"
43 : #include "cgraph.h"
44 : #include "diagnostic.h"
45 : #include "calls.h"
46 : #include "cfganal.h"
47 : #include "tree-eh.h"
48 : #include "gimple-iterator.h"
49 : #include "gimple-walk.h"
50 : #include "tree-cfg.h"
51 : #include "tree-ssa-loop-niter.h"
52 : #include "langhooks.h"
53 : #include "ipa-utils.h"
54 : #include "gimple-pretty-print.h"
55 : #include "cfgloop.h"
56 : #include "tree-scalar-evolution.h"
57 : #include "intl.h"
58 : #include "opts.h"
59 : #include "ssa.h"
60 : #include "alloc-pool.h"
61 : #include "symbol-summary.h"
62 : #include "sreal.h"
63 : #include "ipa-cp.h"
64 : #include "ipa-prop.h"
65 : #include "ipa-fnsummary.h"
66 : #include "symtab-thunks.h"
67 : #include "dbgcnt.h"
68 : #include "gcc-urlifier.h"
69 :
70 : /* Lattice values for const and pure functions. Everything starts out
71 : being const, then may drop to pure and then neither depending on
72 : what is found. */
73 : enum pure_const_state_e
74 : {
75 : IPA_CONST,
76 : IPA_PURE,
77 : IPA_NEITHER
78 : };
79 :
80 : static const char *pure_const_names[3] = {"const", "pure", "neither"};
81 :
82 : enum malloc_state_e
83 : {
84 : STATE_MALLOC_TOP,
85 : STATE_MALLOC,
86 : STATE_MALLOC_BOTTOM
87 : };
88 :
89 : static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
90 :
91 : /* Holder for the const_state. There is one of these per function
92 : decl. */
93 : class funct_state_d
94 : {
95 : public:
96 2683000 : funct_state_d (): pure_const_state (IPA_NEITHER),
97 2683000 : state_previously_known (IPA_NEITHER), looping_previously_known (true),
98 2683000 : looping (true), can_throw (true), can_free (true),
99 0 : malloc_state (STATE_MALLOC_BOTTOM) {}
100 :
101 2571100 : funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
102 2571100 : state_previously_known (s.state_previously_known),
103 2571100 : looping_previously_known (s.looping_previously_known),
104 2571100 : looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
105 2571100 : malloc_state (s.malloc_state) {}
106 :
107 : /* See above. */
108 : enum pure_const_state_e pure_const_state;
109 : /* What user set here; we can be always sure about this. */
110 : enum pure_const_state_e state_previously_known;
111 : bool looping_previously_known;
112 :
113 : /* True if the function could possibly infinite loop. There are a
114 : lot of ways that this could be determined. We are pretty
115 : conservative here. While it is possible to cse pure and const
116 : calls, it is not legal to have dce get rid of the call if there
117 : is a possibility that the call could infinite loop since this is
118 : a behavioral change. */
119 : bool looping;
120 :
121 : bool can_throw;
122 :
123 : /* If function can call free, munmap or otherwise make previously
124 : non-trapping memory accesses trapping. */
125 : bool can_free;
126 :
127 : enum malloc_state_e malloc_state;
128 : };
129 :
130 : typedef class funct_state_d * funct_state;
131 :
132 : /* The storage of the funct_state is abstracted because there is the
133 : possibility that it may be desirable to move this to the cgraph
134 : local info. */
135 :
136 : class funct_state_summary_t:
137 : public fast_function_summary <funct_state_d *, va_heap>
138 : {
139 : public:
140 160647 : funct_state_summary_t (symbol_table *symtab):
141 321294 : fast_function_summary <funct_state_d *, va_heap> (symtab) {}
142 :
143 : void insert (cgraph_node *, funct_state_d *state) final override;
144 : void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
145 : funct_state_d *src_data,
146 : funct_state_d *dst_data) final override;
147 : };
148 :
149 : static funct_state_summary_t *funct_state_summaries = NULL;
150 :
151 : static bool gate_pure_const (void);
152 :
153 : namespace {
154 :
155 : const pass_data pass_data_ipa_pure_const =
156 : {
157 : IPA_PASS, /* type */
158 : "pure-const", /* name */
159 : OPTGROUP_NONE, /* optinfo_flags */
160 : TV_IPA_PURE_CONST, /* tv_id */
161 : 0, /* properties_required */
162 : 0, /* properties_provided */
163 : 0, /* properties_destroyed */
164 : 0, /* todo_flags_start */
165 : 0, /* todo_flags_finish */
166 : };
167 :
168 : class pass_ipa_pure_const : public ipa_opt_pass_d
169 : {
170 : public:
171 : pass_ipa_pure_const(gcc::context *ctxt);
172 :
173 : /* opt_pass methods: */
174 1182450 : bool gate (function *) final override { return gate_pure_const (); }
175 : unsigned int execute (function *fun) final override;
176 :
177 : void register_hooks (void);
178 :
179 : private:
180 : bool init_p;
181 : }; // class pass_ipa_pure_const
182 :
183 : } // anon namespace
184 :
185 : /* Try to guess if function body will always be visible to compiler
186 : when compiling the call and whether compiler will be able
187 : to propagate the information by itself. */
188 :
189 : static bool
190 26 : function_always_visible_to_compiler_p (tree decl)
191 : {
192 22 : return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
193 48 : || DECL_COMDAT (decl));
194 : }
195 :
196 : /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
197 : is true if the function is known to be finite. The diagnostic is
198 : controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
199 : OPTION, this function may initialize it and it is always returned
200 : by the function. */
201 :
202 : static hash_set<tree> *
203 1247200 : suggest_attribute (diagnostics::option_id option, tree decl, bool known_finite,
204 : hash_set<tree> *warned_about,
205 : const char * attrib_name)
206 : {
207 1247200 : if (!option_enabled (option.m_idx, lang_hooks.option_lang_mask (),
208 : &global_options))
209 : return warned_about;
210 30 : if (TREE_THIS_VOLATILE (decl)
211 30 : || (known_finite && function_always_visible_to_compiler_p (decl)))
212 : return warned_about;
213 :
214 26 : if (!warned_about)
215 14 : warned_about = new hash_set<tree>;
216 26 : if (warned_about->contains (decl))
217 : return warned_about;
218 26 : warned_about->add (decl);
219 26 : auto_urlify_attributes sentinel;
220 30 : warning_at (DECL_SOURCE_LOCATION (decl),
221 : option,
222 : known_finite
223 : ? G_("function might be candidate for attribute %qs")
224 : : G_("function might be candidate for attribute %qs"
225 : " if it is known to return normally"), attrib_name);
226 26 : return warned_about;
227 26 : }
228 :
229 : /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
230 : is true if the function is known to be finite. */
231 :
232 : static void
233 364006 : warn_function_pure (tree decl, bool known_finite)
234 : {
235 : /* Declaring a void function pure makes no sense and is diagnosed
236 : by -Wattributes because calling it would have no effect. */
237 364006 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
238 : return;
239 :
240 335158 : static hash_set<tree> *warned_about;
241 335158 : warned_about
242 335158 : = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
243 : known_finite, warned_about, "pure");
244 : }
245 :
246 : /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
247 : is true if the function is known to be finite. */
248 :
249 : static void
250 905720 : warn_function_const (tree decl, bool known_finite)
251 : {
252 : /* Declaring a void function const makes no sense is diagnosed
253 : by -Wattributes because calling it would have no effect. */
254 905720 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
255 : return;
256 :
257 666215 : static hash_set<tree> *warned_about;
258 666215 : warned_about
259 666215 : = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
260 : known_finite, warned_about, "const");
261 : }
262 :
263 : /* Emit suggestion about __attribute__((malloc)) for DECL. */
264 :
265 : static void
266 3 : warn_function_malloc (tree decl)
267 : {
268 3 : static hash_set<tree> *warned_about;
269 3 : warned_about
270 3 : = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
271 : true, warned_about, "malloc");
272 3 : }
273 :
274 : /* Emit suggestion about __attribute__((noreturn)) for DECL. */
275 :
276 : static void
277 26671 : warn_function_noreturn (tree decl)
278 : {
279 26671 : tree original_decl = decl;
280 :
281 26671 : static hash_set<tree> *warned_about;
282 26671 : if (!lang_hooks.missing_noreturn_ok_p (decl)
283 26671 : && targetm.warn_func_return (decl))
284 14429 : warned_about
285 14429 : = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
286 : true, warned_about, "noreturn");
287 26671 : }
288 :
289 : void
290 9112 : warn_function_cold (tree decl)
291 : {
292 9112 : tree original_decl = decl;
293 :
294 9112 : static hash_set<tree> *warned_about;
295 9112 : warned_about
296 9112 : = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
297 : true, warned_about, "cold");
298 9112 : }
299 :
300 : void
301 222283 : warn_function_returns_nonnull (tree decl)
302 : {
303 222283 : static hash_set<tree> *warned_about;
304 222283 : warned_about
305 222283 : = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull, decl,
306 : true, warned_about, "returns_nonnull");
307 222283 : }
308 :
309 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
310 : variable T is legal in a function that is either pure or const. */
311 :
312 : static inline void
313 20208570 : check_decl (funct_state local,
314 : tree t, bool checking_write, bool ipa)
315 : {
316 : /* Do not want to do anything with volatile except mark any
317 : function that uses one to be not const or pure. */
318 20208570 : if (TREE_THIS_VOLATILE (t))
319 : {
320 1446831 : local->pure_const_state = IPA_NEITHER;
321 1446831 : if (dump_file)
322 30 : fprintf (dump_file, " Volatile operand is not const/pure\n");
323 1446831 : return;
324 : }
325 :
326 : /* Do not care about a local automatic that is not static. */
327 18761739 : if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
328 : return;
329 :
330 : /* If the variable has the "used" attribute, treat it as if it had a
331 : been touched by the devil. */
332 3579346 : if (DECL_PRESERVE_P (t))
333 : {
334 3484 : local->pure_const_state = IPA_NEITHER;
335 3484 : if (dump_file)
336 0 : fprintf (dump_file, " Used static/global variable is not const/pure\n");
337 3484 : return;
338 : }
339 :
340 : /* In IPA mode we are not interested in checking actual loads and stores;
341 : they will be processed at propagation time using ipa_ref. */
342 3575862 : if (ipa)
343 : return;
344 :
345 : /* Since we have dealt with the locals and params cases above, if we
346 : are CHECKING_WRITE, this cannot be a pure or constant
347 : function. */
348 2305753 : if (checking_write)
349 : {
350 837121 : local->pure_const_state = IPA_NEITHER;
351 837121 : if (dump_file)
352 1 : fprintf (dump_file, " static/global memory write is not const/pure\n");
353 837121 : return;
354 : }
355 :
356 1468632 : if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
357 : {
358 : /* Readonly reads are safe. */
359 842328 : if (TREE_READONLY (t))
360 : return; /* Read of a constant, do not change the function state. */
361 : else
362 : {
363 838292 : if (dump_file)
364 0 : fprintf (dump_file, " global memory read is not const\n");
365 : /* Just a regular read. */
366 838292 : if (local->pure_const_state == IPA_CONST)
367 167465 : local->pure_const_state = IPA_PURE;
368 : }
369 : }
370 : else
371 : {
372 : /* Compilation level statics can be read if they are readonly
373 : variables. */
374 626304 : if (TREE_READONLY (t))
375 : return;
376 :
377 596455 : if (dump_file)
378 1 : fprintf (dump_file, " static memory read is not const\n");
379 : /* Just a regular read. */
380 596455 : if (local->pure_const_state == IPA_CONST)
381 32656 : local->pure_const_state = IPA_PURE;
382 : }
383 : }
384 :
385 :
386 : /* Check to see if the use (or definition when CHECKING_WRITE is true)
387 : variable T is legal in a function that is either pure or const. */
388 :
389 : static inline void
390 18636684 : check_op (funct_state local, tree t, bool checking_write)
391 : {
392 18636684 : t = get_base_address (t);
393 18636684 : if (t && TREE_THIS_VOLATILE (t))
394 : {
395 20638 : local->pure_const_state = IPA_NEITHER;
396 20638 : if (dump_file)
397 2 : fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
398 20638 : return;
399 : }
400 18616046 : else if (refs_local_or_readonly_memory_p (t))
401 : {
402 4817842 : if (dump_file)
403 10 : fprintf (dump_file, " Indirect ref to local or readonly "
404 : "memory is OK\n");
405 4817842 : return;
406 : }
407 13798204 : else if (checking_write)
408 : {
409 4787705 : local->pure_const_state = IPA_NEITHER;
410 4787705 : if (dump_file)
411 64 : fprintf (dump_file, " Indirect ref write is not const/pure\n");
412 4787705 : return;
413 : }
414 : else
415 : {
416 9010499 : if (dump_file)
417 175 : fprintf (dump_file, " Indirect ref read is not const\n");
418 9010499 : if (local->pure_const_state == IPA_CONST)
419 1346817 : local->pure_const_state = IPA_PURE;
420 : }
421 : }
422 :
423 : /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
424 :
425 : static void
426 15714944 : state_from_flags (enum pure_const_state_e *state, bool *looping,
427 : int flags, bool cannot_lead_to_return)
428 : {
429 15714944 : *looping = false;
430 15714944 : if (flags & ECF_LOOPING_CONST_OR_PURE)
431 : {
432 202308 : *looping = true;
433 202308 : if (dump_file && (dump_flags & TDF_DETAILS))
434 0 : fprintf (dump_file, " looping\n");
435 : }
436 15714944 : if (flags & ECF_CONST)
437 : {
438 1208021 : *state = IPA_CONST;
439 1208021 : if (dump_file && (dump_flags & TDF_DETAILS))
440 3 : fprintf (dump_file, " const\n");
441 : }
442 14506923 : else if (flags & ECF_PURE)
443 : {
444 1192491 : *state = IPA_PURE;
445 1192491 : if (dump_file && (dump_flags & TDF_DETAILS))
446 3 : fprintf (dump_file, " pure\n");
447 : }
448 13314432 : else if (cannot_lead_to_return)
449 : {
450 964909 : *state = IPA_PURE;
451 964909 : *looping = true;
452 964909 : if (dump_file && (dump_flags & TDF_DETAILS))
453 1 : fprintf (dump_file, " ignoring side effects->pure looping\n");
454 : }
455 : else
456 : {
457 12349523 : if (dump_file && (dump_flags & TDF_DETAILS))
458 42 : fprintf (dump_file, " neither\n");
459 12349523 : *state = IPA_NEITHER;
460 12349523 : *looping = true;
461 : }
462 15714944 : }
463 :
464 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
465 : into STATE and LOOPING better of the two variants.
466 : Be sure to merge looping correctly. IPA_NEITHER functions
467 : have looping 0 even if they don't have to return. */
468 :
469 : static inline void
470 6884400 : better_state (enum pure_const_state_e *state, bool *looping,
471 : enum pure_const_state_e state2, bool looping2)
472 : {
473 6884400 : if (state2 < *state)
474 : {
475 36737 : if (*state == IPA_NEITHER)
476 35220 : *looping = looping2;
477 : else
478 1517 : *looping = MIN (*looping, looping2);
479 36737 : *state = state2;
480 : }
481 6847663 : else if (state2 != IPA_NEITHER)
482 1617277 : *looping = MIN (*looping, looping2);
483 6884400 : }
484 :
485 : /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
486 : into STATE and LOOPING worse of the two variants.
487 : N is the actual node called. */
488 :
489 : static inline void
490 14885090 : worse_state (enum pure_const_state_e *state, bool *looping,
491 : enum pure_const_state_e state2, bool looping2,
492 : struct symtab_node *from,
493 : struct symtab_node *to)
494 : {
495 : /* Consider function:
496 :
497 : bool a(int *p)
498 : {
499 : return *p==*p;
500 : }
501 :
502 : During early optimization we will turn this into:
503 :
504 : bool a(int *p)
505 : {
506 : return true;
507 : }
508 :
509 : Now if this function will be detected as CONST however when interposed it
510 : may end up being just pure. We always must assume the worst scenario here.
511 : */
512 14885090 : if (*state == IPA_CONST && state2 == IPA_CONST
513 14885090 : && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
514 : {
515 3801 : if (dump_file && (dump_flags & TDF_DETAILS))
516 0 : fprintf (dump_file, "Dropping state to PURE because call to %s may not "
517 : "bind to current def.\n", to->dump_name ());
518 : state2 = IPA_PURE;
519 : }
520 14885090 : *state = MAX (*state, state2);
521 14885090 : *looping = MAX (*looping, looping2);
522 14885090 : }
523 :
524 : /* Recognize special cases of builtins that are by themselves not const
525 : but function using them is. */
526 : bool
527 23860721 : builtin_safe_for_const_function_p (bool *looping, tree callee)
528 : {
529 23860721 : if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
530 6820168 : switch (DECL_FUNCTION_CODE (callee))
531 : {
532 1005297 : case BUILT_IN_RETURN:
533 1005297 : case BUILT_IN_UNREACHABLE:
534 1005297 : CASE_BUILT_IN_ALLOCA:
535 1005297 : case BUILT_IN_STACK_SAVE:
536 1005297 : case BUILT_IN_STACK_RESTORE:
537 1005297 : case BUILT_IN_EH_POINTER:
538 1005297 : case BUILT_IN_EH_FILTER:
539 1005297 : case BUILT_IN_UNWIND_RESUME:
540 1005297 : case BUILT_IN_CXA_END_CLEANUP:
541 1005297 : case BUILT_IN_EH_COPY_VALUES:
542 1005297 : case BUILT_IN_FRAME_ADDRESS:
543 1005297 : case BUILT_IN_APPLY_ARGS:
544 1005297 : case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
545 1005297 : case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
546 1005297 : case BUILT_IN_DWARF_CFA:
547 1005297 : case BUILT_IN_RETURN_ADDRESS:
548 1005297 : *looping = false;
549 1005297 : return true;
550 9967 : case BUILT_IN_PREFETCH:
551 9967 : *looping = true;
552 9967 : return true;
553 : default:
554 : break;
555 : }
556 : return false;
557 : }
558 :
559 : /* Check the parameters of a function call to CALL_EXPR to see if
560 : there are any references in the parameters that are not allowed for
561 : pure or const functions. Also check to see if this is either an
562 : indirect call, a call outside the compilation unit, or has special
563 : attributes that may also effect the purity. The CALL_EXPR node for
564 : the entire call expression. */
565 :
566 : static void
567 15539886 : check_call (funct_state local, gcall *call, bool ipa)
568 : {
569 15539886 : int flags = gimple_call_flags (call);
570 15539886 : tree callee_t = gimple_call_fndecl (call);
571 15539886 : bool possibly_throws = stmt_could_throw_p (cfun, call);
572 15539886 : bool possibly_throws_externally = (possibly_throws
573 15539886 : && stmt_can_throw_external (cfun, call));
574 :
575 5716131 : if (possibly_throws)
576 : {
577 : unsigned int i;
578 34456797 : for (i = 0; i < gimple_num_ops (call); i++)
579 28740666 : if (gimple_op (call, i)
580 28740666 : && tree_could_throw_p (gimple_op (call, i)))
581 : {
582 80034 : if (possibly_throws && cfun->can_throw_non_call_exceptions)
583 : {
584 80034 : if (dump_file)
585 0 : fprintf (dump_file, " operand can throw; looping\n");
586 80034 : local->looping = true;
587 : }
588 80034 : if (possibly_throws_externally)
589 : {
590 68927 : if (dump_file)
591 0 : fprintf (dump_file, " operand can throw externally\n");
592 68927 : local->can_throw = true;
593 : }
594 : }
595 : }
596 :
597 : /* The const and pure flags are set by a variety of places in the
598 : compiler (including here). If someone has already set the flags
599 : for the callee, (such as for some of the builtins) we will use
600 : them, otherwise we will compute our own information.
601 :
602 : Const and pure functions have less clobber effects than other
603 : functions so we process these first. Otherwise if it is a call
604 : outside the compilation unit or an indirect call we punt. This
605 : leaves local calls which will be processed by following the call
606 : graph. */
607 15539886 : if (callee_t)
608 : {
609 14527741 : bool call_looping;
610 :
611 14527741 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
612 14527741 : && !nonfreeing_call_p (call))
613 722507 : local->can_free = true;
614 :
615 14527741 : if (builtin_safe_for_const_function_p (&call_looping, callee_t))
616 : {
617 356241 : worse_state (&local->pure_const_state, &local->looping,
618 : IPA_CONST, call_looping,
619 : NULL, NULL);
620 356241 : return;
621 : }
622 : /* When bad things happen to bad functions, they cannot be const
623 : or pure. */
624 14171500 : if (setjmp_call_p (callee_t))
625 : {
626 2700 : if (dump_file)
627 0 : fprintf (dump_file, " setjmp is not const/pure\n");
628 2700 : local->looping = true;
629 2700 : local->pure_const_state = IPA_NEITHER;
630 : }
631 :
632 14171500 : if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
633 3361258 : switch (DECL_FUNCTION_CODE (callee_t))
634 : {
635 1838 : case BUILT_IN_LONGJMP:
636 1838 : case BUILT_IN_NONLOCAL_GOTO:
637 1838 : if (dump_file)
638 0 : fprintf (dump_file,
639 : " longjmp and nonlocal goto is not const/pure\n");
640 1838 : local->pure_const_state = IPA_NEITHER;
641 1838 : local->looping = true;
642 1838 : break;
643 : default:
644 : break;
645 : }
646 : }
647 1012145 : else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
648 157105 : local->can_free = true;
649 :
650 : /* When not in IPA mode, we can still handle self recursion. */
651 15183645 : if (!ipa && callee_t
652 15183645 : && recursive_call_p (current_function_decl, callee_t))
653 : {
654 19760 : if (dump_file)
655 0 : fprintf (dump_file, " Recursive call can loop.\n");
656 19760 : local->looping = true;
657 : }
658 : /* Either callee is unknown or we are doing local analysis.
659 : Look to see if there are any bits available for the callee (such as by
660 : declaration or because it is builtin) and process solely on the basis of
661 : those bits. Handle internal calls always, those calls don't have
662 : corresponding cgraph edges and thus aren't processed during
663 : the propagation. */
664 15163885 : else if (!ipa || gimple_call_internal_p (call))
665 : {
666 9922365 : enum pure_const_state_e call_state;
667 9922365 : bool call_looping;
668 9922365 : if (possibly_throws && cfun->can_throw_non_call_exceptions)
669 : {
670 2178410 : if (dump_file)
671 0 : fprintf (dump_file, " can throw; looping\n");
672 2178410 : local->looping = true;
673 : }
674 9922365 : if (possibly_throws_externally)
675 : {
676 2787632 : if (dump_file)
677 : {
678 0 : fprintf (dump_file, " can throw externally to lp %i\n",
679 : lookup_stmt_eh_lp (call));
680 0 : if (callee_t)
681 0 : fprintf (dump_file, " callee:%s\n",
682 0 : IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
683 : }
684 2787632 : local->can_throw = true;
685 : }
686 9922365 : if (dump_file && (dump_flags & TDF_DETAILS))
687 22 : fprintf (dump_file, " checking flags for call:");
688 9922365 : state_from_flags (&call_state, &call_looping, flags,
689 9922365 : ((flags & (ECF_NORETURN | ECF_NOTHROW))
690 : == (ECF_NORETURN | ECF_NOTHROW))
691 9922365 : || (!flag_exceptions && (flags & ECF_NORETURN)));
692 9922365 : worse_state (&local->pure_const_state, &local->looping,
693 : call_state, call_looping, NULL, NULL);
694 : }
695 : /* Direct functions calls are handled by IPA propagation. */
696 : }
697 :
698 : /* Wrapper around check_decl for loads in local more. */
699 :
700 : static bool
701 12545344 : check_load (gimple *, tree op, tree, void *data)
702 : {
703 12545344 : if (DECL_P (op))
704 5247636 : check_decl ((funct_state)data, op, false, false);
705 : else
706 7297708 : check_op ((funct_state)data, op, false);
707 12545344 : return false;
708 : }
709 :
710 : /* Wrapper around check_decl for stores in local more. */
711 :
712 : static bool
713 13514657 : check_store (gimple *, tree op, tree, void *data)
714 : {
715 13514657 : if (DECL_P (op))
716 7837151 : check_decl ((funct_state)data, op, true, false);
717 : else
718 5677506 : check_op ((funct_state)data, op, true);
719 13514657 : return false;
720 : }
721 :
722 : /* Wrapper around check_decl for loads in ipa mode. */
723 :
724 : static bool
725 6264552 : check_ipa_load (gimple *, tree op, tree, void *data)
726 : {
727 6264552 : if (DECL_P (op))
728 2995533 : check_decl ((funct_state)data, op, false, true);
729 : else
730 3269019 : check_op ((funct_state)data, op, false);
731 6264552 : return false;
732 : }
733 :
734 : /* Wrapper around check_decl for stores in ipa mode. */
735 :
736 : static bool
737 6520701 : check_ipa_store (gimple *, tree op, tree, void *data)
738 : {
739 6520701 : if (DECL_P (op))
740 4128250 : check_decl ((funct_state)data, op, true, true);
741 : else
742 2392451 : check_op ((funct_state)data, op, true);
743 6520701 : return false;
744 : }
745 :
746 : /* Look into pointer pointed to by GSIP and figure out what interesting side
747 : effects it has. */
748 : static void
749 184428569 : check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
750 : {
751 184428569 : gimple *stmt = gsi_stmt (*gsip);
752 :
753 184428569 : if (is_gimple_debug (stmt))
754 : return;
755 :
756 : /* Do consider clobber as side effects before IPA, so we rather inline
757 : C++ destructors and keep clobber semantics than eliminate them.
758 :
759 : Similar logic is in ipa-modref.
760 :
761 : TODO: We may get smarter during early optimizations on these and let
762 : functions containing only clobbers to be optimized more. This is a common
763 : case of C++ destructors. */
764 :
765 92746165 : if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
766 : return;
767 :
768 90549900 : if (dump_file)
769 : {
770 1702 : fprintf (dump_file, " scanning: ");
771 1702 : print_gimple_stmt (dump_file, stmt, 0);
772 : }
773 :
774 167691756 : if (gimple_has_volatile_ops (stmt) && !gimple_clobber_p (stmt))
775 : {
776 1469334 : local->pure_const_state = IPA_NEITHER;
777 1469334 : if (dump_file)
778 33 : fprintf (dump_file, " Volatile stmt is not const/pure\n");
779 : }
780 :
781 : /* Look for loads and stores. */
782 151691145 : walk_stmt_load_store_ops (stmt, local,
783 : ipa ? check_ipa_load : check_load,
784 : ipa ? check_ipa_store : check_store);
785 :
786 90549900 : if (gimple_code (stmt) != GIMPLE_CALL && stmt_could_throw_p (cfun, stmt))
787 : {
788 2597465 : if (cfun->can_throw_non_call_exceptions)
789 : {
790 2294489 : if (dump_file)
791 0 : fprintf (dump_file, " can throw; looping\n");
792 2294489 : local->looping = true;
793 : }
794 2597465 : if (stmt_can_throw_external (cfun, stmt))
795 : {
796 2244308 : if (dump_file)
797 6 : fprintf (dump_file, " can throw externally\n");
798 2244308 : local->can_throw = true;
799 : }
800 : else
801 353157 : if (dump_file)
802 0 : fprintf (dump_file, " can throw\n");
803 : }
804 90549900 : switch (gimple_code (stmt))
805 : {
806 15539886 : case GIMPLE_CALL:
807 15539886 : check_call (local, as_a <gcall *> (stmt), ipa);
808 15539886 : break;
809 1617199 : case GIMPLE_LABEL:
810 1617199 : if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
811 : /* Target of long jump. */
812 : {
813 764 : if (dump_file)
814 0 : fprintf (dump_file, " nonlocal label is not const/pure\n");
815 764 : local->pure_const_state = IPA_NEITHER;
816 : }
817 : break;
818 261250 : case GIMPLE_ASM:
819 261250 : if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
820 : {
821 114843 : if (dump_file)
822 3 : fprintf (dump_file, " memory asm clobber is not const/pure\n");
823 : /* Abandon all hope, ye who enter here. */
824 114843 : local->pure_const_state = IPA_NEITHER;
825 114843 : local->can_free = true;
826 : }
827 261250 : if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
828 : {
829 238509 : if (dump_file)
830 3 : fprintf (dump_file, " volatile is not const/pure\n");
831 : /* Abandon all hope, ye who enter here. */
832 238509 : local->pure_const_state = IPA_NEITHER;
833 238509 : local->looping = true;
834 238509 : local->can_free = true;
835 : }
836 : return;
837 : default:
838 : break;
839 : }
840 : }
841 :
842 : /* Check that RETVAL is used only in STMT and in comparisons against 0.
843 : RETVAL is return value of the function and STMT is return stmt. */
844 :
845 : static bool
846 470050 : check_retval_uses (tree retval, gimple *stmt)
847 : {
848 470050 : imm_use_iterator use_iter;
849 470050 : gimple *use_stmt;
850 :
851 1483052 : FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
852 683861 : if (gcond *cond = dyn_cast<gcond *> (use_stmt))
853 : {
854 12480 : tree op2 = gimple_cond_rhs (cond);
855 12480 : if (!integer_zerop (op2))
856 : return false;
857 : }
858 671381 : else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
859 : {
860 93325 : enum tree_code code = gimple_assign_rhs_code (ga);
861 93325 : if (TREE_CODE_CLASS (code) != tcc_comparison)
862 : return false;
863 2462 : if (!integer_zerop (gimple_assign_rhs2 (ga)))
864 : return false;
865 : }
866 578056 : else if (is_gimple_debug (use_stmt))
867 : ;
868 463134 : else if (use_stmt != stmt)
869 140909 : return false;
870 :
871 329141 : return true;
872 : }
873 :
874 : /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
875 : attribute. Currently this function does a very conservative analysis.
876 : FUN is considered to be a candidate if
877 : 1) It returns a value of pointer type.
878 : 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
879 : a phi, and element of phi is either NULL or
880 : SSA_NAME_DEF_STMT(element) is function call.
881 : 3) The return-value has immediate uses only within comparisons (gcond or gassign)
882 : and return_stmt (and likewise a phi arg has immediate use only within comparison
883 : or the phi stmt). */
884 :
885 : #define DUMP_AND_RETURN(reason) \
886 : { \
887 : if (dump_file && (dump_flags & TDF_DETAILS)) \
888 : fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
889 : (node->dump_name ()), (reason)); \
890 : return false; \
891 : }
892 :
893 : static bool
894 399357 : malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
895 : bitmap visited)
896 : {
897 399357 : cgraph_node *node = cgraph_node::get_create (fun->decl);
898 399357 : if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
899 : return true;
900 :
901 399347 : if (!check_retval_uses (retval, ret_stmt))
902 110844 : DUMP_AND_RETURN("Return value has uses outside return stmt"
903 : " and comparisons against 0.")
904 :
905 288503 : gimple *def = SSA_NAME_DEF_STMT (retval);
906 :
907 288503 : if (gcall *call_stmt = dyn_cast<gcall *> (def))
908 : {
909 73334 : tree callee_decl = gimple_call_fndecl (call_stmt);
910 73334 : if (!callee_decl)
911 : return false;
912 :
913 134567 : if (!ipa && !DECL_IS_MALLOC (callee_decl))
914 27110 : DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
915 : " non-ipa mode.")
916 :
917 44768 : cgraph_edge *cs = node->get_edge (call_stmt);
918 44768 : if (cs)
919 : {
920 9189 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
921 9189 : es->is_return_callee_uncaptured = true;
922 : }
923 : }
924 :
925 215169 : else if (gphi *phi = dyn_cast<gphi *> (def))
926 : {
927 : bool all_args_zero = true;
928 98608 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
929 : {
930 93453 : tree arg = gimple_phi_arg_def (phi, i);
931 93453 : if (integer_zerop (arg))
932 20065 : continue;
933 :
934 73388 : all_args_zero = false;
935 73388 : if (TREE_CODE (arg) != SSA_NAME)
936 2685 : DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
937 70703 : if (!check_retval_uses (arg, phi))
938 30065 : DUMP_AND_RETURN ("phi arg has uses outside phi"
939 : " and comparisons against 0.")
940 :
941 40638 : gimple *arg_def = SSA_NAME_DEF_STMT (arg);
942 40638 : if (is_a<gphi *> (arg_def))
943 : {
944 3572 : if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
945 3480 : DUMP_AND_RETURN ("nested phi fail")
946 92 : continue;
947 : }
948 :
949 37066 : gcall *call_stmt = dyn_cast<gcall *> (arg_def);
950 37066 : if (!call_stmt)
951 18411 : DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
952 :
953 18655 : tree callee_decl = gimple_call_fndecl (call_stmt);
954 18655 : if (!callee_decl)
955 : return false;
956 25021 : if (!ipa && !DECL_IS_MALLOC (callee_decl))
957 6254 : DUMP_AND_RETURN("callee_decl does not have malloc attribute"
958 : " for non-ipa mode.")
959 :
960 9504 : cgraph_edge *cs = node->get_edge (call_stmt);
961 9504 : if (cs)
962 : {
963 6605 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
964 6605 : es->is_return_callee_uncaptured = true;
965 : }
966 : }
967 :
968 5155 : if (all_args_zero)
969 48 : DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
970 : }
971 :
972 : else
973 146222 : DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
974 :
975 : return true;
976 : }
977 :
978 : static bool
979 5936569 : malloc_candidate_p (function *fun, bool ipa)
980 : {
981 5936569 : basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
982 5936569 : edge e;
983 5936569 : edge_iterator ei;
984 5936569 : cgraph_node *node = cgraph_node::get_create (fun->decl);
985 :
986 5936569 : if (EDGE_COUNT (exit_block->preds) == 0
987 5934093 : || !flag_delete_null_pointer_checks)
988 : return false;
989 :
990 5803649 : auto_bitmap visited;
991 5853442 : FOR_EACH_EDGE (e, ei, exit_block->preds)
992 : {
993 5803649 : gimple_stmt_iterator gsi = gsi_last_bb (e->src);
994 11556262 : greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
995 :
996 5802406 : if (!ret_stmt)
997 5803649 : return false;
998 :
999 5802406 : tree retval = gimple_return_retval (ret_stmt);
1000 5802406 : if (!retval)
1001 2617168 : DUMP_AND_RETURN("No return value.")
1002 :
1003 3185238 : if (TREE_CODE (retval) != SSA_NAME
1004 3185238 : || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
1005 2789453 : DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
1006 :
1007 395785 : if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
1008 : return false;
1009 : }
1010 :
1011 49793 : if (dump_file && (dump_flags & TDF_DETAILS))
1012 8 : fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1013 4 : IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1014 : return true;
1015 5803649 : }
1016 :
1017 : #undef DUMP_AND_RETURN
1018 :
1019 : /* Return true if function is known to be finite. */
1020 :
1021 : bool
1022 4218300 : finite_function_p ()
1023 : {
1024 : /* Const functions cannot have back edges (an
1025 : indication of possible infinite loop side
1026 : effect. */
1027 4218300 : bool finite = true;
1028 4218300 : if (mark_dfs_back_edges ())
1029 : {
1030 : /* Preheaders are needed for SCEV to work.
1031 : Simple latches and recorded exits improve chances that loop will
1032 : proved to be finite in testcases such as in loop-15.c
1033 : and loop-24.c */
1034 427262 : loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1035 : | LOOPS_HAVE_SIMPLE_LATCHES
1036 : | LOOPS_HAVE_RECORDED_EXITS);
1037 427262 : if (dump_file && (dump_flags & TDF_DETAILS))
1038 0 : flow_loops_dump (dump_file, NULL, 0);
1039 427262 : if (mark_irreducible_loops ())
1040 : {
1041 2066 : if (dump_file)
1042 0 : fprintf (dump_file, " has irreducible loops\n");
1043 : finite = false;
1044 : }
1045 : else
1046 : {
1047 425196 : scev_initialize ();
1048 2117410 : for (auto loop : loops_list (cfun, 0))
1049 917287 : if (!finite_loop_p (loop))
1050 : {
1051 75465 : if (dump_file)
1052 1 : fprintf (dump_file, " cannot prove finiteness of "
1053 : "loop %i\n", loop->num);
1054 : finite =false;
1055 : break;
1056 425196 : }
1057 425196 : scev_finalize ();
1058 : }
1059 427262 : loop_optimizer_finalize ();
1060 : }
1061 4218300 : return finite;
1062 : }
1063 :
1064 : /* This is the main routine for finding the reference patterns for
1065 : global variables within a function FN. */
1066 :
1067 : static funct_state
1068 4680640 : analyze_function (struct cgraph_node *fn, bool ipa)
1069 : {
1070 4680640 : tree decl = fn->decl;
1071 4680640 : funct_state l;
1072 4680640 : basic_block this_block;
1073 :
1074 4680640 : l = XCNEW (class funct_state_d);
1075 4680640 : l->pure_const_state = IPA_CONST;
1076 4680640 : l->state_previously_known = IPA_NEITHER;
1077 4680640 : l->looping_previously_known = true;
1078 4680640 : l->looping = false;
1079 4680640 : l->can_throw = false;
1080 4680640 : l->can_free = false;
1081 4680640 : state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1082 4680640 : flags_from_decl_or_type (fn->decl),
1083 4680640 : fn->cannot_return_p ());
1084 :
1085 4680640 : if (fn->thunk || fn->alias)
1086 : {
1087 : /* Thunk gets propagated through, so nothing interesting happens. */
1088 62407 : gcc_assert (ipa);
1089 62407 : if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
1090 729 : l->pure_const_state = IPA_NEITHER;
1091 62407 : return l;
1092 : }
1093 :
1094 4618233 : if (dump_file)
1095 : {
1096 190 : fprintf (dump_file, "\n\n local analysis of %s\n ",
1097 : fn->dump_name ());
1098 : }
1099 :
1100 4618233 : push_cfun (DECL_STRUCT_FUNCTION (decl));
1101 :
1102 36013716 : FOR_EACH_BB_FN (this_block, cfun)
1103 : {
1104 31460409 : gimple_stmt_iterator gsi;
1105 31460409 : struct walk_stmt_info wi;
1106 :
1107 31460409 : memset (&wi, 0, sizeof (wi));
1108 62920818 : for (gsi = gsi_start_bb (this_block);
1109 215824052 : !gsi_end_p (gsi);
1110 184363643 : gsi_next (&gsi))
1111 : {
1112 : /* NULL memory accesses terminates BB. These accesses are known
1113 : to trip undefined behaviour. gimple-ssa-isolate-paths turns them
1114 : to volatile accesses and adds builtin_trap call which would
1115 : confuse us otherwise. */
1116 184430946 : if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
1117 : null_pointer_node))
1118 : {
1119 2377 : if (dump_file)
1120 0 : fprintf (dump_file, " NULL memory access; terminating BB%s\n",
1121 0 : flag_non_call_exceptions ? "; looping" : "");
1122 2377 : if (flag_non_call_exceptions)
1123 : {
1124 426 : l->looping = true;
1125 426 : if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
1126 : {
1127 330 : if (dump_file)
1128 0 : fprintf (dump_file, " can throw externally\n");
1129 330 : l->can_throw = true;
1130 : }
1131 : }
1132 : break;
1133 : }
1134 184428569 : check_stmt (&gsi, l, ipa);
1135 184428569 : if (l->pure_const_state == IPA_NEITHER
1136 117767570 : && l->looping
1137 95695871 : && l->can_throw
1138 46051280 : && l->can_free)
1139 64926 : goto end;
1140 : }
1141 : }
1142 :
1143 4553307 : end:
1144 4618233 : if (l->pure_const_state != IPA_NEITHER
1145 2012873 : && !l->looping
1146 6448381 : && !finite_function_p ())
1147 23951 : l->looping = true;
1148 :
1149 4618233 : if (dump_file && (dump_flags & TDF_DETAILS))
1150 22 : fprintf (dump_file, " checking previously known:");
1151 :
1152 4618233 : better_state (&l->pure_const_state, &l->looping,
1153 : l->state_previously_known,
1154 4618233 : l->looping_previously_known);
1155 4618233 : if (TREE_NOTHROW (decl))
1156 3120251 : l->can_throw = false;
1157 :
1158 4618233 : l->malloc_state = STATE_MALLOC_BOTTOM;
1159 4618233 : if (DECL_IS_MALLOC (decl))
1160 9462 : l->malloc_state = STATE_MALLOC;
1161 4608771 : else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1162 11580 : l->malloc_state = STATE_MALLOC_TOP;
1163 4597191 : else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1164 38213 : l->malloc_state = STATE_MALLOC;
1165 :
1166 4618233 : pop_cfun ();
1167 4618233 : if (dump_file)
1168 : {
1169 190 : if (l->looping)
1170 50 : fprintf (dump_file, "Function is locally looping.\n");
1171 190 : if (l->can_throw)
1172 6 : fprintf (dump_file, "Function is locally throwing.\n");
1173 190 : if (l->pure_const_state == IPA_CONST)
1174 103 : fprintf (dump_file, "Function is locally const.\n");
1175 190 : if (l->pure_const_state == IPA_PURE)
1176 7 : fprintf (dump_file, "Function is locally pure.\n");
1177 190 : if (l->can_free)
1178 3 : fprintf (dump_file, "Function can locally free.\n");
1179 190 : if (l->malloc_state == STATE_MALLOC)
1180 8 : fprintf (dump_file, "Function is locally malloc.\n");
1181 : }
1182 : return l;
1183 : }
1184 :
1185 : void
1186 19199 : funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1187 : {
1188 : /* There are some shared nodes, in particular the initializers on
1189 : static declarations. We do not need to scan them more than once
1190 : since all we would be interested in are the addressof
1191 : operations. */
1192 19199 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1193 : {
1194 19198 : funct_state_d *a = analyze_function (node, true);
1195 19198 : new (state) funct_state_d (*a);
1196 19198 : free (a);
1197 : }
1198 : else
1199 : /* Do not keep stale summaries. */
1200 1 : funct_state_summaries->remove (node);
1201 19199 : }
1202 :
1203 : /* Called when new clone is inserted to callgraph late. */
1204 :
1205 : void
1206 1160552 : funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1207 : funct_state_d *src_data,
1208 : funct_state_d *dst_data)
1209 : {
1210 1160552 : new (dst_data) funct_state_d (*src_data);
1211 1160552 : if (dst_data->malloc_state == STATE_MALLOC
1212 1160552 : && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1213 11 : dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1214 1160552 : }
1215 :
1216 :
1217 : void
1218 160647 : pass_ipa_pure_const::
1219 : register_hooks (void)
1220 : {
1221 160647 : if (init_p)
1222 : return;
1223 :
1224 160647 : init_p = true;
1225 :
1226 160647 : funct_state_summaries = new funct_state_summary_t (symtab);
1227 : }
1228 :
1229 :
1230 : /* Analyze each function in the cgraph to see if it is locally PURE or
1231 : CONST. */
1232 :
1233 : static void
1234 148436 : pure_const_generate_summary (void)
1235 : {
1236 148436 : struct cgraph_node *node;
1237 :
1238 148436 : pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1239 148436 : pass->register_hooks ();
1240 :
1241 : /* Process all of the functions.
1242 :
1243 : We process AVAIL_INTERPOSABLE functions. We cannot use the results
1244 : by default, but the info can be used at LTO with -fwhole-program or
1245 : when function got cloned and the clone is AVAILABLE. */
1246 :
1247 1540031 : FOR_EACH_DEFINED_FUNCTION (node)
1248 1391595 : if (opt_for_fn (node->decl, flag_ipa_pure_const))
1249 : {
1250 1391350 : funct_state_d *a = analyze_function (node, true);
1251 1391350 : new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1252 1391350 : free (a);
1253 : }
1254 148436 : }
1255 :
1256 :
1257 : /* Serialize the ipa info for lto. */
1258 :
1259 : static void
1260 20022 : pure_const_write_summary (void)
1261 : {
1262 20022 : struct cgraph_node *node;
1263 20022 : struct lto_simple_output_block *ob
1264 20022 : = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1265 20022 : unsigned int count = 0;
1266 20022 : lto_symtab_encoder_iterator lsei;
1267 20022 : lto_symtab_encoder_t encoder;
1268 :
1269 20022 : encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1270 :
1271 113822 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1272 93800 : lsei_next_function_in_partition (&lsei))
1273 : {
1274 93800 : node = lsei_cgraph_node (lsei);
1275 93800 : if (node->definition && funct_state_summaries->exists (node))
1276 93648 : count++;
1277 : }
1278 :
1279 20022 : streamer_write_uhwi_stream (ob->main_stream, count);
1280 :
1281 : /* Process all of the functions. */
1282 113822 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1283 93800 : lsei_next_function_in_partition (&lsei))
1284 : {
1285 93800 : node = lsei_cgraph_node (lsei);
1286 93800 : funct_state_d *fs = funct_state_summaries->get (node);
1287 93800 : if (node->definition && fs != NULL)
1288 : {
1289 93648 : struct bitpack_d bp;
1290 93648 : int node_ref;
1291 93648 : lto_symtab_encoder_t encoder;
1292 :
1293 93648 : encoder = ob->decl_state->symtab_node_encoder;
1294 93648 : node_ref = lto_symtab_encoder_encode (encoder, node);
1295 93648 : streamer_write_uhwi_stream (ob->main_stream, node_ref);
1296 :
1297 : /* Note that flags will need to be read in the opposite
1298 : order as we are pushing the bitflags into FLAGS. */
1299 93648 : bp = bitpack_create (ob->main_stream);
1300 93648 : bp_pack_value (&bp, fs->pure_const_state, 2);
1301 93648 : bp_pack_value (&bp, fs->state_previously_known, 2);
1302 93648 : bp_pack_value (&bp, fs->looping_previously_known, 1);
1303 93648 : bp_pack_value (&bp, fs->looping, 1);
1304 93648 : bp_pack_value (&bp, fs->can_throw, 1);
1305 93648 : bp_pack_value (&bp, fs->can_free, 1);
1306 93648 : bp_pack_value (&bp, fs->malloc_state, 2);
1307 93648 : streamer_write_bitpack (&bp);
1308 : }
1309 : }
1310 :
1311 20022 : lto_destroy_simple_output_block (ob);
1312 20022 : }
1313 :
1314 :
1315 : /* Deserialize the ipa info for lto. */
1316 :
1317 : static void
1318 12211 : pure_const_read_summary (void)
1319 : {
1320 12211 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1321 12211 : struct lto_file_decl_data *file_data;
1322 12211 : unsigned int j = 0;
1323 :
1324 12211 : pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1325 12211 : pass->register_hooks ();
1326 :
1327 37688 : while ((file_data = file_data_vec[j++]))
1328 : {
1329 13266 : const char *data;
1330 13266 : size_t len;
1331 13266 : class lto_input_block *ib
1332 13266 : = lto_create_simple_input_block (file_data,
1333 : LTO_section_ipa_pure_const,
1334 : &data, &len);
1335 13266 : if (ib)
1336 : {
1337 10953 : unsigned int i;
1338 10953 : unsigned int count = streamer_read_uhwi (ib);
1339 :
1340 88888 : for (i = 0; i < count; i++)
1341 : {
1342 77935 : unsigned int index;
1343 77935 : struct cgraph_node *node;
1344 77935 : struct bitpack_d bp;
1345 77935 : funct_state fs;
1346 77935 : lto_symtab_encoder_t encoder;
1347 :
1348 77935 : index = streamer_read_uhwi (ib);
1349 77935 : encoder = file_data->symtab_node_encoder;
1350 77935 : node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1351 : index));
1352 :
1353 77935 : fs = funct_state_summaries->get_create (node);
1354 : /* Note that the flags must be read in the opposite
1355 : order in which they were written (the bitflags were
1356 : pushed into FLAGS). */
1357 77935 : bp = streamer_read_bitpack (ib);
1358 77935 : fs->pure_const_state
1359 77935 : = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1360 77935 : fs->state_previously_known
1361 77935 : = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1362 77935 : fs->looping_previously_known = bp_unpack_value (&bp, 1);
1363 77935 : fs->looping = bp_unpack_value (&bp, 1);
1364 77935 : fs->can_throw = bp_unpack_value (&bp, 1);
1365 77935 : fs->can_free = bp_unpack_value (&bp, 1);
1366 77935 : fs->malloc_state
1367 77935 : = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1368 :
1369 77935 : if (dump_file)
1370 : {
1371 0 : int flags = flags_from_decl_or_type (node->decl);
1372 0 : fprintf (dump_file, "Read info for %s ", node->dump_name ());
1373 0 : if (flags & ECF_CONST)
1374 0 : fprintf (dump_file, " const");
1375 0 : if (flags & ECF_PURE)
1376 0 : fprintf (dump_file, " pure");
1377 0 : if (flags & ECF_NOTHROW)
1378 0 : fprintf (dump_file, " nothrow");
1379 0 : fprintf (dump_file, "\n pure const state: %s\n",
1380 0 : pure_const_names[fs->pure_const_state]);
1381 0 : fprintf (dump_file, " previously known state: %s\n",
1382 0 : pure_const_names[fs->state_previously_known]);
1383 0 : if (fs->looping)
1384 0 : fprintf (dump_file," function is locally looping\n");
1385 0 : if (fs->looping_previously_known)
1386 0 : fprintf (dump_file," function is previously known looping\n");
1387 0 : if (fs->can_throw)
1388 0 : fprintf (dump_file," function is locally throwing\n");
1389 0 : if (fs->can_free)
1390 0 : fprintf (dump_file," function can locally free\n");
1391 0 : fprintf (dump_file, "\n malloc state: %s\n",
1392 0 : malloc_state_names[fs->malloc_state]);
1393 : }
1394 : }
1395 :
1396 10953 : lto_destroy_simple_input_block (file_data,
1397 : LTO_section_ipa_pure_const,
1398 : ib, data, len);
1399 : }
1400 : }
1401 12211 : }
1402 :
1403 : /* We only propagate across edges that can throw externally and their callee
1404 : is not interposable. */
1405 :
1406 : static bool
1407 7337597 : ignore_edge_for_nothrow (struct cgraph_edge *e)
1408 : {
1409 7337597 : if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1410 : return true;
1411 :
1412 2143785 : enum availability avail;
1413 2143785 : cgraph_node *ultimate_target
1414 2143785 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1415 2143785 : if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1416 : return true;
1417 761869 : return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1418 206494 : && !e->callee->binds_to_current_def_p (e->caller))
1419 761835 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1420 1522600 : || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1421 : }
1422 :
1423 : /* Return true if NODE is self recursive function.
1424 : Indirectly recursive functions appears as non-trivial strongly
1425 : connected components, so we need to care about self recursion
1426 : only. */
1427 :
1428 : static bool
1429 2073107 : self_recursive_p (struct cgraph_node *node)
1430 : {
1431 2073107 : struct cgraph_edge *e;
1432 8129550 : for (e = node->callees; e; e = e->next_callee)
1433 6060310 : if (e->callee->function_symbol () == node)
1434 : return true;
1435 : return false;
1436 : }
1437 :
1438 : /* Return true if N is cdtor that is not const or pure. In this case we may
1439 : need to remove unreachable function if it is marked const/pure. */
1440 :
1441 : static bool
1442 50531 : cdtor_p (cgraph_node *n, void *)
1443 : {
1444 50531 : if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1445 3 : return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1446 3 : || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1447 : return false;
1448 : }
1449 :
1450 : /* Skip edges from and to nodes without ipa_pure_const enabled.
1451 : Ignore not available symbols. */
1452 :
1453 : static bool
1454 7337597 : ignore_edge_for_pure_const (struct cgraph_edge *e)
1455 : {
1456 7337597 : enum availability avail;
1457 7337597 : cgraph_node *ultimate_target
1458 7337597 : = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1459 :
1460 7337597 : return (avail <= AVAIL_INTERPOSABLE
1461 2561626 : || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1462 9890509 : || !opt_for_fn (ultimate_target->decl,
1463 7337597 : flag_ipa_pure_const));
1464 : }
1465 :
1466 : /* Return true if function should be skipped for local pure const analysis. */
1467 :
1468 : static bool
1469 4648671 : skip_function_for_local_pure_const (struct cgraph_node *node)
1470 : {
1471 : /* Because we do not schedule pass_fixup_cfg over whole program after early
1472 : optimizations we must not promote functions that are called by already
1473 : processed functions. */
1474 :
1475 4648671 : if (function_called_by_processed_nodes_p ())
1476 : {
1477 3344 : if (dump_file)
1478 1 : fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1479 3344 : return true;
1480 : }
1481 : /* Save some work and do not analyze functions which are interposable and
1482 : do not have any non-interposable aliases. */
1483 4645327 : if (node->get_availability () <= AVAIL_INTERPOSABLE
1484 4645327 : && !node->has_aliases_p ())
1485 : {
1486 196333 : if (dump_file)
1487 0 : fprintf (dump_file,
1488 : "Function is interposable; not analyzing.\n");
1489 196333 : return true;
1490 : }
1491 : return false;
1492 : }
1493 :
1494 : /* Make function const and output warning. If LOCAL is true,
1495 : return true if anything changed. Otherwise return true if
1496 : we may have introduced removale ctors. */
1497 :
1498 : bool
1499 1538392 : ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1500 : {
1501 1538392 : bool cdtor = false;
1502 :
1503 1538392 : if (TREE_READONLY (node->decl)
1504 1538392 : && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1505 : return false;
1506 905720 : warn_function_const (node->decl, !looping);
1507 905720 : if (local && skip_function_for_local_pure_const (node))
1508 : return false;
1509 886644 : if (dump_file)
1510 58 : fprintf (dump_file, "Function found to be %sconst: %s\n",
1511 : looping ? "looping " : "",
1512 : node->dump_name ());
1513 886644 : if (!local && !looping)
1514 43830 : cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1515 886644 : if (!dbg_cnt (ipa_attr))
1516 : return false;
1517 886644 : if (node->set_const_flag (true, looping))
1518 : {
1519 498119 : if (dump_file)
1520 58 : fprintf (dump_file,
1521 : "Declaration updated to be %sconst: %s\n",
1522 : looping ? "looping " : "",
1523 : node->dump_name ());
1524 498119 : if (local)
1525 : return true;
1526 2803 : return cdtor;
1527 : }
1528 : return false;
1529 : }
1530 :
1531 : /* Make function const and output warning. If LOCAL is true,
1532 : return true if anything changed. Otherwise return true if
1533 : we may have introduced removale ctors. */
1534 :
1535 : bool
1536 1036181 : ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1537 : {
1538 1036181 : bool cdtor = false;
1539 :
1540 1036181 : if (TREE_READONLY (node->decl)
1541 1036181 : || (DECL_PURE_P (node->decl)
1542 671393 : && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
1543 : return false;
1544 364006 : warn_function_pure (node->decl, !looping);
1545 364006 : if (local && skip_function_for_local_pure_const (node))
1546 : return false;
1547 354989 : if (dump_file)
1548 8 : fprintf (dump_file, "Function found to be %spure: %s\n",
1549 : looping ? "looping " : "",
1550 : node->dump_name ());
1551 354989 : if (!local && !looping)
1552 3429 : cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1553 354989 : if (!dbg_cnt (ipa_attr))
1554 : return false;
1555 354989 : if (node->set_pure_flag (true, looping))
1556 : {
1557 344205 : if (dump_file)
1558 8 : fprintf (dump_file,
1559 : "Declaration updated to be %spure: %s\n",
1560 : looping ? "looping " : "",
1561 : node->dump_name ());
1562 344205 : if (local)
1563 : return true;
1564 7758 : return cdtor;
1565 : }
1566 : return false;
1567 : }
1568 :
1569 : /* Produce transitive closure over the callgraph and compute pure/const
1570 : attributes. */
1571 :
1572 : static bool
1573 151453 : propagate_pure_const (void)
1574 : {
1575 151453 : struct cgraph_node *node;
1576 151453 : struct cgraph_node *w;
1577 151453 : struct cgraph_node **order =
1578 151453 : XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1579 151453 : int order_pos;
1580 151453 : int i;
1581 151453 : struct ipa_dfs_info * w_info;
1582 151453 : bool remove_p = false;
1583 :
1584 151453 : order_pos = ipa_reduced_postorder (order, true,
1585 : ignore_edge_for_pure_const);
1586 151453 : if (dump_file)
1587 : {
1588 29 : cgraph_node::dump_cgraph (dump_file);
1589 29 : ipa_print_order (dump_file, "reduced", order, order_pos);
1590 : }
1591 :
1592 : /* Propagate the local information through the call graph to produce
1593 : the global information. All the nodes within a cycle will have
1594 : the same info so we collapse cycles first. Then we can do the
1595 : propagation in one pass from the leaves to the roots. */
1596 2525997 : for (i = 0; i < order_pos; i++ )
1597 : {
1598 2374544 : enum pure_const_state_e pure_const_state = IPA_CONST;
1599 2374544 : bool looping = false;
1600 2374544 : int count = 0;
1601 2374544 : node = order[i];
1602 :
1603 2374544 : if (node->alias)
1604 36801 : continue;
1605 :
1606 2337743 : if (dump_file && (dump_flags & TDF_DETAILS))
1607 5 : fprintf (dump_file, "Starting cycle\n");
1608 :
1609 : /* Find the worst state for any node in the cycle. */
1610 : w = node;
1611 3909516 : while (w && pure_const_state != IPA_NEITHER)
1612 : {
1613 2340317 : struct cgraph_edge *e;
1614 2340317 : struct cgraph_edge *ie;
1615 2340317 : int i;
1616 2340317 : struct ipa_ref *ref = NULL;
1617 :
1618 2340317 : funct_state w_l = funct_state_summaries->get_create (w);
1619 2340317 : if (dump_file && (dump_flags & TDF_DETAILS))
1620 6 : fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1621 : w->dump_name (),
1622 6 : pure_const_names[w_l->pure_const_state],
1623 6 : w_l->looping);
1624 :
1625 : /* First merge in function body properties.
1626 : We are safe to pass NULL as FROM and TO because we will take care
1627 : of possible interposition when walking callees. */
1628 2340317 : worse_state (&pure_const_state, &looping,
1629 2340317 : w_l->pure_const_state, w_l->looping,
1630 : NULL, NULL);
1631 2340317 : if (pure_const_state == IPA_NEITHER)
1632 : break;
1633 :
1634 1571773 : count++;
1635 :
1636 : /* We consider recursive cycles as possibly infinite.
1637 : This might be relaxed since infinite recursion leads to stack
1638 : overflow. */
1639 1571773 : if (count > 1)
1640 2574 : looping = true;
1641 :
1642 : /* Now walk the edges and merge in callee properties. */
1643 2408722 : for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1644 836949 : e = e->next_callee)
1645 : {
1646 1892557 : enum availability avail;
1647 1892557 : struct cgraph_node *y = e->callee->
1648 3785114 : function_or_virtual_thunk_symbol (&avail,
1649 1892557 : e->caller);
1650 1892557 : enum pure_const_state_e edge_state = IPA_CONST;
1651 1892557 : bool edge_looping = false;
1652 :
1653 1892557 : if (e->recursive_p ())
1654 5480 : looping = true;
1655 :
1656 1892557 : if (dump_file && (dump_flags & TDF_DETAILS))
1657 : {
1658 7 : fprintf (dump_file, " Call to %s",
1659 7 : e->callee->dump_name ());
1660 : }
1661 1892557 : if (avail > AVAIL_INTERPOSABLE)
1662 : {
1663 631278 : funct_state y_l = funct_state_summaries->get_create (y);
1664 :
1665 631278 : if (dump_file && (dump_flags & TDF_DETAILS))
1666 : {
1667 2 : fprintf (dump_file,
1668 : " state:%s looping:%i\n",
1669 2 : pure_const_names[y_l->pure_const_state],
1670 2 : y_l->looping);
1671 : }
1672 631278 : if (y_l->pure_const_state > IPA_PURE
1673 631278 : && e->cannot_lead_to_return_p ())
1674 : {
1675 8416 : if (dump_file && (dump_flags & TDF_DETAILS))
1676 0 : fprintf (dump_file,
1677 : " Ignoring side effects"
1678 : " -> pure, looping\n");
1679 8416 : edge_state = IPA_PURE;
1680 8416 : edge_looping = true;
1681 : }
1682 : else
1683 : {
1684 622862 : edge_state = y_l->pure_const_state;
1685 622862 : edge_looping = y_l->looping;
1686 : }
1687 : }
1688 1261279 : else if (builtin_safe_for_const_function_p (&edge_looping,
1689 : y->decl))
1690 : edge_state = IPA_CONST;
1691 : else
1692 1086472 : state_from_flags (&edge_state, &edge_looping,
1693 1086472 : flags_from_decl_or_type (y->decl),
1694 1086472 : e->cannot_lead_to_return_p ());
1695 :
1696 : /* Merge the results with what we already know. */
1697 1892557 : better_state (&edge_state, &edge_looping,
1698 : w_l->state_previously_known,
1699 1892557 : w_l->looping_previously_known);
1700 1892557 : worse_state (&pure_const_state, &looping,
1701 1892557 : edge_state, edge_looping, e->caller, e->callee);
1702 1892557 : if (pure_const_state == IPA_NEITHER)
1703 : break;
1704 : }
1705 :
1706 : /* Now process the indirect call. */
1707 1571773 : for (ie = w->indirect_calls;
1708 1572521 : ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1709 : {
1710 25467 : enum pure_const_state_e edge_state = IPA_CONST;
1711 25467 : bool edge_looping = false;
1712 :
1713 25467 : if (dump_file && (dump_flags & TDF_DETAILS))
1714 0 : fprintf (dump_file, " Indirect call");
1715 50934 : state_from_flags (&edge_state, &edge_looping,
1716 25467 : ie->indirect_info->ecf_flags,
1717 25467 : ie->cannot_lead_to_return_p ());
1718 : /* Merge the results with what we already know. */
1719 25467 : better_state (&edge_state, &edge_looping,
1720 : w_l->state_previously_known,
1721 25467 : w_l->looping_previously_known);
1722 25467 : worse_state (&pure_const_state, &looping,
1723 : edge_state, edge_looping, NULL, NULL);
1724 25467 : if (pure_const_state == IPA_NEITHER)
1725 : break;
1726 : }
1727 :
1728 : /* And finally all loads and stores. */
1729 320011 : for (i = 0; w->iterate_reference (i, ref)
1730 2541143 : && pure_const_state != IPA_NEITHER; i++)
1731 : {
1732 348143 : enum pure_const_state_e ref_state = IPA_CONST;
1733 348143 : bool ref_looping = false;
1734 348143 : switch (ref->use)
1735 : {
1736 220577 : case IPA_REF_LOAD:
1737 : /* readonly reads are safe. */
1738 220577 : if (TREE_READONLY (ref->referred->decl))
1739 : break;
1740 204742 : if (dump_file && (dump_flags & TDF_DETAILS))
1741 0 : fprintf (dump_file, " nonreadonly global var read\n");
1742 204742 : ref_state = IPA_PURE;
1743 204742 : break;
1744 89898 : case IPA_REF_STORE:
1745 89898 : if (ref->cannot_lead_to_return ())
1746 : break;
1747 28210 : ref_state = IPA_NEITHER;
1748 28210 : if (dump_file && (dump_flags & TDF_DETAILS))
1749 0 : fprintf (dump_file, " global var write\n");
1750 : break;
1751 : case IPA_REF_ADDR:
1752 : break;
1753 0 : default:
1754 0 : gcc_unreachable ();
1755 : }
1756 348143 : better_state (&ref_state, &ref_looping,
1757 : w_l->state_previously_known,
1758 348143 : w_l->looping_previously_known);
1759 348143 : worse_state (&pure_const_state, &looping,
1760 : ref_state, ref_looping, NULL, NULL);
1761 348143 : if (pure_const_state == IPA_NEITHER)
1762 : break;
1763 : }
1764 1571773 : w_info = (struct ipa_dfs_info *) w->aux;
1765 1571773 : w = w_info->next_cycle;
1766 : }
1767 2337743 : if (dump_file && (dump_flags & TDF_DETAILS))
1768 5 : fprintf (dump_file, "Result %s looping %i\n",
1769 5 : pure_const_names [pure_const_state],
1770 : looping);
1771 :
1772 : /* Find the worst state of can_free for any node in the cycle. */
1773 : bool can_free = false;
1774 : w = node;
1775 4678549 : while (w && !can_free)
1776 : {
1777 2340806 : struct cgraph_edge *e;
1778 2340806 : funct_state w_l = funct_state_summaries->get (w);
1779 :
1780 2340806 : if (w_l->can_free
1781 2161859 : || w->get_availability () == AVAIL_INTERPOSABLE
1782 4423353 : || w->indirect_calls)
1783 : can_free = true;
1784 :
1785 4058140 : for (e = w->callees; e && !can_free; e = e->next_callee)
1786 : {
1787 1717334 : enum availability avail;
1788 1717334 : struct cgraph_node *y = e->callee->
1789 3434668 : function_or_virtual_thunk_symbol (&avail,
1790 1717334 : e->caller);
1791 :
1792 1717334 : if (avail > AVAIL_INTERPOSABLE)
1793 772852 : can_free = funct_state_summaries->get (y)->can_free;
1794 : else
1795 : can_free = true;
1796 : }
1797 2340806 : w_info = (struct ipa_dfs_info *) w->aux;
1798 2340806 : w = w_info->next_cycle;
1799 : }
1800 :
1801 : /* Copy back the region's pure_const_state which is shared by
1802 : all nodes in the region. */
1803 : w = node;
1804 4696175 : while (w)
1805 : {
1806 2358432 : funct_state w_l = funct_state_summaries->get (w);
1807 2358432 : enum pure_const_state_e this_state = pure_const_state;
1808 2358432 : bool this_looping = looping;
1809 :
1810 2358432 : w_l->can_free = can_free;
1811 2358432 : w->nonfreeing_fn = !can_free;
1812 2358432 : if (!can_free && dump_file)
1813 28 : fprintf (dump_file, "Function found not to call free: %s\n",
1814 : w->dump_name ());
1815 :
1816 2358432 : if (w_l->state_previously_known != IPA_NEITHER
1817 410582 : && this_state > w_l->state_previously_known)
1818 : {
1819 1177 : if (this_state == IPA_NEITHER)
1820 50 : this_looping = w_l->looping_previously_known;
1821 : this_state = w_l->state_previously_known;
1822 : }
1823 2358432 : if (!this_looping && self_recursive_p (w))
1824 : this_looping = true;
1825 2358432 : if (!w_l->looping_previously_known)
1826 314925 : this_looping = false;
1827 :
1828 : /* All nodes within a cycle share the same info. */
1829 2358432 : w_l->pure_const_state = this_state;
1830 2358432 : w_l->looping = this_looping;
1831 :
1832 : /* Inline clones share declaration with their offline copies;
1833 : do not modify their declarations since the offline copy may
1834 : be different. */
1835 2358432 : if (!w->inlined_to)
1836 1043294 : switch (this_state)
1837 : {
1838 162092 : case IPA_CONST:
1839 162092 : remove_p |= ipa_make_function_const (w, this_looping, false);
1840 162092 : break;
1841 :
1842 97983 : case IPA_PURE:
1843 97983 : remove_p |= ipa_make_function_pure (w, this_looping, false);
1844 97983 : break;
1845 :
1846 : default:
1847 : break;
1848 : }
1849 2358432 : w_info = (struct ipa_dfs_info *) w->aux;
1850 2358432 : w = w_info->next_cycle;
1851 : }
1852 : }
1853 :
1854 151453 : ipa_free_postorder_info ();
1855 151453 : free (order);
1856 151453 : return remove_p;
1857 : }
1858 :
1859 : /* Produce transitive closure over the callgraph and compute nothrow
1860 : attributes. */
1861 :
1862 : static void
1863 151453 : propagate_nothrow (void)
1864 : {
1865 151453 : struct cgraph_node *node;
1866 151453 : struct cgraph_node *w;
1867 151453 : struct cgraph_node **order =
1868 151453 : XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1869 151453 : int order_pos;
1870 151453 : int i;
1871 151453 : struct ipa_dfs_info * w_info;
1872 :
1873 151453 : order_pos = ipa_reduced_postorder (order, true,
1874 : ignore_edge_for_nothrow);
1875 151453 : if (dump_file)
1876 : {
1877 29 : cgraph_node::dump_cgraph (dump_file);
1878 29 : ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1879 : }
1880 :
1881 : /* Propagate the local information through the call graph to produce
1882 : the global information. All the nodes within a cycle will have
1883 : the same info so we collapse cycles first. Then we can do the
1884 : propagation in one pass from the leaves to the roots. */
1885 2542512 : for (i = 0; i < order_pos; i++ )
1886 : {
1887 2391059 : bool can_throw = false;
1888 2391059 : node = order[i];
1889 :
1890 2391059 : if (node->alias)
1891 36801 : continue;
1892 :
1893 : /* Find the worst state for any node in the cycle. */
1894 : w = node;
1895 4708681 : while (w && !can_throw)
1896 : {
1897 2354423 : struct cgraph_edge *e, *ie;
1898 :
1899 2354423 : if (!TREE_NOTHROW (w->decl))
1900 : {
1901 934078 : funct_state w_l = funct_state_summaries->get_create (w);
1902 :
1903 934078 : if (w_l->can_throw
1904 934078 : || w->get_availability () == AVAIL_INTERPOSABLE)
1905 : can_throw = true;
1906 :
1907 1791657 : for (e = w->callees; e && !can_throw; e = e->next_callee)
1908 : {
1909 857579 : enum availability avail;
1910 :
1911 857579 : if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1912 251312 : continue;
1913 :
1914 606267 : struct cgraph_node *y = e->callee->
1915 1212534 : function_or_virtual_thunk_symbol (&avail,
1916 606267 : e->caller);
1917 :
1918 : /* We can use info about the callee only if we know it
1919 : cannot be interposed.
1920 : When callee is compiled with non-call exceptions we also
1921 : must check that the declaration is bound to current
1922 : body as other semantically equivalent body may still
1923 : throw. */
1924 606267 : if (avail <= AVAIL_INTERPOSABLE
1925 606267 : || (!TREE_NOTHROW (y->decl)
1926 325647 : && (funct_state_summaries->get_create (y)->can_throw
1927 5711 : || (opt_for_fn (y->decl, flag_non_call_exceptions)
1928 548 : && !e->callee->binds_to_current_def_p (w)))))
1929 : can_throw = true;
1930 : }
1931 947848 : for (ie = w->indirect_calls; ie && !can_throw;
1932 13770 : ie = ie->next_callee)
1933 13770 : if (ie->can_throw_external
1934 12288 : && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1935 13770 : can_throw = true;
1936 : }
1937 2354423 : w_info = (struct ipa_dfs_info *) w->aux;
1938 2354423 : w = w_info->next_cycle;
1939 : }
1940 :
1941 : /* Copy back the region's pure_const_state which is shared by
1942 : all nodes in the region. */
1943 : w = node;
1944 4712690 : while (w)
1945 : {
1946 2358432 : funct_state w_l = funct_state_summaries->get_create (w);
1947 2358432 : if (!can_throw && !TREE_NOTHROW (w->decl))
1948 : {
1949 : /* Inline clones share declaration with their offline copies;
1950 : do not modify their declarations since the offline copy may
1951 : be different. */
1952 18448 : if (!w->inlined_to)
1953 : {
1954 3036 : w->set_nothrow_flag (true);
1955 3036 : if (dump_file)
1956 0 : fprintf (dump_file, "Function found to be nothrow: %s\n",
1957 : w->dump_name ());
1958 : }
1959 : }
1960 919595 : else if (can_throw && !TREE_NOTHROW (w->decl))
1961 919595 : w_l->can_throw = true;
1962 2358432 : w_info = (struct ipa_dfs_info *) w->aux;
1963 2358432 : w = w_info->next_cycle;
1964 : }
1965 : }
1966 :
1967 151453 : ipa_free_postorder_info ();
1968 151453 : free (order);
1969 151453 : }
1970 :
1971 : /* Debugging function to dump state of malloc lattice. */
1972 :
1973 : DEBUG_FUNCTION
1974 : static void
1975 302906 : dump_malloc_lattice (FILE *dump_file, const char *s)
1976 : {
1977 302906 : if (!dump_file)
1978 : return;
1979 :
1980 58 : fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1981 58 : cgraph_node *node;
1982 290 : FOR_EACH_FUNCTION (node)
1983 : {
1984 232 : funct_state fs = funct_state_summaries->get (node);
1985 232 : if (fs)
1986 156 : fprintf (dump_file, "%s: %s\n", node->dump_name (),
1987 156 : malloc_state_names[fs->malloc_state]);
1988 : }
1989 : }
1990 :
1991 : /* Propagate malloc attribute across the callgraph. */
1992 :
1993 : static void
1994 151453 : propagate_malloc (void)
1995 : {
1996 151453 : cgraph_node *node;
1997 3956653 : FOR_EACH_FUNCTION (node)
1998 : {
1999 3805200 : if (DECL_IS_MALLOC (node->decl))
2000 83521 : if (!funct_state_summaries->exists (node))
2001 : {
2002 23320 : funct_state fs = funct_state_summaries->get_create (node);
2003 23320 : fs->malloc_state = STATE_MALLOC;
2004 : }
2005 : }
2006 :
2007 151453 : dump_malloc_lattice (dump_file, "Initial");
2008 151453 : struct cgraph_node **order
2009 151453 : = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
2010 151453 : int order_pos = ipa_reverse_postorder (order);
2011 151453 : bool changed = true;
2012 :
2013 456159 : while (changed)
2014 : {
2015 153253 : changed = false;
2016 : /* Walk in postorder. */
2017 4686268 : for (int i = order_pos - 1; i >= 0; --i)
2018 : {
2019 4533015 : cgraph_node *node = order[i];
2020 6125916 : if (node->alias
2021 4490624 : || !node->definition
2022 7473129 : || !funct_state_summaries->exists (node))
2023 4448488 : continue;
2024 :
2025 2940114 : funct_state l = funct_state_summaries->get (node);
2026 :
2027 : /* FIXME: add support for indirect-calls. */
2028 2940114 : if (node->indirect_calls)
2029 : {
2030 145235 : l->malloc_state = STATE_MALLOC_BOTTOM;
2031 145235 : continue;
2032 : }
2033 :
2034 2794879 : if (node->get_availability () <= AVAIL_INTERPOSABLE)
2035 : {
2036 92965 : l->malloc_state = STATE_MALLOC_BOTTOM;
2037 92965 : continue;
2038 : }
2039 :
2040 2701914 : if (l->malloc_state == STATE_MALLOC_BOTTOM)
2041 2617387 : continue;
2042 :
2043 84527 : auto_vec<cgraph_node *, 16> callees;
2044 369169 : for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2045 : {
2046 284642 : ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2047 284642 : if (es && es->is_return_callee_uncaptured)
2048 13032 : callees.safe_push (cs->callee);
2049 : }
2050 :
2051 84527 : malloc_state_e new_state = l->malloc_state;
2052 97559 : for (unsigned j = 0; j < callees.length (); j++)
2053 : {
2054 13032 : cgraph_node *callee = callees[j];
2055 13032 : if (!funct_state_summaries->exists (node))
2056 : {
2057 : new_state = STATE_MALLOC_BOTTOM;
2058 : break;
2059 : }
2060 13032 : malloc_state_e callee_state
2061 13032 : = funct_state_summaries->get_create (callee)->malloc_state;
2062 13032 : if (new_state < callee_state)
2063 10932 : new_state = callee_state;
2064 : }
2065 84527 : if (new_state != l->malloc_state)
2066 : {
2067 10926 : changed = true;
2068 10926 : l->malloc_state = new_state;
2069 : }
2070 84527 : }
2071 : }
2072 :
2073 2546690 : FOR_EACH_DEFINED_FUNCTION (node)
2074 2395237 : if (funct_state_summaries->exists (node))
2075 : {
2076 2383354 : funct_state l = funct_state_summaries->get (node);
2077 2383354 : if (!node->alias
2078 2358432 : && l->malloc_state == STATE_MALLOC
2079 52114 : && !node->inlined_to
2080 2383783 : && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2081 : {
2082 429 : if (dump_file && (dump_flags & TDF_DETAILS))
2083 6 : fprintf (dump_file, "Function %s found to be malloc\n",
2084 : node->dump_name ());
2085 :
2086 429 : bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2087 429 : node->set_malloc_flag (true);
2088 429 : if (!malloc_decl_p && warn_suggest_attribute_malloc)
2089 0 : warn_function_malloc (node->decl);
2090 : }
2091 : }
2092 :
2093 151453 : dump_malloc_lattice (dump_file, "after propagation");
2094 151453 : ipa_free_postorder_info ();
2095 151453 : free (order);
2096 151453 : }
2097 :
2098 : /* Produce the global information by preforming a transitive closure
2099 : on the local information that was produced by generate_summary. */
2100 :
2101 : unsigned int
2102 151453 : pass_ipa_pure_const::
2103 : execute (function *)
2104 : {
2105 151453 : bool remove_p;
2106 :
2107 : /* Nothrow makes more function to not lead to return and improve
2108 : later analysis. */
2109 151453 : propagate_nothrow ();
2110 151453 : propagate_malloc ();
2111 151453 : remove_p = propagate_pure_const ();
2112 :
2113 151453 : delete funct_state_summaries;
2114 151453 : return remove_p ? TODO_remove_functions : 0;
2115 : }
2116 :
2117 : static bool
2118 4033231 : gate_pure_const (void)
2119 : {
2120 591477 : return flag_ipa_pure_const || in_lto_p;
2121 : }
2122 :
2123 287872 : pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2124 : : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2125 : pure_const_generate_summary, /* generate_summary */
2126 : pure_const_write_summary, /* write_summary */
2127 : pure_const_read_summary, /* read_summary */
2128 : NULL, /* write_optimization_summary */
2129 : NULL, /* read_optimization_summary */
2130 : NULL, /* stmt_fixup */
2131 : 0, /* function_transform_todo_flags_start */
2132 : NULL, /* function_transform */
2133 : NULL), /* variable_transform */
2134 287872 : init_p (false) {}
2135 :
2136 : ipa_opt_pass_d *
2137 287872 : make_pass_ipa_pure_const (gcc::context *ctxt)
2138 : {
2139 287872 : return new pass_ipa_pure_const (ctxt);
2140 : }
2141 :
2142 : /* Simple local pass for pure const discovery reusing the analysis from
2143 : ipa_pure_const. This pass is effective when executed together with
2144 : other optimization passes in early optimization pass queue. */
2145 :
2146 : namespace {
2147 :
2148 : const pass_data pass_data_local_pure_const =
2149 : {
2150 : GIMPLE_PASS, /* type */
2151 : "local-pure-const", /* name */
2152 : OPTGROUP_NONE, /* optinfo_flags */
2153 : TV_IPA_PURE_CONST, /* tv_id */
2154 : 0, /* properties_required */
2155 : 0, /* properties_provided */
2156 : 0, /* properties_destroyed */
2157 : 0, /* todo_flags_start */
2158 : 0, /* todo_flags_finish */
2159 : };
2160 :
2161 : class pass_local_pure_const : public gimple_opt_pass
2162 : {
2163 : public:
2164 575744 : pass_local_pure_const (gcc::context *ctxt)
2165 1151488 : : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2166 : {}
2167 :
2168 : /* opt_pass methods: */
2169 287872 : opt_pass * clone () final override
2170 : {
2171 287872 : return new pass_local_pure_const (m_ctxt);
2172 : }
2173 3442006 : bool gate (function *) final override { return gate_pure_const (); }
2174 : unsigned int execute (function *) final override;
2175 :
2176 : }; // class pass_local_pure_const
2177 :
2178 : unsigned int
2179 3441676 : pass_local_pure_const::execute (function *fun)
2180 : {
2181 3441676 : bool changed = false;
2182 3441676 : funct_state l;
2183 3441676 : bool skip;
2184 3441676 : struct cgraph_node *node;
2185 :
2186 3441676 : node = cgraph_node::get (current_function_decl);
2187 3441676 : skip = skip_function_for_local_pure_const (node);
2188 :
2189 3441676 : if (!warn_suggest_attribute_const
2190 3441653 : && !warn_suggest_attribute_pure
2191 3441632 : && skip)
2192 : return 0;
2193 :
2194 3270092 : l = analyze_function (node, false);
2195 :
2196 : /* Do NORETURN discovery. */
2197 3270092 : if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2198 6511724 : && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2199 : {
2200 26667 : warn_function_noreturn (fun->decl);
2201 26667 : if (dump_file)
2202 1 : fprintf (dump_file, "Function found to be noreturn: %s\n",
2203 : current_function_name ());
2204 :
2205 : /* Update declaration and reduce profile to executed once. */
2206 26667 : if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
2207 : changed = true;
2208 26667 : if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2209 12283 : node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2210 : }
2211 :
2212 3270092 : switch (l->pure_const_state)
2213 : {
2214 644025 : case IPA_CONST:
2215 1288050 : changed |= ipa_make_function_const
2216 644025 : (cgraph_node::get (current_function_decl), l->looping, true);
2217 644025 : break;
2218 :
2219 419695 : case IPA_PURE:
2220 839390 : changed |= ipa_make_function_pure
2221 419695 : (cgraph_node::get (current_function_decl), l->looping, true);
2222 419695 : break;
2223 :
2224 : default:
2225 : break;
2226 : }
2227 3270092 : if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2228 : {
2229 20560 : node->set_nothrow_flag (true);
2230 20560 : changed = true;
2231 20560 : if (dump_file)
2232 2 : fprintf (dump_file, "Function found to be nothrow: %s\n",
2233 : current_function_name ());
2234 : }
2235 :
2236 3270092 : if (l->malloc_state == STATE_MALLOC
2237 3270092 : && !DECL_IS_MALLOC (current_function_decl))
2238 : {
2239 38213 : node->set_malloc_flag (true);
2240 38213 : if (warn_suggest_attribute_malloc)
2241 3 : warn_function_malloc (node->decl);
2242 38213 : changed = true;
2243 38213 : if (dump_file)
2244 2 : fprintf (dump_file, "Function found to be malloc: %s\n",
2245 : node->dump_name ());
2246 : }
2247 :
2248 3270092 : free (l);
2249 3270092 : if (changed)
2250 890393 : return execute_fixup_cfg ();
2251 : else
2252 : return 0;
2253 : }
2254 :
2255 : } // anon namespace
2256 :
2257 : gimple_opt_pass *
2258 287872 : make_pass_local_pure_const (gcc::context *ctxt)
2259 : {
2260 287872 : return new pass_local_pure_const (ctxt);
2261 : }
2262 :
2263 : /* Emit noreturn warnings. */
2264 :
2265 : namespace {
2266 :
2267 : const pass_data pass_data_warn_function_noreturn =
2268 : {
2269 : GIMPLE_PASS, /* type */
2270 : "*warn_function_noreturn", /* name */
2271 : OPTGROUP_NONE, /* optinfo_flags */
2272 : TV_NONE, /* tv_id */
2273 : PROP_cfg, /* properties_required */
2274 : 0, /* properties_provided */
2275 : 0, /* properties_destroyed */
2276 : 0, /* todo_flags_start */
2277 : 0, /* todo_flags_finish */
2278 : };
2279 :
2280 : class pass_warn_function_noreturn : public gimple_opt_pass
2281 : {
2282 : public:
2283 287872 : pass_warn_function_noreturn (gcc::context *ctxt)
2284 575744 : : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2285 : {}
2286 :
2287 : /* opt_pass methods: */
2288 1481735 : bool gate (function *) final override
2289 : {
2290 1481735 : return warn_suggest_attribute_noreturn;
2291 : }
2292 29 : unsigned int execute (function *fun) final override
2293 : {
2294 29 : if (!TREE_THIS_VOLATILE (current_function_decl)
2295 29 : && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2296 4 : warn_function_noreturn (current_function_decl);
2297 29 : return 0;
2298 : }
2299 :
2300 : }; // class pass_warn_function_noreturn
2301 :
2302 : } // anon namespace
2303 :
2304 : gimple_opt_pass *
2305 287872 : make_pass_warn_function_noreturn (gcc::context *ctxt)
2306 : {
2307 287872 : return new pass_warn_function_noreturn (ctxt);
2308 : }
2309 :
2310 : /* Simple local pass for nothrow discovery reusing the analysis from
2311 : ipa_pure_const. This pass is effective when executed together with
2312 : other optimization passes in early optimization pass queue. */
2313 :
2314 : namespace {
2315 :
2316 : const pass_data pass_data_nothrow =
2317 : {
2318 : GIMPLE_PASS, /* type */
2319 : "nothrow", /* name */
2320 : OPTGROUP_NONE, /* optinfo_flags */
2321 : TV_IPA_PURE_CONST, /* tv_id */
2322 : 0, /* properties_required */
2323 : 0, /* properties_provided */
2324 : 0, /* properties_destroyed */
2325 : 0, /* todo_flags_start */
2326 : 0, /* todo_flags_finish */
2327 : };
2328 :
2329 : class pass_nothrow : public gimple_opt_pass
2330 : {
2331 : public:
2332 287872 : pass_nothrow (gcc::context *ctxt)
2333 575744 : : gimple_opt_pass (pass_data_nothrow, ctxt)
2334 : {}
2335 :
2336 : /* opt_pass methods: */
2337 0 : opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
2338 2853317 : bool gate (function *) final override { return optimize; }
2339 : unsigned int execute (function *) final override;
2340 :
2341 : }; // class pass_nothrow
2342 :
2343 : unsigned int
2344 2404002 : pass_nothrow::execute (function *)
2345 : {
2346 2404002 : struct cgraph_node *node;
2347 2404002 : basic_block this_block;
2348 :
2349 2404002 : if (TREE_NOTHROW (current_function_decl))
2350 : return 0;
2351 :
2352 1517223 : node = cgraph_node::get (current_function_decl);
2353 :
2354 : /* We run during lowering, we cannot really use availability yet. */
2355 1517223 : if (cgraph_node::get (current_function_decl)->get_availability ()
2356 : <= AVAIL_INTERPOSABLE)
2357 : {
2358 83141 : if (dump_file)
2359 0 : fprintf (dump_file, "Function is interposable;"
2360 : " not analyzing.\n");
2361 83141 : return true;
2362 : }
2363 :
2364 11072930 : FOR_EACH_BB_FN (this_block, cfun)
2365 : {
2366 20475394 : for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2367 49880368 : !gsi_end_p (gsi);
2368 39642671 : gsi_next (&gsi))
2369 40241520 : if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2370 : {
2371 600244 : if (is_gimple_call (gsi_stmt (gsi)))
2372 : {
2373 379931 : tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2374 379931 : if (callee_t && recursive_call_p (current_function_decl,
2375 : callee_t))
2376 1395 : continue;
2377 : }
2378 :
2379 598849 : if (dump_file)
2380 : {
2381 0 : fprintf (dump_file, "Statement can throw: ");
2382 0 : print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2383 : }
2384 598849 : return 0;
2385 : }
2386 : }
2387 :
2388 835233 : node->set_nothrow_flag (true);
2389 :
2390 835233 : bool cfg_changed = false;
2391 835233 : if (self_recursive_p (node))
2392 29829 : FOR_EACH_BB_FN (this_block, cfun)
2393 80590 : if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
2394 : {
2395 2048 : tree callee_t = gimple_call_fndecl (g);
2396 2048 : if (callee_t
2397 1938 : && recursive_call_p (current_function_decl, callee_t)
2398 571 : && maybe_clean_eh_stmt (g)
2399 2051 : && gimple_purge_dead_eh_edges (this_block))
2400 : cfg_changed = true;
2401 : }
2402 :
2403 835233 : if (dump_file)
2404 33 : fprintf (dump_file, "Function found to be nothrow: %s\n",
2405 : current_function_name ());
2406 835233 : return cfg_changed ? TODO_cleanup_cfg : 0;
2407 : }
2408 :
2409 : } // anon namespace
2410 :
2411 : gimple_opt_pass *
2412 287872 : make_pass_nothrow (gcc::context *ctxt)
2413 : {
2414 287872 : return new pass_nothrow (ctxt);
2415 : }
|