Line data Source code
1 : /* Interprocedural analyses.
2 : Copyright (C) 2005-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "rtl.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "alloc-pool.h"
28 : #include "tree-pass.h"
29 : #include "ssa.h"
30 : #include "tree-streamer.h"
31 : #include "cgraph.h"
32 : #include "diagnostic.h"
33 : #include "fold-const.h"
34 : #include "gimple-iterator.h"
35 : #include "gimple-fold.h"
36 : #include "tree-eh.h"
37 : #include "calls.h"
38 : #include "stor-layout.h"
39 : #include "print-tree.h"
40 : #include "gimplify.h"
41 : #include "gimplify-me.h"
42 : #include "gimple-walk.h"
43 : #include "symbol-summary.h"
44 : #include "sreal.h"
45 : #include "ipa-cp.h"
46 : #include "ipa-prop.h"
47 : #include "tree-cfg.h"
48 : #include "tree-dfa.h"
49 : #include "tree-inline.h"
50 : #include "ipa-fnsummary.h"
51 : #include "gimple-pretty-print.h"
52 : #include "ipa-utils.h"
53 : #include "dbgcnt.h"
54 : #include "domwalk.h"
55 : #include "builtins.h"
56 : #include "tree-cfgcleanup.h"
57 : #include "options.h"
58 : #include "symtab-clones.h"
59 : #include "attr-fnspec.h"
60 : #include "gimple-range.h"
61 : #include "value-range-storage.h"
62 : #include "vr-values.h"
63 : #include "lto-streamer.h"
64 : #include "attribs.h"
65 : #include "attr-callback.h"
66 :
67 : /* Function summary where the parameter infos are actually stored. */
68 : ipa_node_params_t *ipa_node_params_sum = NULL;
69 :
70 : function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
71 :
72 : /* Edge summary for IPA-CP edge information. */
73 : ipa_edge_args_sum_t *ipa_edge_args_sum;
74 :
75 : /* Traits for a hash table for reusing ranges. */
76 :
77 : struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
78 : {
79 : typedef ipa_vr *value_type;
80 : typedef const vrange *compare_type;
81 : static hashval_t
82 17146462 : hash (const ipa_vr *p)
83 : {
84 : // This never get called, except in the verification code, as
85 : // ipa_get_value_range() calculates the hash itself. This
86 : // function is mostly here for completness' sake.
87 17146462 : value_range vr;
88 17146462 : p->get_vrange (vr);
89 17146462 : inchash::hash hstate;
90 17146462 : add_vrange (vr, hstate);
91 17146462 : return hstate.end ();
92 17146462 : }
93 : static bool
94 23771575 : equal (const ipa_vr *a, const vrange *b)
95 : {
96 23771575 : return a->equal_p (*b);
97 : }
98 : static const bool empty_zero_p = true;
99 : static void
100 1930 : mark_empty (ipa_vr *&p)
101 : {
102 1930 : p = NULL;
103 : }
104 : static bool
105 : is_empty (const ipa_vr *p)
106 : {
107 : return p == NULL;
108 : }
109 : static bool
110 43974120 : is_deleted (const ipa_vr *p)
111 : {
112 43974120 : return p == reinterpret_cast<const ipa_vr *> (1);
113 : }
114 : static void
115 147729 : mark_deleted (ipa_vr *&p)
116 : {
117 147729 : p = reinterpret_cast<ipa_vr *> (1);
118 : }
119 : };
120 :
121 : /* Hash table for avoid repeated allocations of equal ranges. */
122 : static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
123 :
124 : /* Holders of ipa cgraph hooks: */
125 : static struct cgraph_node_hook_list *function_insertion_hook_holder;
126 :
127 : /* Description of a reference to an IPA constant. */
128 : struct ipa_cst_ref_desc
129 : {
130 : /* Edge that corresponds to the statement which took the reference. */
131 : struct cgraph_edge *cs;
132 : /* Linked list of duplicates created when call graph edges are cloned. */
133 : struct ipa_cst_ref_desc *next_duplicate;
134 : /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
135 : is out of control. */
136 : int refcount;
137 : };
138 :
139 : /* Allocation pool for reference descriptions. */
140 :
141 : static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
142 : ("IPA-PROP ref descriptions");
143 :
/* Default constructor: an ipa_vr that describes no known range.  */

ipa_vr::ipa_vr ()
  : m_storage (NULL),
    m_type (NULL)
{
}
149 :
/* Construct an ipa_vr holding a GC-allocated copy of the range R.  */

ipa_vr::ipa_vr (const vrange &r)
  : m_storage (ggc_alloc_vrange_storage (r)),
    m_type (r.type ())
{
}
155 :
/* Return true if the range stored here is semantically equal to R.  R must
   not be undefined.  */

bool
ipa_vr::equal_p (const vrange &r) const
{
  gcc_checking_assert (!r.undefined_p ());
  return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
}
162 :
/* Return true if this ipa_vr and O describe the same range, or if neither
   describes any range at all.  */

bool
ipa_vr::equal_p (const ipa_vr &o) const
{
  /* Two unknowns are equal; an unknown and a known range are not.  */
  if (!known_p ())
    return !o.known_p ();

  if (!types_compatible_p (m_type, o.m_type))
    return false;

  value_range r;
  o.get_vrange (r);
  return m_storage->equal_p (r);
}
176 :
/* Store the range this ipa_vr describes into R.  */

void
ipa_vr::get_vrange (value_range &r) const
{
  r.set_range_class (m_type);
  m_storage->get_vrange (r, m_type);
}
183 :
/* Forget any recorded range, returning its GC storage, and mark this ipa_vr
   as unknown.  */

void
ipa_vr::set_unknown ()
{
  if (m_storage)
    ggc_free (m_storage);

  m_storage = NULL;
}
192 :
/* Stream this ipa_vr in from the LTO input block IB, using DATA_IN for tree
   streaming.  A leading bit says whether a range follows; existing storage is
   reused when the incoming range fits into it, otherwise it is freed and
   reallocated.  */

void
ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool known = bp_unpack_value (&bp, 1);
  if (known)
    {
      value_range vr;
      streamer_read_value_range (ib, data_in, vr);
      if (!m_storage || !m_storage->fits_p (vr))
	{
	  if (m_storage)
	    ggc_free (m_storage);
	  m_storage = ggc_alloc_vrange_storage (vr);
	}
      m_storage->set_vrange (vr);
      m_type = vr.type ();
    }
  else
    {
      /* No range was streamed; reset to the unknown state.  */
      m_storage = NULL;
      m_type = NULL;
    }
}
217 :
/* Stream this ipa_vr out to OB.  A single bit records whether a range is
   known; the range itself is only written when it is.  */

void
ipa_vr::streamer_write (output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!m_storage, 1);
  streamer_write_bitpack (&bp);
  if (m_storage)
    {
      value_range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      streamer_write_vrange (ob, vr);
    }
}
231 :
/* Dump this ipa_vr to OUT, printing "NO RANGE" when nothing is known.  */

void
ipa_vr::dump (FILE *out) const
{
  if (known_p ())
    {
      value_range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      vr.dump (out);
    }
  else
    fprintf (out, "NO RANGE");
}
244 :
// These stubs are because we use an ipa_vr in a hash_traits and
// hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
// picking up the gt_ggc_mx (T *) version.
void
gt_pch_nx (ipa_vr *&x)
{
  return gt_pch_nx ((ipa_vr *) x);
}
253 :
/* See the comment above gt_pch_nx (ipa_vr *&) for why this stub exists.  */
void
gt_ggc_mx (ipa_vr *&x)
{
  return gt_ggc_mx ((ipa_vr *) x);
}
259 :
/* Analysis summary of function call return value.  */
struct GTY(()) ipa_return_value_summary
{
  /* Known value range.
     This needs to be wrapped in a structure due to the specific way
     we allocate ipa_vr.  */
  ipa_vr *vr;
};
268 :
/* Function summary for return values.  */
class ipa_return_value_sum_t : public function_summary <ipa_return_value_summary *>
{
public:
  ipa_return_value_sum_t (symbol_table *table, bool ggc):
    function_summary <ipa_return_value_summary *> (table, ggc) { }

  /* Hook that is called by summary when a node is duplicated.  */
  void duplicate (cgraph_node *,
		  cgraph_node *,
		  ipa_return_value_summary *data,
		  ipa_return_value_summary *data2) final override
  {
    /* Shallow copy: the clone shares the vr pointer of the original.  */
    *data2=*data;
  }
};
285 :
/* Structure holding the information that all stores to FLD_OFFSET (measured in
   bytes) of a particular record type REC_TYPE was storing a pointer to
   function FN or that there were multiple functions, which is denoted by fn
   being nullptr.  */

struct GTY((for_user)) noted_fnptr_store
{
  /* The record type whose field is tracked.  */
  tree rec_type;
  /* The single function whose address was stored, or nullptr when several
     different functions were seen.  */
  tree fn;
  /* Byte offset of the field within REC_TYPE.  */
  unsigned fld_offset;
};
297 :
/* Hash traits to have a hash table of noted_fnptr_stores, keyed by the
   (rec_type, fld_offset) pair.  */

struct noted_fnptr_hasher : ggc_ptr_hash <noted_fnptr_store>
{
  static hashval_t hash (noted_fnptr_store *);
  static bool equal (noted_fnptr_store *,
		     noted_fnptr_store *);
};
306 :
307 : hashval_t
308 601802 : noted_fnptr_hasher::hash (noted_fnptr_store *val)
309 : {
310 1203604 : return iterative_hash_host_wide_int (val->fld_offset,
311 601802 : TYPE_UID (val->rec_type));
312 : }
313 :
314 : bool
315 520884 : noted_fnptr_hasher::equal (noted_fnptr_store *v1,
316 : noted_fnptr_store *v2)
317 : {
318 520884 : return (v1->rec_type == v2->rec_type
319 520884 : && v1->fld_offset == v2->fld_offset);
320 : }
321 :
322 :
/* Hash table recording, for each (record type, field offset) pair, which
   single function's address was stored there, or that multiple different
   functions were stored.  */
326 :
327 : static GTY(()) hash_table <noted_fnptr_hasher> *noted_fnptrs_in_records;
328 :
/* Variable holding the return value summary.  */
330 : static GTY(()) function_summary <ipa_return_value_summary *> *ipa_return_value_sum;
331 :
332 :
333 : /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
334 : with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
335 :
336 : static bool
337 4010371 : ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
338 : {
339 4010371 : tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
340 :
341 4010371 : if (!fs_opts)
342 : return false;
343 549976 : return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
344 : }
345 :
346 : /* Return index of the formal whose tree is PTREE in function which corresponds
347 : to INFO. */
348 :
349 : static int
350 42053592 : ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
351 : tree ptree)
352 : {
353 42053592 : int i, count;
354 :
355 42053592 : count = vec_safe_length (descriptors);
356 86208066 : for (i = 0; i < count; i++)
357 75796942 : if ((*descriptors)[i].decl_or_type == ptree)
358 : return i;
359 :
360 : return -1;
361 : }
362 :
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 when no formal parameter matches.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
371 :
372 : static void
373 : ipa_duplicate_jump_function (cgraph_edge *src, cgraph_edge *dst,
374 : ipa_jump_func *src_jf, ipa_jump_func *dst_jf);
375 :
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  Also record each parameter's estimated move cost.  NODE must have a
   gimple body and DESCRIPTORS must already be sized to hold all of its
   formal parameters.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
402 :
403 : /* Return how many formal parameters FNDECL has. */
404 :
405 : int
406 14926658 : count_formal_params (tree fndecl)
407 : {
408 14926658 : tree parm;
409 14926658 : int count = 0;
410 14926658 : gcc_assert (gimple_has_body_p (fndecl));
411 :
412 46113141 : for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
413 31186483 : count++;
414 :
415 14926658 : return count;
416 : }
417 :
418 : /* Return the declaration of Ith formal parameter of the function corresponding
419 : to INFO. Note there is no setter function as this array is built just once
420 : using ipa_initialize_node_params. */
421 :
422 : void
423 609 : ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
424 : {
425 609 : fprintf (file, "param #%i", i);
426 609 : if ((*info->descriptors)[i].decl_or_type)
427 : {
428 609 : fprintf (file, " ");
429 609 : print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
430 : }
431 609 : }
432 :
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  /* Allocate only once and only when there is something to describe.  */
  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
449 :
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  A second call on the same node is a no-op.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
463 :
/* Print VAL which is extracted from a jump function to F.  */

void
ipa_print_constant_value (FILE *f, tree val)
{
  print_generic_expr (f, val);

  /* This is in keeping with values_equal_for_ipcp_p.  */
  if (TREE_CODE (val) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL
	  || (TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && DECL_IN_CONSTANT_POOL (TREE_OPERAND (val, 0)))))
    {
      /* For addresses of constant-pool entries also show the constant
	 itself.  */
      fputs (" -> ", f);
      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
    }
}
481 :
482 : /* Print contents of JFUNC to F. If CTX is non-NULL, dump it too. */
483 :
484 : DEBUG_FUNCTION void
485 1023 : ipa_dump_jump_function (FILE *f, ipa_jump_func *jump_func,
486 : class ipa_polymorphic_call_context *ctx)
487 : {
488 1023 : enum jump_func_type type = jump_func->type;
489 :
490 1023 : if (type == IPA_JF_UNKNOWN)
491 251 : fprintf (f, "UNKNOWN\n");
492 772 : else if (type == IPA_JF_CONST)
493 : {
494 303 : fprintf (f, "CONST: ");
495 303 : ipa_print_constant_value (f, jump_func->value.constant.value);
496 303 : fprintf (f, "\n");
497 : }
498 469 : else if (type == IPA_JF_PASS_THROUGH)
499 : {
500 408 : fprintf (f, "PASS THROUGH: ");
501 408 : fprintf (f, "%d, op %s",
502 : jump_func->value.pass_through.formal_id,
503 : get_tree_code_name(jump_func->value.pass_through.operation));
504 408 : if (jump_func->value.pass_through.operation != NOP_EXPR)
505 : {
506 31 : fprintf (f, " ");
507 31 : if (jump_func->value.pass_through.operand)
508 27 : print_generic_expr (f, jump_func->value.pass_through.operand);
509 31 : fprintf (f, " (in type ");
510 31 : print_generic_expr (f, jump_func->value.pass_through.op_type);
511 31 : fprintf (f, ")");
512 : }
513 408 : if (jump_func->value.pass_through.agg_preserved)
514 134 : fprintf (f, ", agg_preserved");
515 408 : if (jump_func->value.pass_through.refdesc_decremented)
516 0 : fprintf (f, ", refdesc_decremented");
517 408 : fprintf (f, "\n");
518 : }
519 61 : else if (type == IPA_JF_ANCESTOR)
520 : {
521 61 : fprintf (f, "ANCESTOR: ");
522 61 : fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
523 : jump_func->value.ancestor.formal_id,
524 : jump_func->value.ancestor.offset);
525 61 : if (jump_func->value.ancestor.agg_preserved)
526 29 : fprintf (f, ", agg_preserved");
527 61 : if (jump_func->value.ancestor.keep_null)
528 4 : fprintf (f, ", keep_null");
529 61 : fprintf (f, "\n");
530 : }
531 :
532 1023 : if (jump_func->agg.items)
533 : {
534 91 : struct ipa_agg_jf_item *item;
535 91 : int j;
536 :
537 182 : fprintf (f, " Aggregate passed by %s:\n",
538 91 : jump_func->agg.by_ref ? "reference" : "value");
539 377 : FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
540 : {
541 195 : fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
542 : item->offset);
543 195 : fprintf (f, "type: ");
544 195 : print_generic_expr (f, item->type);
545 195 : fprintf (f, ", ");
546 195 : if (item->jftype == IPA_JF_PASS_THROUGH)
547 7 : fprintf (f, "PASS THROUGH: %d,",
548 : item->value.pass_through.formal_id);
549 188 : else if (item->jftype == IPA_JF_LOAD_AGG)
550 : {
551 11 : fprintf (f, "LOAD AGG: %d",
552 : item->value.pass_through.formal_id);
553 11 : fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
554 : item->value.load_agg.offset,
555 11 : item->value.load_agg.by_ref ? "reference"
556 : : "value");
557 : }
558 :
559 195 : if (item->jftype == IPA_JF_PASS_THROUGH
560 195 : || item->jftype == IPA_JF_LOAD_AGG)
561 : {
562 18 : fprintf (f, " op %s",
563 : get_tree_code_name (item->value.pass_through.operation));
564 18 : if (item->value.pass_through.operation != NOP_EXPR)
565 : {
566 9 : fprintf (f, " ");
567 9 : if (item->value.pass_through.operand)
568 8 : print_generic_expr (f, item->value.pass_through.operand);
569 9 : fprintf (f, " (in type ");
570 9 : print_generic_expr (f, jump_func->value.pass_through.op_type);
571 9 : fprintf (f, ")");
572 : }
573 : }
574 177 : else if (item->jftype == IPA_JF_CONST)
575 : {
576 177 : fprintf (f, "CONST: ");
577 177 : ipa_print_constant_value (f, item->value.constant);
578 : }
579 0 : else if (item->jftype == IPA_JF_UNKNOWN)
580 0 : fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
581 0 : tree_to_uhwi (TYPE_SIZE (item->type)));
582 195 : fprintf (f, "\n");
583 : }
584 : }
585 :
586 1023 : if (ctx && !ctx->useless_p ())
587 : {
588 378 : fprintf (f, " Context: ");
589 378 : ctx->dump (dump_file);
590 : }
591 :
592 1023 : if (jump_func->m_vr)
593 : {
594 723 : fprintf (f, " ");
595 723 : jump_func->m_vr->dump (f);
596 723 : fprintf (f, "\n");
597 : }
598 : else
599 300 : fprintf (f, " Unknown VR\n");
600 1023 : }
601 :
602 : /* Print the jump functions associated with call graph edge CS to file F. */
603 :
604 : static void
605 725 : ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
606 : {
607 725 : ipa_edge_args *args = ipa_edge_args_sum->get (cs);
608 725 : int count = ipa_get_cs_argument_count (args);
609 :
610 1748 : for (int i = 0; i < count; i++)
611 : {
612 1023 : struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
613 1023 : class ipa_polymorphic_call_context *ctx
614 1023 : = ipa_get_ith_polymorhic_call_context (args, i);
615 :
616 1023 : fprintf (f, " param %d: ", i);
617 1023 : ipa_dump_jump_function (f, jump_func, ctx);
618 : }
619 725 : }
620 :
621 :
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  /* Direct calls first...  */
  for (cs = node->callees; cs; cs = cs->next_callee)
    {

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  /* ...then indirect calls, together with their indirect-call info and, when
     known, the call statement itself.  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      fprintf (f, " ");
      cs->indirect_info->dump (f, false);
      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
660 :
661 : /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
662 :
663 : void
664 161 : ipa_print_all_jump_functions (FILE *f)
665 : {
666 161 : struct cgraph_node *node;
667 :
668 161 : fprintf (f, "\nJump functions:\n");
669 1216 : FOR_EACH_FUNCTION (node)
670 : {
671 1055 : ipa_print_node_jump_functions (f, node);
672 : }
673 161 : }
674 :
675 : /* Set jfunc to be a know-really nothing jump function. */
676 :
677 : static void
678 517352 : ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
679 : {
680 517352 : jfunc->type = IPA_JF_UNKNOWN;
681 503273 : }
682 :
683 : /* Set DST to be a copy of another SRC. The two functions will share their
684 : rdesc. */
685 :
686 : static void
687 878550 : ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
688 : struct ipa_jump_func *src)
689 :
690 : {
691 878550 : gcc_checking_assert (src->type == IPA_JF_CONST);
692 878550 : dst->type = IPA_JF_CONST;
693 878550 : dst->value.constant = src->value.constant;
694 878550 : }
695 :
/* Set DST to be a copy of another jump function SRC but possibly adjust it to
   a new passed type PARM_TYPE.  If the adjustment fails, the jump function can
   end up being set to the unknown type.  If the conversion is not necessary or
   it succeeds and if the destination rdesc has not been already used, the two
   functions will share their rdesc.  */

static void
ipa_convert_prop_cst_jf (struct ipa_jump_func *dst,
			 struct ipa_jump_func *src,
			 tree parm_type)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  tree new_val = ipacp_value_safe_for_type (parm_type,
					    ipa_get_jf_constant (src));
  if (new_val)
    {
      /* Query DST before overwriting it: when its previous pass-through jump
	 function already had its reference description decremented, taking
	 over SRC's rdesc is not done; the rdesc is zapped instead.  */
      bool rd = ipa_get_jf_pass_through_refdesc_decremented (dst);

      dst->type = IPA_JF_CONST;
      dst->value.constant.value = new_val;
      if (!rd)
	dst->value.constant.rdesc = src->value.constant.rdesc;
      else
	ipa_zap_jf_refdesc (dst);
    }
  else
    ipa_set_jf_unknown (dst);
}
725 :
/* Set JFUNC to be a constant jump function holding CONSTANT taken by edge CS.
   For addresses of functions and of static variables also allocate a
   reference description so that reference counts can be maintained later.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (VAR_P (TREE_OPERAND (constant, 0))
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
751 :
752 : /* Set JFUNC to be a simple pass-through jump function. */
753 : static void
754 1368148 : ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
755 : bool agg_preserved)
756 : {
757 1368148 : jfunc->type = IPA_JF_PASS_THROUGH;
758 1368148 : jfunc->value.pass_through.operand = NULL_TREE;
759 1368148 : jfunc->value.pass_through.op_type = NULL_TREE;
760 1368148 : jfunc->value.pass_through.formal_id = formal_id;
761 1368148 : jfunc->value.pass_through.operation = NOP_EXPR;
762 1368148 : jfunc->value.pass_through.agg_preserved = agg_preserved;
763 1368148 : jfunc->value.pass_through.refdesc_decremented = false;
764 1234714 : }
765 :
766 : /* Set JFUNC to be an unary pass through jump function. */
767 :
768 : static void
769 1021 : ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
770 : enum tree_code operation, tree op_type)
771 : {
772 1021 : jfunc->type = IPA_JF_PASS_THROUGH;
773 1021 : jfunc->value.pass_through.operand = NULL_TREE;
774 1021 : jfunc->value.pass_through.op_type = op_type;
775 1021 : jfunc->value.pass_through.formal_id = formal_id;
776 1021 : jfunc->value.pass_through.operation = operation;
777 1021 : jfunc->value.pass_through.agg_preserved = false;
778 1021 : jfunc->value.pass_through.refdesc_decremented = false;
779 1021 : }
780 : /* Set JFUNC to be an arithmetic pass through jump function. */
781 :
782 : static void
783 41786 : ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
784 : tree operand, enum tree_code operation,
785 : tree op_type)
786 : {
787 41786 : jfunc->type = IPA_JF_PASS_THROUGH;
788 0 : jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
789 41786 : jfunc->value.pass_through.op_type = op_type;
790 41786 : jfunc->value.pass_through.formal_id = formal_id;
791 41786 : jfunc->value.pass_through.operation = operation;
792 41786 : jfunc->value.pass_through.agg_preserved = false;
793 41786 : jfunc->value.pass_through.refdesc_decremented = false;
794 41786 : }
795 :
796 : /* Set JFUNC to be an ancestor jump function. */
797 :
798 : static void
799 181391 : ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
800 : int formal_id, bool agg_preserved, bool keep_null)
801 : {
802 181391 : jfunc->type = IPA_JF_ANCESTOR;
803 181391 : jfunc->value.ancestor.formal_id = formal_id;
804 181391 : jfunc->value.ancestor.offset = offset;
805 181391 : jfunc->value.ancestor.agg_preserved = agg_preserved;
806 181391 : jfunc->value.ancestor.keep_null = keep_null;
807 180723 : }
808 :
809 : /* Get IPA BB information about the given BB. FBI is the context of analyzis
810 : of this function body. */
811 :
812 : static struct ipa_bb_info *
813 33163065 : ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
814 : {
815 27657391 : gcc_checking_assert (fbi);
816 33163065 : return &fbi->bb_infos[bb->index];
817 : }
818 :
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
833 :
834 : /* Return true if STMT can modify a virtual method table pointer.
835 :
836 : This function makes special assumptions about both constructors and
837 : destructors which are all the functions that are allowed to alter the VMT
838 : pointers. It assumes that destructors begin with assignment into all VMT
839 : pointers and that constructors essentially look in the following way:
840 :
841 : 1) The very first thing they do is that they call constructors of ancestor
842 : sub-objects that have them.
843 :
844 : 2) Then VMT pointers of this and all its ancestors is set to new values
845 : corresponding to the type corresponding to the constructor.
846 :
847 : 3) Only afterwards, other stuff such as constructor of member sub-objects
848 : and the code written by the user is run. Only this may include calling
849 : virtual functions, directly or indirectly.
850 :
851 : There is no way to call a constructor of an ancestor sub-object in any
852 : other way.
853 :
854 : This means that we do not have to care whether constructors get the correct
855 : type information because they will always change it (in fact, if we define
856 : the type to be given by the VMT pointer, it is undefined).
857 :
858 : The most important fact to derive from the above is that if, for some
859 : statement in the section 3, we try to detect whether the dynamic type has
860 : changed, we can safely ignore all calls as we examine the function body
861 : backwards until we reach statements in section 2 because these calls cannot
862 : be ancestor constructors or destructors (if the input is not bogus) and so
863 : do not change the dynamic type (this holds true only for automatically
864 : allocated objects but at the moment we devirtualize only these). We then
865 : must detect that statements in section 2 change the dynamic type and can try
866 : to derive the new type. That is enough and we can stop, we will never see
867 : the calls into constructors of sub-objects in this code. Therefore we can
868 : safely ignore all call statements that we traverse.
869 : */
870 :
/* Return true if STMT may store to a virtual method table pointer; see the
   big comment above for the assumptions about constructors and destructors
   that make ignoring all calls safe.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* Per the discussion above, calls can be ignored entirely.  */
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Aggregate copies may include a VMT pointer, so only scalar stores
	 can be filtered out here.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
898 :
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */
903 :
904 : static bool
905 9 : check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
906 : {
907 9 : gimple *stmt = SSA_NAME_DEF_STMT (vdef);
908 9 : struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
909 :
910 9 : if (stmt_may_be_vtbl_ptr_store (stmt))
911 : {
912 0 : tci->type_maybe_changed = true;
913 0 : return true;
914 : }
915 : else
916 : return false;
917 : }
918 :
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the invocation of CALL.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   the type of the THIS pointer.  */
927 :
static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  /* Only a default-definition SSA name of a PARM_DECL can describe the
     instance exactly as it was when the function was entered.  */
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
972 :
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */
981 :
static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* NOTE(review): with an exhausted alias-walk budget this returns false,
     i.e. "no change detected" — confirm this optimistic answer is intended
     here, unlike the conservative true returned on an aborted walk below.  */
  if (fbi->aa_walk_budget == 0)
    return false;

  /* Restrict the reference to the vtable pointer slot: POINTER_SIZE bits at
     OFFSET within BASE.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk the virtual definitions reaching CALL; check_stmt_for_type_change
     sets tci.type_maybe_changed when a potential vtable store is seen.  */
  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  /* Charge the statements visited against the remaining budget; a negative
     result means the walk was aborted, so the budget is fully spent.  */
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  /* Only a completed walk that saw no vtable store proves the type did not
     change; an aborted walk is treated conservatively as "maybe changed".  */
  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
1032 :
1033 : /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
1034 : If it is, return true. ARG is the object itself (not a pointer
1035 : to it, unless dereferenced). BASE is the base of the memory access as
1036 : returned by get_ref_base_and_extent, as is the offset. */
1037 :
1038 : static bool
1039 749 : detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
1040 : tree comp_type, gcall *call,
1041 : HOST_WIDE_INT offset)
1042 : {
1043 749 : if (!flag_devirtualize)
1044 : return false;
1045 :
1046 749 : if (TREE_CODE (base) == MEM_REF
1047 1498 : && !param_type_may_change_p (current_function_decl,
1048 749 : TREE_OPERAND (base, 0),
1049 : call))
1050 : return false;
1051 12 : return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
1052 12 : call, offset);
1053 : }
1054 :
1055 : /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
1056 : SSA name (its dereference will become the base and the offset is assumed to
1057 : be zero). */
1058 :
1059 : static bool
1060 8757 : detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
1061 : gcall *call)
1062 : {
1063 8757 : gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
1064 8757 : if (!flag_devirtualize
1065 8757 : || !POINTER_TYPE_P (TREE_TYPE (arg)))
1066 : return false;
1067 :
1068 8757 : if (!param_type_may_change_p (current_function_decl, arg, call))
1069 : return false;
1070 :
1071 5 : arg = build2 (MEM_REF, ptr_type_node, arg,
1072 : build_int_cst (ptr_type_node, 0));
1073 :
1074 5 : return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
1075 5 : call, 0);
1076 : }
1077 :
1078 : /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
1079 : boolean variable pointed to by DATA. */
1080 :
1081 : static bool
1082 1556785 : mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
1083 : void *data)
1084 : {
1085 1556785 : bool *b = (bool *) data;
1086 1556785 : *b = true;
1087 1556785 : return true;
1088 : }
1089 :
1090 : /* Find the nearest valid aa status for parameter specified by INDEX that
1091 : dominates BB. */
1092 :
1093 : static struct ipa_param_aa_status *
1094 4344812 : find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
1095 : int index)
1096 : {
1097 13086944 : while (true)
1098 : {
1099 13086944 : bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1100 13086944 : if (!bb)
1101 : return NULL;
1102 10164933 : struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1103 12230969 : if (!bi->param_aa_statuses.is_empty ()
1104 2066036 : && bi->param_aa_statuses[index].valid)
1105 1422801 : return &bi->param_aa_statuses[index];
1106 : }
1107 : }
1108 :
1109 : /* Get AA status structure for the given BB and parameter with INDEX. Allocate
1110 : structures and/or intialize the result with a dominating description as
1111 : necessary. */
1112 :
static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate one status slot per parameter for this BB.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared slot must not carry any stale modification
	 flags.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed the new status from the nearest dominating block that already
	 has one; otherwise start from an all-clear valid status.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
1137 :
1138 : /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
1139 : a value known not to be modified in this function before reaching the
1140 : statement STMT. FBI holds information about the function we have so far
1141 : gathered but do not survive the summary building stage. */
1142 :
static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  /* A read-only parameter can never be modified in the body.  */
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Give up if a modification was already cached for this BB or the alias
     walk budget is exhausted.  */
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* mark_modified sets MODIFIED on the first aliasing virtual def.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Aborted walk: spend the whole budget and assume modification.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache a positive modification result for later queries in this BB.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
1177 :
1178 : /* If STMT is an assignment that loads a value from an parameter declaration,
1179 : return the index of the parameter in ipa_node_params which has not been
1180 : modified. Otherwise return -1. */
1181 :
1182 : static int
1183 6450013 : load_from_unmodified_param (struct ipa_func_body_info *fbi,
1184 : vec<ipa_param_descriptor, va_gc> *descriptors,
1185 : gimple *stmt)
1186 : {
1187 6450013 : int index;
1188 6450013 : tree op1;
1189 :
1190 6450013 : if (!gimple_assign_single_p (stmt))
1191 : return -1;
1192 :
1193 4295893 : op1 = gimple_assign_rhs1 (stmt);
1194 4295893 : if (TREE_CODE (op1) != PARM_DECL)
1195 : return -1;
1196 :
1197 69923 : index = ipa_get_param_decl_index_1 (descriptors, op1);
1198 69923 : if (index < 0
1199 69923 : || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1200 24733 : return -1;
1201 :
1202 : return index;
1203 : }
1204 :
1205 : /* Return true if memory reference REF (which must be a load through parameter
1206 : with INDEX) loads data that are known to be unmodified in this function
1207 : before reaching statement STMT. */
1208 :
static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Bail out on a cached modification or an exhausted alias-walk budget.  */
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  /* mark_modified flags MODIFIED on the first virtual def aliasing REF.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Aborted walk: spend the whole budget and assume modification.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache a positive result so later queries in this BB are cheap.  */
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
1238 :
1239 : /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1240 : is known to be unmodified in this function before reaching call statement
1241 : CALL into which it is passed. FBI describes the function body. */
1242 :
static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  /* Bail out on a cached modification or an exhausted alias-walk budget.  */
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  /* Walk with an unknown size: any store through PARM counts.  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Aborted walk: spend the whole budget and assume modification.  */
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache a positive result so later queries in this BB are cheap.  */
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1278 :
1279 : /* Return true if we can prove that OP is a memory reference loading
1280 : data from an aggregate passed as a parameter.
1281 :
1282 : The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1283 : false if it cannot prove that the value has not been modified before the
1284 : load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1285 : if it cannot prove the value has not been modified, in that case it will
1286 : store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1287 :
1288 : INFO and PARMS_AINFO describe parameters of the current function (but the
1289 : latter can be NULL), STMT is the load statement. If function returns true,
1290 : *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1291 : within the aggregate and whether it is a load from a value passed by
1292 : reference respectively.
1293 :
1294 : Return false if the offset divided by BITS_PER_UNIT would not fit into an
1295 : unsigned int. */
1296 :
bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  /* Reject references without a constant base/extent, and offsets whose
     byte value would not fit the unsigned int used by later consumers.  */
  if (!base
      || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
    return false;

  /* We can not propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  /* Case 1: load directly from an aggregate parameter passed by value.  */
  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  /* Case 2: load through a pointer; only a zero-offset MEM_REF of an
     SSA name qualifies.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      /* The pointer is the incoming parameter value itself.  */
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      /* When GUARANTEED_UNMODIFIED is supplied, report possibly-modified
	 data through it instead of failing outright.  */
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
1381 :
1382 : /* If STMT is an assignment that loads a value from a parameter declaration,
1383 : or from an aggregate passed as the parameter either by value or reference,
1384 : return the index of the parameter in ipa_node_params. Otherwise return -1.
1385 :
1386 : FBI holds gathered information about the function. INFO describes
1387 : parameters of the function, STMT is the assignment statement. If it is a
1388 : memory load from an aggregate, *OFFSET_P is filled with offset within the
1389 : aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1390 : reference. */
1391 :
static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  An offset of -1 marks a
     direct (non-aggregate) parameter load for the caller.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  /* The loaded extent must match the type size exactly.  */
  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      /* By-value loads only make sense from aggregate-typed parameters.  */
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
1442 :
/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */
bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  /* Bound the number of look-ups to keep the walk cheap.  */
  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &extra_offset);
	  if (!base)
	    {
	      /* Variable offset within the object; the base pointer can
		 still be tracked, but the offset becomes unknown.  */
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  /* Strip the MEM_REF, accumulating its constant offset if it fits
	     a signed HOST_WIDE_INT.  */
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    /* Plain copy: look through it.  */
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      /* Pointer adjustment: accumulate the addend when constant.  */
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
1509 :
1510 : /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1511 : of an assignment statement STMT, try to determine whether we are actually
1512 : handling any of the following cases and construct an appropriate jump
1513 : function into JFUNC if so:
1514 :
1515 : 1) The passed value is loaded from a formal parameter which is not a gimple
1516 : register (most probably because it is addressable, the value has to be
1517 : scalar) and we can guarantee the value has not changed. This case can
1518 : therefore be described by a simple pass-through jump function. For example:
1519 :
1520 : foo (int a)
1521 : {
1522 : int a.0;
1523 :
1524 : a.0_2 = a;
1525 : bar (a.0_2);
1526 :
1527 : 2) The passed value can be described by a simple arithmetic pass-through
1528 : jump function. E.g.
1529 :
1530 : foo (int a)
1531 : {
1532 : int D.2064;
1533 :
1534 : D.2064_4 = a.1(D) + 4;
1535 : bar (D.2064_4);
1536 :
1537 : This case can also occur in combination of the previous one, e.g.:
1538 :
1539 : foo (int a, int z)
1540 : {
1541 : int a.0;
1542 : int D.2064;
1543 :
1544 : a.0_3 = a;
1545 : D.2064_4 = a.0_3 + 4;
1546 : foo (D.2064_4);
1547 :
1548 : 3) The passed value is an address of an object within another one (which
1549 : also passed by reference). Such situations are described by an ancestor
1550 : jump function and describe situations such as:
1551 :
1552 : B::foo() (struct B * const this)
1553 : {
1554 : struct A * D.1845;
1555 :
1556 : D.1845_2 = &this_1(D)->D.1748;
1557 : A::bar (D.1845_2);
1558 :
1559 : INFO is the structure describing individual parameters access different
1560 : stages of IPA optimizations. PARMS_AINFO contains the information that is
1561 : only needed for intraprocedural analysis. */
1562 :
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* Determine whether the first operand traces back to an unmodified
     parameter, either directly or through a load.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Variably modified types cannot be streamed for LTO; produce no
	 jump function for them.  */
      if (lto_variably_modified_type_p (TREE_TYPE (name)))
	return;

      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    /* Case 2 of the function comment: parameter OP constant.  The
	       constant operand must be IP-invariant and, except for
	       comparisons, the types must match.  */
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt),
					   TREE_TYPE (name));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    /* Case 1: a plain copy of the parameter value.  */
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  /* Unary operation on the parameter; conversions are excluded.
	     Falls through to the empty default — nothing more to do.  */
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt),
					   TREE_TYPE (name));
	default:;
	}
      return;
    }

  /* Case 3 of the function comment: taking the address of a component
     within an object pointed to by a parameter (ancestor jump function).  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa),
			 false);
}
1654 :
1655 : /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1656 : it looks like:
1657 :
1658 : iftmp.1_3 = &obj_2(D)->D.1762;
1659 :
1660 : The base of the MEM_REF must be a default definition SSA NAME of a
1661 : parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1662 : whole MEM_REF expression is returned and the offset calculated from any
1663 : handled components and the MEM_REF itself is stored into *OFFSET. The whole
1664 : RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1665 :
static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  /* Only a plain single-rhs assignment of an address qualifies.  */
  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  /* Reduce the reference to its base; *OFFSET receives the accumulated
     component offset in bits.  */
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  /* The MEM_REF base must be the incoming value of a parameter.  */
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own offset into the total.  */
  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1698 :
1699 :
1700 : /* Given that an actual argument is an SSA_NAME that is a result of a phi
1701 : statement PHI, try to find out whether NAME is in fact a
1702 : multiple-inheritance typecast from a descendant into an ancestor of a formal
1703 : parameter and thus can be described by an ancestor jump function and if so,
1704 : write the appropriate function into JFUNC.
1705 :
1706 : Essentially we want to match the following pattern:
1707 :
1708 : if (obj_2(D) != 0B)
1709 : goto <bb 3>;
1710 : else
1711 : goto <bb 4>;
1712 :
1713 : <bb 3>:
1714 : iftmp.1_3 = &obj_2(D)->D.1762;
1715 :
1716 : <bb 4>:
1717 : # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1718 : D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1719 : return D.1879_6; */
1720 :
static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  /* The pattern requires exactly two PHI arguments: the adjusted pointer
     and a null constant.  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  /* Pick the non-null PHI argument; give up if neither argument is a null
     pointer constant.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* The non-null value must be a locally defined SSA pointer to a
     RECORD_TYPE.  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  /* The defining statement must be an ancestor address computation off a
     parameter (iftmp.1_3 = &obj_2(D)->D.1762 in the comment above).  */
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The predecessor must end in "if (parm != 0)" guarding the adjustment.  */
  cond_bb = single_pred (assign_bb);
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
  if (!cond
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come from the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}
1780 :
1781 : /* Inspect the given TYPE and return true iff it has the same structure (the
1782 : same number of fields of the same types) as a C++ member pointer. If
1783 : METHOD_PTR and DELTA are non-NULL, store the trees representing the
1784 : corresponding fields there. */
1785 :
1786 : static bool
1787 876 : type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1788 : {
1789 876 : tree fld;
1790 :
1791 876 : if (TREE_CODE (type) != RECORD_TYPE)
1792 : return false;
1793 :
1794 876 : fld = TYPE_FIELDS (type);
1795 876 : if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1796 876 : || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1797 1752 : || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1798 : return false;
1799 :
1800 876 : if (method_ptr)
1801 876 : *method_ptr = fld;
1802 :
1803 876 : fld = DECL_CHAIN (fld);
1804 876 : if (!fld || INTEGRAL_TYPE_P (fld)
1805 1752 : || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1806 : return false;
1807 876 : if (delta)
1808 876 : *delta = fld;
1809 :
1810 876 : if (DECL_CHAIN (fld))
1811 : return false;
1812 :
1813 : return true;
1814 : }
1815 :
1816 : /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1817 : return the rhs of its defining statement, and this statement is stored in
1818 : *RHS_STMT. Otherwise return RHS as it is. */
1819 :
1820 : static inline tree
1821 126266 : get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1822 : {
1823 167763 : while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1824 : {
1825 91043 : gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1826 :
1827 91043 : if (gimple_assign_single_p (def_stmt))
1828 41497 : rhs = gimple_assign_rhs1 (def_stmt);
1829 : else
1830 : break;
1831 41497 : *rhs_stmt = def_stmt;
1832 : }
1833 126266 : return rhs;
1834 : }
1835 :
1836 : /* Simple linked list, describing contents of an aggregate before call. */
1837 :
struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate, in bits
     (SIZE is compared against TYPE_SIZE by build_agg_jump_func_from_list).  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list; lists are kept sorted by
     ascending OFFSET (see add_to_agg_contents_list).  */
  struct ipa_known_agg_contents_list *next;
};
1852 :
1853 : /* Add an aggregate content item into a linked list of
1854 : ipa_known_agg_contents_list structure, in which all elements
1855 : are sorted ascendingly by offset. */
1856 :
1857 : static inline void
1858 2703243 : add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1859 : struct ipa_known_agg_contents_list *item)
1860 : {
1861 2703243 : struct ipa_known_agg_contents_list *list = *plist;
1862 :
1863 5019762 : for (; list; list = list->next)
1864 : {
1865 3830201 : if (list->offset >= item->offset)
1866 : break;
1867 :
1868 2316519 : plist = &list->next;
1869 : }
1870 :
1871 2703243 : item->next = list;
1872 2703243 : *plist = item;
1873 : }
1874 :
1875 : /* Check whether a given aggregate content is clobbered by certain element in
1876 : a linked list of ipa_known_agg_contents_list. */
1877 :
1878 : static inline bool
1879 1054079 : clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1880 : struct ipa_known_agg_contents_list *item)
1881 : {
1882 2253083 : for (; list; list = list->next)
1883 : {
1884 1823144 : if (list->offset >= item->offset)
1885 611480 : return list->offset < item->offset + item->size;
1886 :
1887 1211664 : if (list->offset + list->size > item->offset)
1888 : return true;
1889 : }
1890 :
1891 : return false;
1892 : }
1893 :
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  A
	     non-negative offset distinguishes a load from an aggregate
	     from a simple pass-through (see analyze_agg_content_value).  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  /* load_agg and pass_through share storage in the union; the
	     operand is unshared so the jump function owns its tree.  */
	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      /* Offsets are stored relative to the start of the argument and must
	 be byte-aligned.  */
      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
1940 :
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  /* Bail out on stores we do not track: aggregate-typed, volatile, or
     bit-field accesses.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as a
	 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
	 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2
	  || lto_variably_modified_type_p (TREE_TYPE (lhs)))
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      /* Exactly one PHI argument must be an IPA invariant; the other one
	 is followed further below.  */
      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
      agg_value->pass_through.op_type = TREE_TYPE (lhs);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      /* Constant content: FORMAL_ID stays -1, only OPERAND set.  */
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	     tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an operation
	     with one operand, here we only allow tc_unary operation to avoid
	     possible problem.  Then we can use (opclass == tc_unary) or not to
	     distinguish unary and binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code)
	      || lto_variably_modified_type_p (TREE_TYPE (lhs)))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  agg_value->pass_through.op_type = TREE_TYPE (lhs);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    if (lto_variably_modified_type_p (TREE_TYPE (lhs)))
	      return;

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    /* One operand must be invariant; if it is the first one, swap
	       operands (adjusting comparison codes accordingly).  */
	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		agg_value->pass_through.op_type = TREE_TYPE (lhs);
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		agg_value->pass_through.op_type = TREE_TYPE (lhs);
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  /* Map RHS1 back to a formal parameter: either a load from an unmodified
     parameter/aggregate, or a default definition of a parameter.  */
  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
2141 :
2142 : /* If STMT is a memory store to the object whose address is BASE, extract
2143 : information (offset, size, and value) into CONTENT, and return true,
2144 : otherwise we conservatively assume the whole object is modified with
2145 : unknown content, and return false. CHECK_REF means that access to object
2146 : is expected to be in form of MEM_REF expression. */
2147 :
2148 : static bool
2149 2788942 : extract_mem_content (struct ipa_func_body_info *fbi,
2150 : gimple *stmt, tree base, bool check_ref,
2151 : struct ipa_known_agg_contents_list *content)
2152 : {
2153 2788942 : HOST_WIDE_INT lhs_offset, lhs_size;
2154 2788942 : bool reverse;
2155 :
2156 2788942 : if (!is_gimple_assign (stmt))
2157 : return false;
2158 :
2159 1822584 : tree lhs = gimple_assign_lhs (stmt);
2160 1822584 : tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
2161 : &reverse);
2162 1822584 : if (!lhs_base)
2163 : return false;
2164 :
2165 1820925 : if (check_ref)
2166 : {
2167 144118 : if (TREE_CODE (lhs_base) != MEM_REF
2168 115828 : || TREE_OPERAND (lhs_base, 0) != base
2169 189750 : || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
2170 104697 : return false;
2171 : }
2172 1676807 : else if (lhs_base != base)
2173 : return false;
2174 :
2175 1712210 : content->offset = lhs_offset;
2176 1712210 : content->size = lhs_size;
2177 1712210 : content->type = TREE_TYPE (lhs);
2178 1712210 : content->next = NULL;
2179 :
2180 1712210 : analyze_agg_content_value (fbi, &content->value, stmt);
2181 1712210 : return true;
2182 : }
2183 :
2184 : /* Traverse statements from CALL backwards, scanning whether an aggregate given
2185 : in ARG is filled in constants or values that are derived from caller's
2186 : formal parameter in the way described by some kinds of jump functions. FBI
2187 : is the context of the caller function for interprocedural analysis. ARG can
2188 : either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2189 : the type of the aggregate, JFUNC is the jump function for the aggregate. */
2190 :
static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  /* Aggregate tracking disabled for this function.  */
  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gphi *phi = dyn_cast <gphi *> (stmt))
	{
	  dom_vuse = get_continuation_for_phi (phi, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot analyze ends the walk.  */
	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content)
	      /* Since IPA-CP stores results with unsigned int offsets, we can
		 discard those which would not fit now before we stream them to
		 WPA.  */
	      && (content->offset + content->size - arg_offset
		  <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
2337 :
2338 :
2339 : /* Return the Ith param type of callee associated with call graph
2340 : edge E. */
2341 :
2342 : tree
2343 6194932 : ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2344 : {
2345 6194932 : int n;
2346 6194932 : tree type = (e->callee
2347 6194932 : ? TREE_TYPE (e->callee->decl)
2348 6194932 : : gimple_call_fntype (e->call_stmt));
2349 6194932 : tree t = TYPE_ARG_TYPES (type);
2350 :
2351 12746322 : for (n = 0; n < i; n++)
2352 : {
2353 6804350 : if (!t)
2354 : break;
2355 6551390 : t = TREE_CHAIN (t);
2356 : }
2357 6194932 : if (t && t != void_list_node)
2358 5848953 : return TREE_VALUE (t);
2359 345979 : if (!e->callee)
2360 : return NULL;
2361 324193 : t = DECL_ARGUMENTS (e->callee->decl);
2362 850860 : for (n = 0; n < i; n++)
2363 : {
2364 806371 : if (!t)
2365 : return NULL;
2366 526667 : t = TREE_CHAIN (t);
2367 : }
2368 44489 : if (t)
2369 2192 : return TREE_TYPE (t);
2370 : return NULL;
2371 : }
2372 :
2373 : /* Return a pointer to an ipa_vr just like TMP, but either find it in
2374 : ipa_vr_hash_table or allocate it in GC memory. */
2375 :
2376 : static ipa_vr *
2377 5707796 : ipa_get_value_range (const vrange &tmp)
2378 : {
2379 5707796 : inchash::hash hstate;
2380 5707796 : inchash::add_vrange (tmp, hstate);
2381 5707796 : hashval_t hash = hstate.end ();
2382 5707796 : ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
2383 5707796 : if (*slot)
2384 : return *slot;
2385 :
2386 635660 : ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
2387 635660 : *slot = vr;
2388 635660 : return vr;
2389 : }
2390 :
/* Assign to JF a pointer to a range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
2399 :
/* Overload taking an ipa_vr: expand VR into a temporary value_range and
   delegate to the vrange variant of ipa_set_jfunc_vr.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
{
  value_range tmp;
  vr.get_vrange (tmp);
  ipa_set_jfunc_vr (jf, tmp);
}
2407 :
2408 : /* Given VAL that conforms to is_gimple_ip_invariant, produce a VRANGE that
2409 : represents it as a range. CONTEXT_NODE is the call graph node representing
2410 : the function for which optimization flags should be evaluated. */
2411 :
2412 : void
2413 1412874 : ipa_get_range_from_ip_invariant (vrange &r, tree val, cgraph_node *context_node)
2414 : {
2415 1412874 : if (TREE_CODE (val) == ADDR_EXPR)
2416 : {
2417 1023 : symtab_node *symbol;
2418 1023 : tree base = TREE_OPERAND (val, 0);
2419 1023 : if (!DECL_P (base))
2420 : {
2421 182 : r.set_varying (TREE_TYPE (val));
2422 182 : return;
2423 : }
2424 841 : if (!decl_in_symtab_p (base))
2425 : {
2426 0 : r.set_nonzero (TREE_TYPE (val));
2427 0 : return;
2428 : }
2429 841 : if (!(symbol = symtab_node::get (base)))
2430 : {
2431 0 : r.set_varying (TREE_TYPE (val));
2432 0 : return;
2433 : }
2434 :
2435 841 : bool delete_null_pointer_checks
2436 841 : = opt_for_fn (context_node->decl, flag_delete_null_pointer_checks);
2437 841 : if (symbol->nonzero_address (delete_null_pointer_checks))
2438 841 : r.set_nonzero (TREE_TYPE (val));
2439 : else
2440 0 : r.set_varying (TREE_TYPE (val));
2441 : }
2442 : else
2443 1411851 : r.set (val, val);
2444 : }
2445 :
2446 : /* If T is an SSA_NAME that is the result of a simple type conversion statement
2447 : from an integer type to another integer type which is known to be able to
2448 : represent the values the operand of the conversion can hold, return the
2449 : operand of that conversion, otherwise return T. */
2450 :
2451 : static tree
2452 6194932 : skip_a_safe_conversion_op (tree t)
2453 : {
2454 6194932 : if (TREE_CODE (t) != SSA_NAME
2455 6194932 : || SSA_NAME_IS_DEFAULT_DEF (t))
2456 : return t;
2457 :
2458 1515528 : gimple *def = SSA_NAME_DEF_STMT (t);
2459 1515528 : if (!is_gimple_assign (def)
2460 1250292 : || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))
2461 244060 : || !INTEGRAL_TYPE_P (TREE_TYPE (t))
2462 1694305 : || !INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def))))
2463 : return t;
2464 :
2465 170759 : tree rhs1 = gimple_assign_rhs1 (def);
2466 170759 : if (TYPE_PRECISION (TREE_TYPE (t))
2467 170759 : >= TYPE_PRECISION (TREE_TYPE (rhs1)))
2468 : return gimple_assign_rhs1 (def);
2469 :
2470 8285 : value_range vr (TREE_TYPE (rhs1));
2471 16570 : if (!get_range_query (cfun)->range_of_expr (vr, rhs1, def)
2472 8285 : || vr.undefined_p ())
2473 : return t;
2474 :
2475 8267 : irange &ir = as_a <irange> (vr);
2476 8267 : if (range_fits_type_p (&ir, TYPE_PRECISION (TREE_TYPE (t)),
2477 8267 : TYPE_SIGN (TREE_TYPE (t))))
2478 4012 : return gimple_assign_rhs1 (def);
2479 :
2480 : return t;
2481 8285 : }
2482 :
2483 : /* Initializes ipa_edge_args summary of CBE given its callback-carrying edge.
2484 : This primarily means allocating the correct amount of jump functions. */
2485 :
2486 : static inline void
2487 15083 : init_callback_edge_summary (struct cgraph_edge *cbe, tree attr)
2488 : {
2489 15083 : ipa_edge_args *cb_args = ipa_edge_args_sum->get_create (cbe);
2490 15083 : size_t jf_vec_length = callback_num_args(attr);
2491 15083 : vec_safe_grow_cleared (cb_args->jump_functions,
2492 : jf_vec_length, true);
2493 15083 : }
2494 :
2495 : /* Compute jump function for all arguments of callsite CS and insert the
2496 : information in the jump_functions array in the ipa_edge_args corresponding
2497 : to this callsite. */
2498 :
static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if jump functions have already
     been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  /* Callback edges created while scanning arguments; their summaries are
     filled in after the main loop.  */
  auto_vec<cgraph_edge*> callback_edges;
  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* Record the polymorphic call context of pointer arguments for
	 devirtualization.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Compute and store a value range for the argument.  Pointers get a
	 range augmented with alignment information encoded as a bitmask;
	 other supported types get the range cast to the parameter type.  */
      value_range vr (TREE_TYPE (arg));
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  if (!get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	      || vr.varying_p ()
	      || vr.undefined_p ())
	    {
	      bool strict_overflow = false;
	      if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
		vr.set_nonzero (TREE_TYPE (arg));
	      else
		vr.set_varying (TREE_TYPE (arg));
	    }
	  gcc_assert (!vr.undefined_p ());
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align = BITS_PER_UNIT;

	  if (!vr.singleton_p ())
	    get_pointer_alignment_1 (arg, &align, &bitpos);

	  if (align > BITS_PER_UNIT
	      && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
	    {
	      /* Encode known alignment as known-zero low bits of the
		 pointer value.  */
	      unsigned prec = TYPE_PRECISION (TREE_TYPE (arg));
	      wide_int mask
		= wi::bit_and_not (wi::mask (prec, false, prec),
				   wide_int::from (align / BITS_PER_UNIT - 1,
						   prec, UNSIGNED));
	      wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
					       UNSIGNED);
	      irange_bitmask bm (value, mask);
	      vr.update_bitmask (bm);
	      ipa_set_jfunc_vr (jfunc, vr);
	    }
	  else if (!vr.varying_p ())
	    ipa_set_jfunc_vr (jfunc, vr);
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  if (param_type
	      && ipa_vr_supported_type_p (TREE_TYPE (arg))
	      && ipa_vr_supported_type_p (param_type)
	      && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	      && !vr.undefined_p ())
	    {
	      value_range resvr (vr);
	      range_cast (resvr, param_type);
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Now classify the argument itself into a jump function.  */
      arg = skip_a_safe_conversion_op (arg);
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg) && is_global_var (arg) && TREE_READONLY (arg)))
	{
	  ipa_set_jf_constant (jfunc, arg, cs);
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    {
	      tree pointee = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (pointee) == FUNCTION_DECL && !cs->callback
		  && cs->callee)
		{
		  /* Argument is a pointer to a function.  Look for a callback
		     attribute describing this argument.  */
		  tree callback_attr
		    = lookup_attribute (CALLBACK_ATTR_IDENT,
					DECL_ATTRIBUTES (cs->callee->decl));
		  for (; callback_attr;
		       callback_attr
			 = lookup_attribute (CALLBACK_ATTR_IDENT,
					     TREE_CHAIN (callback_attr)))
		    if (callback_get_fn_index (callback_attr) == n)
		      break;

		  /* If no callback attribute is found, check if the function is
		     a special case.  */
		  if (!callback_attr
		      && callback_is_special_cased (cs->callee->decl, call))
		    {
		      callback_attr
			= callback_special_case_attr (cs->callee->decl);
		      /* Check if the special attribute describes the correct
			 attribute, as a special cased function might have
			 multiple callbacks.  */
		      if (callback_get_fn_index (callback_attr) != n)
			callback_attr = NULL;
		    }

		  /* If a callback attribute describing this pointer is found,
		     create a callback edge to the pointee function to
		     allow for further optimizations.  */
		  if (callback_attr)
		    {
		      cgraph_node *kernel_node
			= cgraph_node::get_create (pointee);
		      unsigned callback_id = n;
		      cgraph_edge *cbe
			= cs->make_callback (kernel_node, callback_id);
		      init_callback_edge_summary (cbe, callback_attr);
		      callback_edges.safe_push (cbe);
		    }
		}
	    }
	}
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* A default definition of a parameter is a plain
		 pass-through.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      /* Otherwise try to derive an arithmetic pass-through or an
		 ancestor jump function from the defining statement.  */
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }

  if (!callback_edges.is_empty ())
    {
      /* For every callback edge, fetch jump functions of arguments
	 passed to them and copy them over to their respective summaries.
	 This avoids recalculating them for every callback edge, since their
	 arguments are just passed through.  */
      unsigned j;
      for (j = 0; j < callback_edges.length (); j++)
	{
	  cgraph_edge *callback_edge = callback_edges[j];
	  ipa_edge_args *cb_summary
	    = ipa_edge_args_sum->get_create (callback_edge);
	  auto_vec<int> arg_mapping
	    = callback_get_arg_mapping (callback_edge, cs);
	  unsigned i;
	  for (i = 0; i < arg_mapping.length (); i++)
	    {
	      if (arg_mapping[i] == -1)
		continue;
	      class ipa_jump_func *src
		= ipa_get_ith_jump_func (args, arg_mapping[i]);
	      class ipa_jump_func *dst = ipa_get_ith_jump_func (cb_summary, i);
	      ipa_duplicate_jump_function (cs, callback_edge, src, dst);
	    }
	}
    }

  /* Drop the context vector again if no argument had a useful one.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2738 :
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  FBI describes the body of the function being analyzed.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  /* Process all call graph edges whose call statements reside in BB.  */
  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto
	      && !gimple_call_fnspec (cs->call_stmt).known_p ()
	      && !callback_edge_callee_has_attr (cs))
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
2766 :
2767 : /* If REF is a memory access that loads a function pointer (but not a method
2768 : pointer) from a RECORD_TYPE, return true and store the type of the RECORD to
2769 : *REC_TYPE and the byte offset of the field to *FLD_OFFSET. Otherwise return
2770 : false. OHS es the "other hand side" which is used to check type
2771 : compatibility with field in question, when possible. */
2772 :
2773 : static bool
2774 118357 : is_func_ptr_from_record (tree ref, tree *rec_type, unsigned *fld_offset,
2775 : tree ohs)
2776 : {
2777 118369 : if (!POINTER_TYPE_P (TREE_TYPE (ref))
2778 118369 : || TREE_CODE (TREE_TYPE (TREE_TYPE (ref))) != FUNCTION_TYPE)
2779 : return false;
2780 :
2781 103239 : if (TREE_CODE (ref) == COMPONENT_REF
2782 103239 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
2783 : {
2784 55409 : gcc_assert (POINTER_TYPE_P (TREE_TYPE (ohs)));
2785 55409 : ohs = TREE_TYPE (TREE_TYPE (ohs));
2786 55409 : tree ftype = TREE_TYPE (TREE_OPERAND (ref, 1));
2787 55409 : if (!POINTER_TYPE_P (ftype))
2788 : return false;
2789 55409 : ftype = TREE_TYPE (ftype);
2790 55409 : if (!types_compatible_p (ohs, ftype))
2791 : return false;
2792 :
2793 55280 : tree tree_off = bit_position (TREE_OPERAND (ref, 1));
2794 55280 : if (!tree_fits_shwi_p (tree_off))
2795 : return false;
2796 55280 : HOST_WIDE_INT bit_offset = tree_to_shwi (tree_off);
2797 55280 : if (bit_offset % BITS_PER_UNIT)
2798 : return false;
2799 55280 : HOST_WIDE_INT unit_offset = bit_offset / BITS_PER_UNIT;
2800 55280 : if (unit_offset > UINT_MAX)
2801 : return false;
2802 55280 : *rec_type = TREE_TYPE (TREE_OPERAND (ref, 0));
2803 55280 : *fld_offset = unit_offset;
2804 55280 : return true;
2805 : }
2806 47830 : else if (TREE_CODE (ref) == MEM_REF
2807 4937 : && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
2808 4937 : && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0))))
2809 : == RECORD_TYPE)
2810 49980 : && tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
2811 : {
2812 2150 : HOST_WIDE_INT unit_offset = tree_to_shwi (TREE_OPERAND (ref, 1));
2813 2150 : if (unit_offset > UINT_MAX)
2814 : return false;
2815 2150 : *rec_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
2816 2150 : *fld_offset = unit_offset;
2817 2150 : return true;
2818 : }
2819 : return false;
2820 : }
2821 :
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, fld, ptr_field, delta_field;
  tree ref_field = NULL_TREE;
  tree ref_offset = NULL_TREE;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* Strip a COMPONENT_REF, remembering which field was accessed.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }

  if (TREE_CODE (rhs) == MEM_REF)
    {
      ref_offset = TREE_OPERAND (rhs, 1);
      /* A COMPONENT_REF on top of a MEM_REF with a non-zero offset is not
	 one of the patterns we recognize.  */
      if (ref_field && integer_nonzerop (ref_offset))
	return NULL_TREE;
    }
  else if (!ref_field)
    return NULL_TREE;

  if (TREE_CODE (rhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (rhs, 0)))
    {
      /* Load through an unmodified pointer parameter, i.e. a member pointer
	 passed by reference.  */
      rhs = TREE_OPERAND (rhs, 0);
      if (TREE_CODE (SSA_NAME_VAR (rhs)) != PARM_DECL
	  || !type_like_member_ptr_p (TREE_TYPE (TREE_TYPE (rhs)), &ptr_field,
				      &delta_field))
	return NULL_TREE;
    }
  else
    {
      /* Otherwise the base must be the PARM_DECL itself, i.e. a member
	 pointer passed by value.  */
      if (TREE_CODE (rhs) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR)
	rhs = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (TREE_CODE (rhs) != PARM_DECL
	  || !type_like_member_ptr_p (TREE_TYPE (rhs), &ptr_field,
				      &delta_field))
	return NULL_TREE;
    }

  /* Select which of the two member-pointer fields the caller asked for.  */
  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;

  /* Verify the access really refers to that field, either by field decl or,
     for a plain MEM_REF access, by byte offset.  */
  if (ref_field)
    {
      if (ref_field != fld)
	return NULL_TREE;
    }
  else if (!tree_int_cst_equal (byte_position (fld), ref_offset))
    return NULL_TREE;

  if (offset_p)
    *offset_p = int_bit_position (fld);
  return rhs;
}
2893 :
2894 : /* Returns true iff T is an SSA_NAME defined by a statement. */
2895 :
2896 : static bool
2897 3040 : ipa_is_ssa_with_stmt_def (tree t)
2898 : {
2899 3040 : if (TREE_CODE (t) == SSA_NAME
2900 3040 : && !SSA_NAME_IS_DEFAULT_DEF (t))
2901 : return true;
2902 : else
2903 0 : return false;
2904 : }
2905 :
2906 : /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2907 : (described by INFO). PARMS_AINFO is a pointer to a vector containing
2908 : intermediate information about each formal parameter. Currently it checks
2909 : whether the call calls a pointer that is a formal parameter and if so, the
2910 : parameter is marked with the called flag and an indirect call graph edge
2911 : describing the call is created. This is very simple for ordinary pointers
2912 : represented in SSA but not-so-nice when it comes to member pointers. The
2913 : ugly part of this function does nothing more than trying to match the
2914 : pattern of such a call. Look up the documentation of macro
2915 : TARGET_PTRMEMFUNC_VBIT_LOCATION for details. An example of such a pattern
2916 : is the gimple dump below, the call is on the last line:
2917 :
2918 : <bb 2>:
2919 : f$__delta_5 = f.__delta;
2920 : f$__pfn_24 = f.__pfn;
2921 :
2922 : or
2923 : <bb 2>:
2924 : f$__delta_5 = MEM[(struct *)&f];
2925 : f$__pfn_24 = MEM[(struct *)&f + 4B];
2926 :
2927 : and a few lines below:
2928 :
2929 : <bb 5>
2930 : D.2496_3 = (int) f$__pfn_24;
2931 : D.2497_4 = D.2496_3 & 1;
2932 : if (D.2497_4 != 0)
2933 : goto <bb 3>;
2934 : else
2935 : goto <bb 4>;
2936 :
2937 : <bb 6>:
2938 : D.2500_7 = (unsigned int) f$__delta_5;
2939 : D.2501_8 = &S + D.2500_7;
2940 : D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2941 : D.2503_10 = *D.2502_9;
2942 : D.2504_12 = f$__pfn_24 + -1;
2943 : D.2505_13 = (unsigned int) D.2504_12;
2944 : D.2506_14 = D.2503_10 + D.2505_13;
2945 : D.2507_15 = *D.2506_14;
2946 : iftmp.11_16 = (String:: *) D.2507_15;
2947 :
2948 : <bb 7>:
2949 : # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2950 : D.2500_19 = (unsigned int) f$__delta_5;
2951 : D.2508_20 = &S + D.2500_19;
2952 : D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2953 :
2954 : Such patterns are results of simple calls to a member pointer:
2955 :
2956 : int doprinting (int (MyString::* f)(int) const)
2957 : {
2958 : MyString S ("somestring");
2959 :
2960 : return (S.*f)(4);
2961 : }
2962 :
2963 : Moreover, the function also looks for called pointers loaded from aggregates
2964 : passed by value or reference. */
2965 :
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* The simple case: the called SSA name is directly an unmodified formal
     parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	{
	  cgraph_edge *cs = fbi->node->get_edge (call);
	  cgraph_simple_indirect_info *sii =
	    as_a <cgraph_simple_indirect_info *> (cs->indirect_info);
	  sii->param_index = index;
	  gcc_assert (!sii->agg_contents && !sii->member_ptr);
	  ipa_set_param_used_by_indirect_call (info, index, true);
	}
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def))
    {
      /* The called value is loaded from memory.  Note a load from a record
	 field and/or from an aggregate passed in a parameter.  */
      cgraph_edge *cs = fbi->node->get_edge (call);
      cgraph_simple_indirect_info *sii =
	as_a <cgraph_simple_indirect_info *> (cs->indirect_info);
      tree rectype;
      unsigned fldoff;
      if (is_func_ptr_from_record (gimple_assign_rhs1 (def), &rectype, &fldoff,
				   target))
	{
	  sii->fnptr_loaded_from_record = 1;
	  sii->fld_offset = fldoff;
	  sii->rec_type = rectype;
	}
      if (ipa_load_from_parm_agg (fbi, info->descriptors, def,
				  gimple_assign_rhs1 (def), &index, &offset,
				  NULL, &by_ref, &guaranteed_unmodified))
	{
	  sii->param_index = index;
	  sii->offset = offset;
	  sii->agg_contents = 1;
	  sii->by_ref = by_ref;
	  sii->guaranteed_unmodified = guaranteed_unmodified;
	  ipa_set_param_used_by_indirect_call (info, index, true);
	  return;
	}
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  /* BB will be the block that loads the pfn field directly, VIRT_BB the one
     that computes the virtual variant.  */
  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Only one of the PHI arguments may load the field directly.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_succ (virt_bb) != join)
    return;


  if (single_pred (virt_bb) != bb)
    {
      /* In cases when the distinction between a normal and a virtual
	 function is encoded in the delta field, the load of the
	 actual non-virtual function pointer can be in its own BB. */

      if (!single_pred_p (bb) || !single_succ_p (bb))
	return;
      bb = single_pred (bb);
      if (bb != single_pred (virt_bb))
	return;
    }

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
  if (!branch)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be computed as pfn & 1.  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Look through a possible conversion of the pfn before the AND.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter
     as the called value.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  if (TREE_CODE (rec) == SSA_NAME)
    {
      /* Member pointer passed by reference; the pointed-to data must not be
	 modified before the call.  */
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (rec));
      if (index < 0
	  || !parm_ref_data_preserved_p (fbi, index, call,
					 gimple_assign_rhs1 (def)))
	return;
      by_ref = true;
    }
  else
    {
      /* Member pointer passed by value.  */
      index = ipa_get_param_decl_index (info, rec);
      if (index < 0
	  || !parm_preserved_before_stmt_p (fbi, index, call, rec))
	return;
      by_ref = false;
    }

  /* Pattern matched, record the findings on the indirect edge.  */
  cgraph_edge *cs = fbi->node->get_edge (call);
  cgraph_simple_indirect_info *sii =
    as_a <cgraph_simple_indirect_info *> (cs->indirect_info);
  sii->param_index = index;
  sii->offset = offset;
  sii->agg_contents = 1;
  sii->member_ptr = 1;
  sii->by_ref = by_ref;
  sii->guaranteed_unmodified = 1;
  ipa_set_param_used_by_indirect_call (info, index, true);
  return;
}
3153 :
3154 : /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
3155 : object referenced in the expression is a formal parameter of the caller
3156 : FBI->node (described by FBI->info), create a call note for the
3157 : statement. */
3158 :
3159 : static void
3160 24156 : ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
3161 : gcall *call, tree target)
3162 : {
3163 24156 : tree obj = OBJ_TYPE_REF_OBJECT (target);
3164 24156 : int index;
3165 24156 : HOST_WIDE_INT anc_offset;
3166 :
3167 24156 : if (!flag_devirtualize)
3168 14650 : return;
3169 :
3170 23886 : if (TREE_CODE (obj) != SSA_NAME)
3171 : return;
3172 :
3173 23527 : class ipa_node_params *info = fbi->info;
3174 23527 : if (SSA_NAME_IS_DEFAULT_DEF (obj))
3175 : {
3176 8757 : if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
3177 : return;
3178 :
3179 8757 : anc_offset = 0;
3180 8757 : index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
3181 8757 : gcc_assert (index >= 0);
3182 8757 : if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
3183 : call))
3184 : return;
3185 : }
3186 : else
3187 : {
3188 14770 : gimple *stmt = SSA_NAME_DEF_STMT (obj);
3189 14770 : tree expr;
3190 :
3191 14770 : expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
3192 14770 : if (!expr)
3193 : return;
3194 749 : index = ipa_get_param_decl_index (info,
3195 749 : SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
3196 749 : gcc_assert (index >= 0);
3197 749 : if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
3198 : call, anc_offset))
3199 : return;
3200 : }
3201 :
3202 9506 : cgraph_edge *cs = fbi->node->get_edge (call);
3203 9506 : cgraph_polymorphic_indirect_info *pii =
3204 9506 : as_a <cgraph_polymorphic_indirect_info *> (cs->indirect_info);
3205 9506 : pii->param_index = index;
3206 9506 : pii->offset = anc_offset;
3207 9506 : gcc_assert (pii->otr_token == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
3208 9506 : gcc_assert (pii->otr_type = obj_type_ref_class (target));
3209 9506 : ipa_set_param_used_by_indirect_call (info, index, true);
3210 9506 : ipa_set_param_used_by_polymorphic_call (info, index, true);
3211 : }
3212 :
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  /* Only calls through an SSA name or an OBJ_TYPE_REF are interesting.  */
  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  /* For polymorphic calls, determine the dynamic type context of the object
     and record whether the vtable pointer may have changed since.  */
  cgraph_polymorphic_indirect_info *pii;
  if (flag_devirtualize
      && (pii
	  = dyn_cast <cgraph_polymorphic_indirect_info *> (cs->indirect_info)))
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (pii->otr_type == obj_type_ref_class (target));
      gcc_checking_assert (pii->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      pii->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      pii->context = context;
    }

  /* Dispatch to the analysis matching the kind of indirect call.  */
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
3260 :
3261 : /* Store that that there was a store of FN to a record of type REC_TYPE and
3262 : FLD_OFFSET. */
3263 :
3264 : static void
3265 63389 : note_fnptr_in_record (tree rec_type, unsigned fld_offset, tree fn)
3266 : {
3267 63389 : gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3268 63389 : gcc_assert (TREE_CODE (rec_type) == RECORD_TYPE);
3269 63389 : if (!noted_fnptrs_in_records)
3270 6760 : noted_fnptrs_in_records = hash_table<noted_fnptr_hasher>::create_ggc (37);
3271 :
3272 63389 : noted_fnptr_store repr;
3273 63389 : repr.rec_type = rec_type;
3274 63389 : repr.fld_offset = fld_offset;
3275 :
3276 63389 : noted_fnptr_store **slot = noted_fnptrs_in_records->find_slot (&repr,
3277 : NO_INSERT);
3278 63389 : if (slot)
3279 : {
3280 7570 : if ((*slot)->fn && (*slot)->fn != fn)
3281 803 : (*slot)->fn = nullptr;
3282 7570 : return;
3283 : }
3284 :
3285 55819 : slot = noted_fnptrs_in_records->find_slot (&repr, INSERT);
3286 55819 : *slot = ggc_cleared_alloc<noted_fnptr_store> ();
3287 55819 : (*slot)->rec_type = rec_type;
3288 55819 : (*slot)->fn = fn;
3289 55819 : (*slot)->fld_offset = fld_offset;
3290 :
3291 55819 : return;
3292 : }
3293 :
3294 : /* Dump contents of noted_fnptrs_in_records to F in humad readable form. */
3295 :
3296 : void DEBUG_FUNCTION
3297 41 : ipa_dump_noted_record_fnptrs (FILE *f)
3298 : {
3299 41 : if (!noted_fnptrs_in_records)
3300 : {
3301 38 : fprintf (f, "No noted function pointers stored in records.\n\n");
3302 38 : return;
3303 : }
3304 :
3305 3 : fprintf (f, "Noted function pointers stored in records:\n");
3306 7 : for (auto iter = noted_fnptrs_in_records->begin ();
3307 7 : iter != noted_fnptrs_in_records->end ();
3308 4 : ++iter)
3309 : {
3310 4 : const noted_fnptr_store *elem = *iter;
3311 4 : fprintf (f, " Type:");
3312 4 : print_generic_expr (f, elem->rec_type);
3313 4 : fprintf (f, ", offset %ul, function: ", elem->fld_offset);
3314 4 : print_generic_expr (f, elem->fn);
3315 4 : fprintf (f, "\n");
3316 : }
3317 3 : fprintf (f, "\n");
3318 : }
3319 :
3320 : /* Dump contents of noted_fnptrs_in_records to stderr in humad readable
3321 : form. */
3322 :
3323 : void DEBUG_FUNCTION
3324 0 : ipa_debug_noted_record_fnptrs (void)
3325 : {
3326 0 : ipa_dump_noted_record_fnptrs (stderr);
3327 0 : }
3328 :
3329 :
3330 : /* If we have noticed a single function pointer stored into a record of type
3331 : REC_TYPE at the given FLD_OFFSET (measured in bytes), return its
3332 : declaration. Otherwise return NULL_TREE. */
3333 :
3334 : tree
3335 37249 : ipa_single_noted_fnptr_in_record (tree rec_type, unsigned fld_offset)
3336 : {
3337 37249 : if (!noted_fnptrs_in_records)
3338 : return NULL_TREE;
3339 :
3340 35149 : noted_fnptr_store repr;
3341 35149 : repr.rec_type = rec_type;
3342 35149 : repr.fld_offset = fld_offset;
3343 :
3344 35149 : noted_fnptr_store **slot = noted_fnptrs_in_records->find_slot (&repr,
3345 : NO_INSERT);
3346 35149 : if (!slot)
3347 : return NULL_TREE;
3348 3293 : return (*slot)->fn;
3349 : }
3350 :
3351 : /* Free the hash table storing the information about function pointers stored
3352 : to a particular position in record typed strucutres. */
3353 :
3354 : void
3355 128621 : ipa_free_noted_fnptr_calls ()
3356 : {
3357 128621 : if (noted_fnptrs_in_records)
3358 : {
3359 6397 : noted_fnptrs_in_records->empty ();
3360 6397 : noted_fnptrs_in_records = nullptr;
3361 : }
3362 128621 : }
3363 :
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Also note any stores of function
   pointers to record typed memory.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
  else if (gimple_assign_single_p (stmt)
	   && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0))
	       == FUNCTION_DECL))
    {
      /* This is a store of &function.  If the destination is a field of a
	 record, note the function pointer store (these notes can later be
	 queried with ipa_single_noted_fnptr_in_record).  */
      tree rec_type;
      unsigned fld_offset;
      if (is_func_ptr_from_record (gimple_assign_lhs (stmt), &rec_type,
				   &fld_offset, gimple_assign_rhs1 (stmt)))
	note_fnptr_in_record (rec_type, fld_offset,
			      TREE_OPERAND (gimple_assign_rhs1 (stmt), 0));
    }
}
3386 :
3387 : /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
3388 : If OP is a parameter declaration, mark it as used in the info structure
3389 : passed in DATA. */
3390 :
3391 : static bool
3392 19509863 : visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
3393 : {
3394 19509863 : class ipa_node_params *info = (class ipa_node_params *) data;
3395 :
3396 19509863 : op = get_base_address (op);
3397 19509863 : if (op
3398 19509863 : && TREE_CODE (op) == PARM_DECL)
3399 : {
3400 460988 : int index = ipa_get_param_decl_index (info, op);
3401 460988 : gcc_assert (index >= 0);
3402 460988 : ipa_set_param_used (info, index, true);
3403 : }
3404 :
3405 19509863 : return false;
3406 : }
3407 :
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      /* Debug statements must not influence the analysis.  */
      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      /* Mark any parameter whose load, store or address-taking appears in
	 STMT as used.  */
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  /* PHI nodes can also refer to parameters, process them too.  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
3436 :
3437 : /* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
3438 :
3439 : static bool
3440 4141313 : load_from_dereferenced_name (tree expr, tree name)
3441 : {
3442 4141313 : tree base = get_base_address (expr);
3443 4141313 : return (TREE_CODE (base) == MEM_REF
3444 4141313 : && TREE_OPERAND (base, 0) == name);
3445 : }
3446 :
/* Calculate controlled uses of parameters of NODE.  A parameter's uses are
   "controlled" when every one of them is either a direct call argument (or
   the called function itself) or a load through a dereference of the
   parameter; anything else makes the count IPA_UNDESCRIBED_USE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int call_uses = 0;
      bool load_dereferenced = false;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      gimple *stmt;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
		{
		  if (is_gimple_debug (stmt))
		    continue;

		  /* Count all uses of the default def within STMT.  */
		  int all_stmt_uses = 0;
		  use_operand_p use_p;
		  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
		    all_stmt_uses++;

		  if (is_gimple_call (stmt))
		    {
		      /* Internal calls cannot be described.  */
		      if (gimple_call_internal_p (stmt))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      /* Count the uses we can describe: the called function
			 itself, direct arguments and loads through a
			 dereference of the parameter among the arguments.  */
		      int recognized_stmt_uses;
		      if (gimple_call_fn (stmt) == ddef)
			recognized_stmt_uses = 1;
		      else
			recognized_stmt_uses = 0;
		      unsigned arg_count = gimple_call_num_args (stmt);
		      for (unsigned i = 0; i < arg_count; i++)
			{
			  tree arg = gimple_call_arg (stmt, i);
			  if (arg == ddef)
			    recognized_stmt_uses++;
			  else if (load_from_dereferenced_name (arg, ddef))
			    {
			      load_dereferenced = true;
			      recognized_stmt_uses++;
			    }
			}

		      /* Any use we could not attribute makes the parameter
			 undescribable.  */
		      if (recognized_stmt_uses != all_stmt_uses)
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      if (call_uses >= 0)
			call_uses += all_stmt_uses;
		    }
		  else if (gimple_assign_single_p (stmt))
		    {
		      /* Outside of calls, only a single load through a
			 dereference of the parameter is acceptable.  */
		      tree rhs = gimple_assign_rhs1 (stmt);
		      if (all_stmt_uses != 1
			  || !load_from_dereferenced_name (rhs, ddef))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      load_dereferenced = true;
		    }
		  else
		    {
		      call_uses = IPA_UNDESCRIBED_USE;
		      break;
		    }
		}
	    }
	  else
	    call_uses = 0;
	}
      else
	call_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, call_uses);
      ipa_set_param_load_dereferenced (info, i, load_dereferenced);
    }
}
3542 :
3543 : /* Free stuff in BI. */
3544 :
3545 : static void
3546 62999796 : free_ipa_bb_info (struct ipa_bb_info *bi)
3547 : {
3548 0 : bi->cg_edges.release ();
3549 62999796 : bi->param_aa_statuses.release ();
3550 0 : }
3551 :
/* Dominator walker driving the analysis.  For each visited basic block it
   analyzes parameter uses and computes jump functions (see
   before_dom_children).  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  edge before_dom_children (basic_block) final override;

private:
  /* Body information of the function being analyzed.  */
  struct ipa_func_body_info *m_fbi;
};
3565 :
3566 : edge
3567 10962339 : analysis_dom_walker::before_dom_children (basic_block bb)
3568 : {
3569 10962339 : ipa_analyze_params_uses_in_bb (m_fbi, bb);
3570 10962339 : ipa_compute_jump_functions_for_bb (m_fbi, bb);
3571 10962339 : return NULL;
3572 : }
3573 :
3574 : /* Release body info FBI. */
3575 :
3576 : void
3577 8486059 : ipa_release_body_info (struct ipa_func_body_info *fbi)
3578 : {
3579 8486059 : int i;
3580 8486059 : struct ipa_bb_info *bi;
3581 :
3582 71461693 : FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3583 125951268 : free_ipa_bb_info (bi);
3584 8486059 : fbi->bb_infos.release ();
3585 8486059 : }
3586 :
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = ipa_node_params_sum->get_create (node);

  /* Analyze each node at most once.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  /* Skip functions whose options forbid IPA analysis and functions with more
     parameters than the jump function machinery can index.  */
  if (ipa_func_spec_opts_forbid_analysis_p (node)
      || (count_formal_params (node->decl)
	  >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
    {
      gcc_assert (!ipa_get_param_count (info));
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = info;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  /* Budget limiting alias-analysis walks during the analysis.  */
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  /* Distribute direct and indirect call graph edges into per-BB vectors so
     the dominator walk can process each block's calls locally.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  /* Walk the dominator tree with ranger enabled for the duration.  */
  enable_ranger (cfun, false);
  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  disable_ranger (cfun);

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
3647 : /* Analyze NODE and note any function pointers in record-typed static
3648 : initializers.
3649 :
3650 : TODO: The current implementation does not traverse the initializers to scan
3651 : records nested inside other types. It should catch the most basic way of
3652 : writing "virtual functions" in C but can be extended, of course.
3653 : */
3654 :
3655 : void
3656 1668342 : ipa_analyze_var_static_initializer (varpool_node *node)
3657 : {
3658 1668342 : tree decl = node->decl;
3659 1668342 : tree rec_type = TREE_TYPE (decl);
3660 1668342 : if (TREE_CODE (rec_type) != RECORD_TYPE
3661 1668342 : || TREE_CODE (DECL_INITIAL (decl)) != CONSTRUCTOR)
3662 : return;
3663 :
3664 : unsigned ix;
3665 : tree index, val;
3666 3833046 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (DECL_INITIAL (decl)), ix, index,
3667 : val)
3668 : {
3669 5347821 : if (TREE_CODE (val) != ADDR_EXPR
3670 1001923 : || TREE_CODE (TREE_OPERAND (val, 0)) != FUNCTION_DECL
3671 : /* ObjC can produce constructor elements with NULL indices. */
3672 2751237 : || !index)
3673 2648135 : continue;
3674 51551 : HOST_WIDE_INT elt_offset = int_bit_position (index);
3675 51551 : if ((elt_offset % BITS_PER_UNIT) != 0)
3676 0 : continue;
3677 51551 : elt_offset = elt_offset / BITS_PER_UNIT;
3678 51551 : if (elt_offset > UINT_MAX)
3679 0 : continue;
3680 51551 : note_fnptr_in_record (rec_type, elt_offset, TREE_OPERAND (val, 0));
3681 : }
3682 : }
3683 :
3684 : /* Update the jump functions associated with call graph edge E when the call
3685 : graph edge CS is being inlined, assuming that E->caller is already (possibly
3686 : indirectly) inlined into CS->callee and that E has not been inlined. */
3687 :
3688 : static void
3689 2646541 : update_jump_functions_after_inlining (struct cgraph_edge *cs,
3690 : struct cgraph_edge *e)
3691 : {
3692 2646541 : ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3693 2646541 : ipa_edge_args *args = ipa_edge_args_sum->get (e);
3694 2646541 : if (!args)
3695 : return;
3696 1484773 : ipa_node_params *old_inline_root_info = ipa_node_params_sum->get (cs->callee);
3697 1484773 : ipa_node_params *new_inline_root_info
3698 1484773 : = ipa_node_params_sum->get (cs->caller->inlined_to
3699 : ? cs->caller->inlined_to : cs->caller);
3700 1484773 : int count = ipa_get_cs_argument_count (args);
3701 1484773 : int i;
3702 :
3703 4396651 : for (i = 0; i < count; i++)
3704 : {
3705 2911878 : struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3706 2911878 : class ipa_polymorphic_call_context *dst_ctx
3707 2911878 : = ipa_get_ith_polymorhic_call_context (args, i);
3708 :
3709 2911878 : if (dst->agg.items)
3710 : {
3711 : struct ipa_agg_jf_item *item;
3712 : int j;
3713 :
3714 252476 : FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3715 : {
3716 171860 : int dst_fid;
3717 171860 : struct ipa_jump_func *src;
3718 :
3719 304816 : if (item->jftype != IPA_JF_PASS_THROUGH
3720 171860 : && item->jftype != IPA_JF_LOAD_AGG)
3721 132956 : continue;
3722 :
3723 38904 : dst_fid = item->value.pass_through.formal_id;
3724 77805 : if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3725 : {
3726 3 : item->jftype = IPA_JF_UNKNOWN;
3727 3 : continue;
3728 : }
3729 :
3730 38901 : item->value.pass_through.formal_id = -1;
3731 38901 : src = ipa_get_ith_jump_func (top, dst_fid);
3732 38901 : if (src->type == IPA_JF_CONST)
3733 : {
3734 2424 : if (item->jftype == IPA_JF_PASS_THROUGH
3735 2149 : && item->value.pass_through.operation == NOP_EXPR)
3736 : {
3737 2086 : item->jftype = IPA_JF_CONST;
3738 2086 : item->value.constant = src->value.constant.value;
3739 2086 : continue;
3740 : }
3741 : }
3742 36477 : else if (src->type == IPA_JF_PASS_THROUGH
3743 5484 : && src->value.pass_through.operation == NOP_EXPR)
3744 : {
3745 5336 : if (item->jftype == IPA_JF_PASS_THROUGH
3746 3643 : || !item->value.load_agg.by_ref
3747 2435 : || src->value.pass_through.agg_preserved)
3748 3697 : item->value.pass_through.formal_id
3749 3697 : = src->value.pass_through.formal_id;
3750 : }
3751 31141 : else if (src->type == IPA_JF_ANCESTOR)
3752 : {
3753 4639 : if (item->jftype == IPA_JF_PASS_THROUGH)
3754 : {
3755 994 : if (!src->value.ancestor.offset)
3756 685 : item->value.pass_through.formal_id
3757 685 : = src->value.ancestor.formal_id;
3758 : }
3759 3645 : else if (src->value.ancestor.agg_preserved)
3760 : {
3761 1439 : gcc_checking_assert (item->value.load_agg.by_ref);
3762 :
3763 1439 : item->value.pass_through.formal_id
3764 1439 : = src->value.ancestor.formal_id;
3765 1439 : item->value.load_agg.offset
3766 1439 : += src->value.ancestor.offset;
3767 : }
3768 : }
3769 :
3770 36815 : if (item->value.pass_through.formal_id < 0)
3771 30994 : item->jftype = IPA_JF_UNKNOWN;
3772 : }
3773 : }
3774 :
3775 2911878 : if (!top)
3776 : {
3777 13241 : ipa_set_jf_unknown (dst);
3778 13241 : continue;
3779 : }
3780 :
3781 2898637 : if (dst->type == IPA_JF_ANCESTOR)
3782 : {
3783 131790 : struct ipa_jump_func *src;
3784 131790 : int dst_fid = dst->value.ancestor.formal_id;
3785 131790 : class ipa_polymorphic_call_context *src_ctx
3786 131790 : = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3787 :
3788 : /* Variable number of arguments can cause havoc if we try to access
3789 : one that does not exist in the inlined edge. So make sure we
3790 : don't. */
3791 263580 : if (dst_fid >= ipa_get_cs_argument_count (top))
3792 : {
3793 0 : ipa_set_jf_unknown (dst);
3794 0 : continue;
3795 : }
3796 :
3797 131790 : src = ipa_get_ith_jump_func (top, dst_fid);
3798 :
3799 131790 : if (src_ctx && !src_ctx->useless_p ())
3800 : {
3801 42399 : class ipa_polymorphic_call_context ctx = *src_ctx;
3802 :
3803 : /* TODO: Make type preserved safe WRT contexts. */
3804 42399 : if (!ipa_get_jf_ancestor_type_preserved (dst))
3805 28999 : ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3806 42399 : ctx.offset_by (dst->value.ancestor.offset);
3807 84798 : if (!ctx.useless_p ())
3808 : {
3809 37072 : if (!dst_ctx)
3810 : {
3811 4476 : vec_safe_grow_cleared (args->polymorphic_call_contexts,
3812 : count, true);
3813 4476 : dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3814 : }
3815 :
3816 37072 : dst_ctx->combine_with (ctx);
3817 : }
3818 : }
3819 :
3820 : /* Parameter and argument in ancestor jump function must be pointer
3821 : type, which means access to aggregate must be by-reference. */
3822 131790 : gcc_assert (!src->agg.items || src->agg.by_ref);
3823 :
3824 131790 : if (src->agg.items && dst->value.ancestor.agg_preserved)
3825 : {
3826 1588 : struct ipa_agg_jf_item *item;
3827 1588 : int j;
3828 :
3829 : /* Currently we do not produce clobber aggregate jump functions,
3830 : replace with merging when we do. */
3831 1588 : gcc_assert (!dst->agg.items);
3832 :
3833 1588 : dst->agg.items = vec_safe_copy (src->agg.items);
3834 1588 : dst->agg.by_ref = src->agg.by_ref;
3835 5451 : FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3836 3863 : item->offset -= dst->value.ancestor.offset;
3837 : }
3838 :
3839 131790 : if (src->type == IPA_JF_PASS_THROUGH
3840 23741 : && src->value.pass_through.operation == NOP_EXPR)
3841 : {
3842 23737 : dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3843 23737 : dst->value.ancestor.agg_preserved &=
3844 23737 : src->value.pass_through.agg_preserved;
3845 : }
3846 108053 : else if (src->type == IPA_JF_ANCESTOR)
3847 : {
3848 8141 : dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3849 8141 : dst->value.ancestor.offset += src->value.ancestor.offset;
3850 8141 : dst->value.ancestor.agg_preserved &=
3851 8141 : src->value.ancestor.agg_preserved;
3852 8141 : dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3853 : }
3854 : else
3855 99912 : ipa_set_jf_unknown (dst);
3856 : }
3857 2766847 : else if (dst->type == IPA_JF_PASS_THROUGH)
3858 : {
3859 780267 : struct ipa_jump_func *src;
3860 : /* We must check range due to calls with variable number of arguments
3861 : and we cannot combine jump functions with operations. */
3862 780267 : if (dst->value.pass_through.operation == NOP_EXPR
3863 780267 : && (top && dst->value.pass_through.formal_id
3864 747226 : < ipa_get_cs_argument_count (top)))
3865 : {
3866 747212 : int dst_fid = dst->value.pass_through.formal_id;
3867 747212 : src = ipa_get_ith_jump_func (top, dst_fid);
3868 747212 : bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3869 747212 : class ipa_polymorphic_call_context *src_ctx
3870 856796 : = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3871 :
3872 109584 : if (src_ctx && !src_ctx->useless_p ())
3873 : {
3874 62709 : class ipa_polymorphic_call_context ctx = *src_ctx;
3875 :
3876 : /* TODO: Make type preserved safe WRT contexts. */
3877 62709 : if (!ipa_get_jf_pass_through_type_preserved (dst))
3878 27374 : ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3879 125418 : if (!ctx.useless_p ())
3880 : {
3881 59909 : if (!dst_ctx)
3882 : {
3883 11952 : vec_safe_grow_cleared (args->polymorphic_call_contexts,
3884 : count, true);
3885 11952 : dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3886 : }
3887 59909 : dst_ctx->combine_with (ctx);
3888 : }
3889 : }
3890 747212 : switch (src->type)
3891 : {
3892 319708 : case IPA_JF_UNKNOWN:
3893 319708 : ipa_set_jf_unknown (dst);
3894 319708 : break;
3895 153252 : case IPA_JF_CONST:
3896 153252 : ipa_convert_prop_cst_jf (dst, src,
3897 : ipa_get_type (old_inline_root_info,
3898 : dst_fid));
3899 153252 : break;
3900 :
3901 251345 : case IPA_JF_PASS_THROUGH:
3902 251345 : {
3903 251345 : int formal_id = ipa_get_jf_pass_through_formal_id (src);
3904 251345 : enum tree_code operation;
3905 251345 : operation = ipa_get_jf_pass_through_operation (src);
3906 :
3907 251345 : tree old_ir_ptype = ipa_get_type (old_inline_root_info,
3908 : dst_fid);
3909 251345 : tree new_ir_ptype = ipa_get_type (new_inline_root_info,
3910 : formal_id);
3911 251345 : if (!useless_type_conversion_p (old_ir_ptype, new_ir_ptype))
3912 : {
3913 : /* Jump-function construction now permits type-casts
3914 : from an integer to another if the latter can hold
3915 : all values or has at least the same precision.
3916 : However, as we're combining multiple pass-through
3917 : functions together, we are losing information about
3918 : signedness and thus if conversions should sign or
3919 : zero extend. Therefore we must prevent combining
3920 : such jump-function if signednesses do not match. */
3921 1757 : if (!INTEGRAL_TYPE_P (old_ir_ptype)
3922 919 : || !INTEGRAL_TYPE_P (new_ir_ptype)
3923 1838 : || (TYPE_UNSIGNED (new_ir_ptype)
3924 919 : != TYPE_UNSIGNED (old_ir_ptype)))
3925 : {
3926 838 : ipa_set_jf_unknown (dst);
3927 838 : continue;
3928 : }
3929 : }
3930 :
3931 250507 : if (operation == NOP_EXPR)
3932 : {
3933 249607 : bool agg_p;
3934 499214 : agg_p = dst_agg_p
3935 249607 : && ipa_get_jf_pass_through_agg_preserved (src);
3936 249607 : ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3937 : }
3938 900 : else if (TREE_CODE_CLASS (operation) == tcc_unary)
3939 : {
3940 8 : tree op_t = ipa_get_jf_pass_through_op_type (src);
3941 8 : ipa_set_jf_unary_pass_through (dst, formal_id, operation,
3942 : op_t);
3943 : }
3944 : else
3945 : {
3946 892 : tree operand = ipa_get_jf_pass_through_operand (src);
3947 892 : tree op_t = ipa_get_jf_pass_through_op_type (src);
3948 892 : ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3949 : operation, op_t);
3950 : }
3951 : break;
3952 : }
3953 22907 : case IPA_JF_ANCESTOR:
3954 22907 : {
3955 22907 : bool agg_p;
3956 45814 : agg_p = dst_agg_p
3957 22907 : && ipa_get_jf_ancestor_agg_preserved (src);
3958 22907 : ipa_set_ancestor_jf (dst,
3959 : ipa_get_jf_ancestor_offset (src),
3960 : ipa_get_jf_ancestor_formal_id (src),
3961 : agg_p,
3962 22907 : ipa_get_jf_ancestor_keep_null (src));
3963 22907 : break;
3964 : }
3965 0 : default:
3966 0 : gcc_unreachable ();
3967 : }
3968 :
3969 746374 : if (src->m_vr && src->m_vr->known_p ())
3970 : {
3971 506237 : value_range svr (src->m_vr->type ());
3972 506237 : if (!dst->m_vr || !dst->m_vr->known_p ())
3973 216900 : ipa_set_jfunc_vr (dst, *src->m_vr);
3974 289337 : else if (ipa_vr_operation_and_type_effects (svr, *src->m_vr,
3975 : NOP_EXPR,
3976 289337 : dst->m_vr->type (),
3977 289337 : src->m_vr->type ()))
3978 : {
3979 289329 : value_range dvr;
3980 289329 : dst->m_vr->get_vrange (dvr);
3981 289329 : dvr.intersect (svr);
3982 289329 : if (!dvr.undefined_p ())
3983 279172 : ipa_set_jfunc_vr (dst, dvr);
3984 289329 : }
3985 506237 : }
3986 :
3987 746374 : if (src->agg.items
3988 30580 : && (dst_agg_p || !src->agg.by_ref))
3989 : {
3990 : /* Currently we do not produce clobber aggregate jump
3991 : functions, replace with merging when we do. */
3992 23054 : gcc_assert (!dst->agg.items);
3993 :
3994 23054 : dst->agg.by_ref = src->agg.by_ref;
3995 23054 : dst->agg.items = vec_safe_copy (src->agg.items);
3996 : }
3997 : }
3998 : else
3999 33055 : ipa_set_jf_unknown (dst);
4000 : }
4001 : }
4002 : }
4003 :
4004 : /* If TARGET is an addr_expr of a function declaration, make it the
4005 : (SPECULATIVE)destination of an indirect edge IE and return the edge.
4006 : Otherwise, return NULL. */
4007 :
4008 : struct cgraph_edge *
4009 3995 : ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
4010 : bool speculative)
4011 : {
4012 3995 : struct cgraph_node *callee;
4013 3995 : bool unreachable = false;
4014 :
4015 3995 : if (TREE_CODE (target) == ADDR_EXPR)
4016 1323 : target = TREE_OPERAND (target, 0);
4017 3995 : if (TREE_CODE (target) != FUNCTION_DECL)
4018 : {
4019 17 : target = canonicalize_constructor_val (target, NULL);
4020 17 : if (!target || TREE_CODE (target) != FUNCTION_DECL)
4021 : {
4022 17 : cgraph_simple_indirect_info *sii
4023 17 : = dyn_cast <cgraph_simple_indirect_info *> (ie->indirect_info);
4024 : /* Member pointer call that goes through a VMT lookup. */
4025 17 : if ((sii && sii->member_ptr)
4026 : /* Or if target is not an invariant expression and we do not
4027 : know if it will evaulate to function at runtime.
4028 : This can happen when folding through &VAR, where &VAR
4029 : is IP invariant, but VAR itself is not.
4030 :
4031 : TODO: It seems that we may try to fold the expression and see
4032 : if VAR is readonly. */
4033 11 : || !is_gimple_ip_invariant (target))
4034 : {
4035 6 : if (dump_enabled_p ())
4036 : {
4037 0 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
4038 : "discovered direct call non-invariant %s\n",
4039 0 : ie->caller->dump_name ());
4040 : }
4041 6 : return NULL;
4042 : }
4043 :
4044 :
4045 11 : if (dump_enabled_p ())
4046 : {
4047 0 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
4048 : "discovered direct call to non-function in %s, "
4049 : "making it __builtin_unreachable\n",
4050 0 : ie->caller->dump_name ());
4051 : }
4052 :
4053 11 : target = builtin_decl_unreachable ();
4054 11 : callee = cgraph_node::get_create (target);
4055 11 : unreachable = true;
4056 11 : }
4057 : else
4058 0 : callee = cgraph_node::get (target);
4059 : }
4060 : else
4061 3978 : callee = cgraph_node::get (target);
4062 :
4063 : /* Because may-edges are not explicitly represented and vtable may be external,
4064 : we may create the first reference to the object in the unit. */
4065 3989 : if (!callee || callee->inlined_to)
4066 : {
4067 :
4068 : /* We are better to ensure we can refer to it.
4069 : In the case of static functions we are out of luck, since we already
4070 : removed its body. In the case of public functions we may or may
4071 : not introduce the reference. */
4072 0 : if (!canonicalize_constructor_val (target, NULL)
4073 0 : || !TREE_PUBLIC (target))
4074 : {
4075 0 : if (dump_file)
4076 0 : fprintf (dump_file, "ipa-prop: Discovered call to a known target "
4077 : "(%s -> %s) but cannot refer to it. Giving up.\n",
4078 0 : ie->caller->dump_name (),
4079 0 : ie->callee->dump_name ());
4080 0 : return NULL;
4081 : }
4082 0 : callee = cgraph_node::get_create (target);
4083 : }
4084 :
4085 : /* If the edge is already speculated. */
4086 3989 : if (speculative && ie->speculative)
4087 : {
4088 0 : if (dump_file)
4089 : {
4090 0 : cgraph_edge *e2 = ie->speculative_call_for_target (callee);
4091 0 : if (!e2)
4092 : {
4093 0 : if (dump_file)
4094 0 : fprintf (dump_file, "ipa-prop: Discovered call to a "
4095 : "speculative target (%s -> %s) but the call is "
4096 : "already speculated to different target. "
4097 : "Giving up.\n",
4098 0 : ie->caller->dump_name (), callee->dump_name ());
4099 : }
4100 : else
4101 : {
4102 0 : if (dump_file)
4103 0 : fprintf (dump_file,
4104 : "ipa-prop: Discovered call to a speculative target "
4105 : "(%s -> %s) this agree with previous speculation.\n",
4106 0 : ie->caller->dump_name (), callee->dump_name ());
4107 : }
4108 : }
4109 0 : return NULL;
4110 : }
4111 :
4112 3989 : if (!dbg_cnt (devirt))
4113 : return NULL;
4114 :
4115 3989 : ipa_check_create_node_params ();
4116 :
4117 : /* We cannot make edges to inline clones. It is bug that someone removed
4118 : the cgraph node too early. */
4119 3989 : gcc_assert (!callee->inlined_to);
4120 :
4121 3989 : if (dump_file && !unreachable)
4122 : {
4123 402 : fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
4124 : "(%s -> %s), for stmt ",
4125 402 : is_a <cgraph_polymorphic_indirect_info *> (ie->indirect_info)
4126 : ? "a virtual" : "an indirect",
4127 : speculative ? "speculative" : "known",
4128 201 : ie->caller->dump_name (),
4129 : callee->dump_name ());
4130 201 : if (ie->call_stmt)
4131 193 : print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
4132 : else
4133 8 : fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
4134 : }
4135 3989 : if (dump_enabled_p ())
4136 : {
4137 402 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
4138 : "converting indirect call in %s to direct call to %s\n",
4139 201 : ie->caller->dump_name (), callee->dump_name ());
4140 : }
4141 3989 : if (!speculative)
4142 : {
4143 3962 : struct cgraph_edge *orig = ie;
4144 3962 : ie = cgraph_edge::make_direct (ie, callee);
4145 : /* If we resolved speculative edge the cost is already up to date
4146 : for direct call (adjusted by inline_edge_duplication_hook). */
4147 3962 : if (ie == orig)
4148 : {
4149 3143 : ipa_call_summary *es = ipa_call_summaries->get (ie);
4150 3143 : es->call_stmt_size -= (eni_size_weights.indirect_call_cost
4151 3143 : - eni_size_weights.call_cost);
4152 3143 : es->call_stmt_time -= (eni_time_weights.indirect_call_cost
4153 3143 : - eni_time_weights.call_cost);
4154 : }
4155 : }
4156 : else
4157 : {
4158 27 : if (!callee->can_be_discarded_p ())
4159 : {
4160 4 : cgraph_node *alias;
4161 4 : alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
4162 : if (alias)
4163 27 : callee = alias;
4164 : }
4165 : /* make_speculative will update ie's cost to direct call cost. */
4166 27 : ie = ie->make_speculative
4167 27 : (callee, ie->count.apply_scale (8, 10));
4168 : }
4169 :
4170 : return ie;
4171 : }
4172 :
4173 : /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
4174 : CONSTRUCTOR and return it. Return NULL if the search fails for some
4175 : reason. */
4176 :
4177 : static tree
4178 11078 : find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
4179 : {
4180 16438 : tree type = TREE_TYPE (constructor);
4181 16438 : if (TREE_CODE (type) != ARRAY_TYPE
4182 16438 : && TREE_CODE (type) != RECORD_TYPE)
4183 : return NULL;
4184 :
4185 16426 : unsigned ix;
4186 16426 : tree index, val;
4187 22535 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
4188 : {
4189 21863 : HOST_WIDE_INT elt_offset;
4190 21863 : if (TREE_CODE (type) == ARRAY_TYPE)
4191 : {
4192 290 : offset_int off;
4193 290 : tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
4194 290 : gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
4195 :
4196 290 : if (index)
4197 : {
4198 290 : if (TREE_CODE (index) == RANGE_EXPR)
4199 60 : off = wi::to_offset (TREE_OPERAND (index, 0));
4200 : else
4201 230 : off = wi::to_offset (index);
4202 290 : if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
4203 : {
4204 290 : tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
4205 290 : gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
4206 290 : off = wi::sext (off - wi::to_offset (low_bound),
4207 290 : TYPE_PRECISION (TREE_TYPE (index)));
4208 : }
4209 290 : off *= wi::to_offset (unit_size);
4210 : /* ??? Handle more than just the first index of a
4211 : RANGE_EXPR. */
4212 : }
4213 : else
4214 0 : off = wi::to_offset (unit_size) * ix;
4215 :
4216 290 : off = wi::lshift (off, LOG2_BITS_PER_UNIT);
4217 290 : if (!wi::fits_shwi_p (off) || wi::neg_p (off))
4218 0 : continue;
4219 290 : elt_offset = off.to_shwi ();
4220 : }
4221 21573 : else if (TREE_CODE (type) == RECORD_TYPE)
4222 : {
4223 21573 : gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
4224 21573 : if (DECL_BIT_FIELD (index))
4225 0 : continue;
4226 21573 : elt_offset = int_bit_position (index);
4227 : }
4228 : else
4229 0 : gcc_unreachable ();
4230 :
4231 21863 : if (elt_offset > req_offset)
4232 : return NULL;
4233 :
4234 21863 : if (TREE_CODE (val) == CONSTRUCTOR)
4235 5360 : return find_constructor_constant_at_offset (val,
4236 5360 : req_offset - elt_offset);
4237 :
4238 16503 : if (elt_offset == req_offset
4239 11066 : && is_gimple_reg_type (TREE_TYPE (val))
4240 27569 : && is_gimple_ip_invariant (val))
4241 : return val;
4242 : }
4243 : return NULL;
4244 : }
4245 :
4246 : /* Check whether SCALAR could be used to look up an aggregate interprocedural
4247 : invariant from a static constructor and if so, return it. Otherwise return
4248 : NULL. */
4249 :
4250 : tree
4251 13275587 : ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
4252 : {
4253 13275587 : if (by_ref)
4254 : {
4255 13260496 : if (TREE_CODE (scalar) != ADDR_EXPR)
4256 : return NULL;
4257 4191684 : scalar = TREE_OPERAND (scalar, 0);
4258 : }
4259 :
4260 4206775 : if (!VAR_P (scalar)
4261 2722777 : || !is_global_var (scalar)
4262 281991 : || !TREE_READONLY (scalar)
4263 15626 : || !DECL_INITIAL (scalar)
4264 4219973 : || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
4265 : return NULL;
4266 :
4267 11078 : return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
4268 : }
4269 :
4270 : /* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
4271 : is none. BY_REF specifies whether the value has to be passed by reference
4272 : or by value. */
4273 :
4274 : static tree
4275 24482 : ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
4276 : ipa_node_params *src_info,
4277 : cgraph_node *src_node,
4278 : HOST_WIDE_INT offset, bool by_ref)
4279 : {
4280 24482 : if (by_ref != agg_jfunc->by_ref)
4281 : return NULL_TREE;
4282 :
4283 4233 : for (const ipa_agg_jf_item &item : agg_jfunc->items)
4284 1500 : if (item.offset == offset)
4285 1085 : return ipa_agg_value_from_jfunc (src_info, src_node, &item);
4286 :
4287 : return NULL_TREE;
4288 : }
4289 :
4290 : /* Remove a reference to SYMBOL from the list of references of a node given by
4291 : reference description RDESC. Return true if the reference has been
4292 : successfully found and removed. */
4293 :
4294 : static bool
4295 7856 : remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4296 : {
4297 7856 : struct ipa_ref *to_del;
4298 7856 : struct cgraph_edge *origin;
4299 :
4300 7856 : origin = rdesc->cs;
4301 7856 : if (!origin)
4302 : return false;
4303 7856 : to_del = origin->caller->find_reference (symbol, origin->call_stmt,
4304 : origin->lto_stmt_uid, IPA_REF_ADDR);
4305 7856 : if (!to_del)
4306 : return false;
4307 :
4308 7856 : to_del->remove_reference ();
4309 7856 : if (dump_file)
4310 26 : fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
4311 13 : origin->caller->dump_name (), symbol->dump_name ());
4312 : return true;
4313 : }
4314 :
4315 : /* If JFUNC has a reference description with refcount different from
4316 : IPA_UNDESCRIBED_USE, return the reference description, otherwise return
4317 : NULL. JFUNC must be a constant jump function. */
4318 :
4319 : static struct ipa_cst_ref_desc *
4320 1311958 : jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
4321 : {
4322 1311958 : struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
4323 1311958 : if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
4324 : return rdesc;
4325 : else
4326 1123376 : return NULL;
4327 : }
4328 :
4329 : /* If the value of constant jump function JFUNC is an address of a function
4330 : declaration, return the associated call graph node. Otherwise return
4331 : NULL. */
4332 :
4333 : static symtab_node *
4334 1782 : symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
4335 : {
4336 1782 : gcc_checking_assert (jfunc->type == IPA_JF_CONST);
4337 1782 : tree cst = ipa_get_jf_constant (jfunc);
4338 1782 : if (TREE_CODE (cst) != ADDR_EXPR
4339 1782 : || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
4340 31 : && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
4341 : return NULL;
4342 :
4343 1772 : return symtab_node::get (TREE_OPERAND (cst, 0));
4344 : }
4345 :
4346 :
4347 : /* If JFUNC is a constant jump function with a usable rdesc, decrement its
4348 : refcount and if it hits zero, remove reference to SYMBOL from the caller of
4349 : the edge specified in the rdesc. Return false if either the symbol or the
4350 : reference could not be found, otherwise return true. */
4351 :
4352 : static bool
4353 969 : try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
4354 : {
4355 969 : struct ipa_cst_ref_desc *rdesc;
4356 969 : if (jfunc->type == IPA_JF_CONST
4357 969 : && (rdesc = jfunc_rdesc_usable (jfunc))
4358 1911 : && --rdesc->refcount == 0)
4359 : {
4360 778 : symtab_node *symbol = symtab_node_for_jfunc (jfunc);
4361 778 : if (!symbol)
4362 : return false;
4363 :
4364 778 : return remove_described_reference (symbol, rdesc);
4365 : }
4366 : return true;
4367 : }
4368 :
4369 : /* Try to find a destination for indirect edge IE that corresponds to a simple
4370 : call or a call of a member function pointer and where the destination is a
4371 : pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
4372 : the type of the parameter to which the result of JFUNC is passed. If it can
4373 : be determined, return the newly direct edge, otherwise return NULL.
4374 : NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
4375 : relative to. */
4376 :
4377 : static struct cgraph_edge *
4378 9626 : try_make_edge_direct_simple_call (struct cgraph_edge *ie,
4379 : struct ipa_jump_func *jfunc, tree target_type,
4380 : struct cgraph_node *new_root,
4381 : class ipa_node_params *new_root_info)
4382 : {
4383 9626 : tree target = NULL_TREE;
4384 9626 : cgraph_simple_indirect_info *sii
4385 9626 : = as_a <cgraph_simple_indirect_info *> (ie->indirect_info);
4386 9626 : bool agg_contents = sii->agg_contents;
4387 9626 : tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
4388 9626 : if (agg_contents)
4389 : {
4390 7519 : if (scalar)
4391 14 : target = ipa_find_agg_cst_from_init (scalar, sii->offset, sii->by_ref);
4392 7519 : if (!target && sii->guaranteed_unmodified)
4393 6360 : target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
4394 : new_root, sii->offset,
4395 : sii->by_ref);
4396 : }
4397 : else
4398 : target = scalar;
4399 9625 : if (!target)
4400 : return NULL;
4401 1340 : cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target);
4402 :
4403 1340 : if (cs && !agg_contents)
4404 : {
4405 969 : bool ok;
4406 969 : gcc_checking_assert (cs->callee
4407 : && (cs != ie
4408 : || jfunc->type != IPA_JF_CONST
4409 : || !symtab_node_for_jfunc (jfunc)
4410 : || cs->callee == symtab_node_for_jfunc (jfunc)));
4411 969 : ok = try_decrement_rdesc_refcount (jfunc);
4412 969 : gcc_checking_assert (ok);
4413 : }
4414 :
4415 : return cs;
4416 : }
4417 :
4418 : /* Return the target to be used in cases of impossible devirtualization. IE
4419 : and target (the latter can be NULL) are dumped when dumping is enabled. */
4420 :
4421 : tree
4422 508 : ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
4423 : {
4424 508 : if (dump_file)
4425 : {
4426 57 : if (target)
4427 18 : fprintf (dump_file,
4428 : "Type inconsistent devirtualization: %s->%s\n",
4429 18 : ie->caller->dump_name (),
4430 18 : IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
4431 : else
4432 39 : fprintf (dump_file,
4433 : "No devirtualization target in %s\n",
4434 39 : ie->caller->dump_name ());
4435 : }
4436 508 : tree new_target = builtin_decl_unreachable ();
4437 508 : cgraph_node::get_create (new_target);
4438 508 : return new_target;
4439 : }
4440 :
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;
  cgraph_polymorphic_indirect_info *pii
    = as_a <cgraph_polymorphic_indirect_info *> (ie->indirect_info);
  if (!pii->usable_p ())
    return nullptr;

  /* Try to do lookup via known virtual table pointer value.  If the vptr
     may have changed (e.g. in a constructor), this lookup is only used
     speculatively, and only when speculative devirtualization is on.  */
  if (!pii->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = NULL_TREE;
      /* First look for the vtable pointer in a known constant initializer,
	 then fall back to aggregate parts tracked by the jump function.  */
      if (jfunc->type == IPA_JF_CONST)
	t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
					pii->offset, true);
      if (!t)
	t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
					       new_root, pii->offset, true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (pii->otr_token, vtable, offset,
						 &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
					BUILT_IN_UNREACHABLE_TRAP)
		  || !possible_polymorphic_call_target_p
			 (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!pii->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* A devirtualization based on a vptr that may have
		     changed can only be speculative.  */
		  speculative = pii->vptr_changed;
		}
	    }
	}
    }

  /* Second attempt: consult the type inheritance graph using the combined
     polymorphic context of the call site and the jump function.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (pii->offset);
  if (pii->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      pii->otr_type);
  ctx.combine_with (ie_context, pii->otr_type);
  targets = possible_polymorphic_call_targets (pii->otr_type, pii->otr_token,
					       ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* The target list is complete, so the result is not a guess.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl,
				  flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (pii->otr_type, pii->otr_token,
					    pii->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      /* Sanity check the chosen target against the inheritance graph;
	 never turn a speculation into a call of an impossible target.  */
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
4554 :
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  bool res = false;

  ipa_check_create_edge_args ();
  class ipa_edge_args *top = ipa_edge_args_sum->get (cs);
  cgraph_node *new_root
    = cs->caller->inlined_to ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *inlined_node_info
    = ipa_node_params_sum->get (cs->callee->function_symbol ());

  cgraph_edge *next_ie;
  for (cgraph_edge *ie = node->indirect_calls; ie; ie = next_ie)
    {
      /* Making IE direct may unlink it, so grab the successor first.  */
      next_ie = ie->next_callee;

      /* Indirect calls not based on any argument of the inlined call can
	 never be resolved through this inlining; forget their parameter.  */
      if (!top
	  || ie->indirect_info->param_index < 0
	  || ie->indirect_info->param_index >= ipa_get_cs_argument_count (top))
	{
	  ie->indirect_info->param_index = -1;
	  continue;
	}

      int param_index = ie->indirect_info->param_index;
      cgraph_polymorphic_indirect_info *pii
	= dyn_cast <cgraph_polymorphic_indirect_info *> (ie->indirect_info);
      cgraph_simple_indirect_info *sii
	= dyn_cast <cgraph_simple_indirect_info *> (ie->indirect_info);
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (top, param_index);

      /* Remember existing speculative targets so that rediscovering one of
	 them below can be recognized as removal of the speculation.  */
      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      cgraph_edge *new_direct_edge;
      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (pii)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else if (sii)
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}
      else
	gcc_unreachable ();

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The edge stayed indirect; re-express its parameter index in terms
	 of the caller's formal parameters via the jump function.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (!pii
	      && sii->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	    ie->indirect_info->param_index = -1;
	  else
	    {
	      param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      ie->indirect_info->param_index = param_index;
	      ipa_set_param_used_by_indirect_call (new_root_info, param_index,
						   true);
	      if (pii)
		{
		  if (!ipa_get_jf_pass_through_type_preserved (jfunc))
		    pii->vptr_changed = true;
		  ipa_set_param_used_by_polymorphic_call (new_root_info,
							  param_index, true);
		}
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (!pii
	      && sii->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	    ie->indirect_info->param_index = -1;
	  else
	    {
	      param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ie->indirect_info->param_index = param_index;
	      ipa_set_param_used_by_indirect_call (new_root_info, param_index,
						   true);
	      if (pii)
		{
		  pii->offset += ipa_get_jf_ancestor_offset (jfunc);
		  if (!ipa_get_jf_ancestor_type_preserved (jfunc))
		    pii->vptr_changed = true;
		  ipa_set_param_used_by_polymorphic_call (new_root_info,
							  param_index, true);
		}
	      else
		sii->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ie->indirect_info->param_index = -1;
    }

  return res;
}
4700 :
4701 : /* Recursively traverse subtree of NODE (including node) made of inlined
4702 : cgraph_edges when CS has been inlined and invoke
4703 : update_indirect_edges_after_inlining on all nodes and
4704 : update_jump_functions_after_inlining on all non-inlined edges that lead out
4705 : of this subtree. Newly discovered indirect edges will be added to
4706 : *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4707 : created. */
4708 :
4709 : static bool
4710 1600873 : propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4711 : struct cgraph_node *node,
4712 : vec<cgraph_edge *> *new_edges)
4713 : {
4714 1600873 : struct cgraph_edge *e;
4715 1600873 : bool res;
4716 :
4717 1600873 : res = update_indirect_edges_after_inlining (cs, node, new_edges);
4718 :
4719 4857599 : for (e = node->callees; e; e = e->next_callee)
4720 3256726 : if (!e->inline_failed)
4721 677682 : res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4722 : else
4723 2579044 : update_jump_functions_after_inlining (cs, e);
4724 1668370 : for (e = node->indirect_calls; e; e = e->next_callee)
4725 67497 : update_jump_functions_after_inlining (cs, e);
4726 :
4727 1600873 : return res;
4728 : }
4729 :
4730 : /* Combine two controlled uses counts as done during inlining. */
4731 :
4732 : static int
4733 384167 : combine_controlled_uses_counters (int c, int d)
4734 : {
4735 0 : if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4736 : return IPA_UNDESCRIBED_USE;
4737 : else
4738 100536 : return c + d - 1;
4739 : }
4740 :
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH
	  && !ipa_get_jf_pass_through_refdesc_decremented (jf))
	{
	  /* A parameter passed through unchanged: fold the callee's count
	     into the corresponding parameter of the new root.  */
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  bool lderef = true;
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
			|| ipa_get_param_load_dereferenced (old_root_info, i));
	      ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
	    }

	  /* No controlled uses or dereferencing loads remain in an IPA-CP
	     clone: the reference created for the known constant can go.  */
	  if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0,
						      IPA_REF_ADDR)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  /* A constant with a live reference description: combine its
	     refcount with the callee's controlled-uses count.  */
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  tree cst = ipa_get_jf_constant (jf);
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount != IPA_UNDESCRIBED_USE
	      && ipa_get_param_load_dereferenced (old_root_info, i)
	      && TREE_CODE (cst) == ADDR_EXPR
	      && VAR_P (TREE_OPERAND (cst, 0)))
	    {
	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      new_root->create_reference (n, IPA_REF_LOAD, NULL);
	      if (dump_file)
		fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
			 "a load so adding LOAD reference from %s to %s.\n",
			 new_root->dump_name (), n->dump_name ());
	    }
	  if (rdesc->refcount == 0)
	    {
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && ((TREE_CODE (TREE_OPERAND (cst, 0))
					== FUNCTION_DECL)
				       || VAR_P (TREE_OPERAND (cst, 0))));

	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  remove_described_reference (n, rdesc);
		  /* Also drop the references created for IPA-CP clones up
		     the chain of callers, until the edge that owns the
		     reference description is reached.  */
		  cgraph_node *clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameter count cannot be
     described; mark their uses as unknown.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
4879 :
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  /* The callee node is now inlined; its parameter summary is no longer
     needed.  */
  ipa_node_params_sum->remove (cs->callee);

  /* Only remove the edge argument summary if none of its constant jump
     functions still carries a reference description that may have to be
     looked up later.  */
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
4925 :
4926 : /* Ensure that array of edge arguments infos is big enough to accommodate a
4927 : structure for all edges and reallocates it if not. Also, allocate
4928 : associated hash tables is they do not already exist. */
4929 :
4930 : void
4931 4775411 : ipa_check_create_edge_args (void)
4932 : {
4933 4775411 : if (!ipa_edge_args_sum)
4934 244392 : ipa_edge_args_sum
4935 244392 : = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4936 244392 : ipa_edge_args_sum_t (symtab, true));
4937 4775411 : if (!ipa_vr_hash_table)
4938 171439 : ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4939 4775411 : }
4940 :
4941 : /* Free all ipa_edge structures. */
4942 :
4943 : void
4944 463971 : ipa_free_all_edge_args (void)
4945 : {
4946 463971 : if (!ipa_edge_args_sum)
4947 : return;
4948 :
4949 232040 : ggc_delete (ipa_edge_args_sum);
4950 232040 : ipa_edge_args_sum = NULL;
4951 : }
4952 :
4953 : /* Free all ipa_node_params structures. */
4954 :
4955 : void
4956 5336602 : ipa_free_all_node_params (void)
4957 : {
4958 5336602 : if (ipa_node_params_sum)
4959 5104671 : ggc_delete (ipa_node_params_sum);
4960 5336602 : ipa_node_params_sum = NULL;
4961 5336602 : }
4962 :
4963 : /* Initialize IPA CP transformation summary and also allocate any necessary hash
4964 : tables if they do not already exist. */
4965 :
4966 : void
4967 95789 : ipcp_transformation_initialize (void)
4968 : {
4969 95789 : if (!ipa_vr_hash_table)
4970 1888 : ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4971 95789 : if (ipcp_transformation_sum == NULL)
4972 : {
4973 19906 : ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4974 19906 : ipcp_transformation_sum->disable_insertion_hook ();
4975 : }
4976 95789 : }
4977 :
4978 : /* Release the IPA CP transformation summary. */
4979 :
4980 : void
4981 258746 : ipcp_free_transformation_sum (void)
4982 : {
4983 258746 : if (!ipcp_transformation_sum)
4984 : return;
4985 :
4986 19897 : ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4987 19897 : ggc_free (ipcp_transformation_sum);
4988 19897 : ipcp_transformation_sum = NULL;
4989 : }
4990 :
4991 : /* Set the aggregate replacements of NODE to be AGGVALS. */
4992 :
4993 : void
4994 22503 : ipa_set_node_agg_value_chain (struct cgraph_node *node,
4995 : vec<ipa_argagg_value, va_gc> *aggs)
4996 : {
4997 22503 : ipcp_transformation_initialize ();
4998 22503 : ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4999 22503 : s->m_agg_values = aggs;
5000 22503 : }
5001 :
5002 : /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
5003 : count data structures accordingly. */
5004 :
5005 : void
5006 0 : ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
5007 : {
5008 0 : if (args->jump_functions)
5009 : {
5010 : struct ipa_jump_func *jf;
5011 : int i;
5012 0 : FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
5013 : {
5014 0 : struct ipa_cst_ref_desc *rdesc;
5015 0 : try_decrement_rdesc_refcount (jf);
5016 0 : if (jf->type == IPA_JF_CONST
5017 0 : && (rdesc = ipa_get_jf_constant_rdesc (jf))
5018 0 : && rdesc->cs == cs)
5019 0 : rdesc->cs = NULL;
5020 : }
5021 : }
5022 0 : }
5023 :
/* Copy information from SRC_JF to DST_JF which correspond to call graph edges
   SRC and DST.  */

static void
ipa_duplicate_jump_function (cgraph_edge *src, cgraph_edge *dst,
			     ipa_jump_func *src_jf, ipa_jump_func *dst_jf)
{
  dst_jf->agg.items = vec_safe_copy (src_jf->agg.items);
  dst_jf->agg.by_ref = src_jf->agg.by_ref;

  /* We can avoid calling ipa_set_jfunc_vr since it would only look up the
     place in the hash_table where the source m_vr resides.  */
  dst_jf->m_vr = src_jf->m_vr;

  if (src_jf->type == IPA_JF_CONST)
    {
      ipa_set_jf_cst_copy (dst_jf, src_jf);
      struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

      if (!src_rdesc)
	dst_jf->value.constant.rdesc = NULL;
      else if (src->caller == dst->caller)
	{
	  /* Creation of a speculative edge.  If the source edge is the one
	     grabbing a reference, we must create a new (duplicate)
	     reference description.  Otherwise they refer to the same
	     description corresponding to a reference taken in a function
	     src->caller is inlined to.  In that case we just must
	     increment the refcount.  */
	  if (src_rdesc->cs == src)
	    {
	      symtab_node *n = symtab_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ipa_ref *ref
		= src->caller->find_reference (n, src->call_stmt,
					       src->lto_stmt_uid,
					       IPA_REF_ADDR);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      src_rdesc->refcount++;
	      dst_jf->value.constant.rdesc = src_rdesc;
	    }
	}
      else if (src_rdesc->cs == src)
	{
	  /* Ordinary edge duplication: create a fresh description for DST
	     and link it into the chain of duplicates of SRC's.  */
	  struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	  dst_rdesc->cs = dst;
	  dst_rdesc->refcount = src_rdesc->refcount;
	  dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	  src_rdesc->next_duplicate = dst_rdesc;
	  dst_jf->value.constant.rdesc = dst_rdesc;
	}
      else
	{
	  struct ipa_cst_ref_desc *dst_rdesc;
	  /* This can happen during inlining, when a JFUNC can refer to a
	     reference taken in a function up in the tree of inline clones.
	     We need to find the duplicate that refers to our tree of
	     inline clones.  */

	  gcc_assert (dst->caller->inlined_to);
	  for (dst_rdesc = src_rdesc->next_duplicate;
	       dst_rdesc;
	       dst_rdesc = dst_rdesc->next_duplicate)
	    {
	      struct cgraph_node *top;
	      top = dst_rdesc->cs->caller->inlined_to
		? dst_rdesc->cs->caller->inlined_to
		: dst_rdesc->cs->caller;
	      if (dst->caller->inlined_to == top)
		break;
	    }
	  gcc_assert (dst_rdesc);
	  dst_jf->value.constant.rdesc = dst_rdesc;
	}
    }
  else if (src_jf->type == IPA_JF_PASS_THROUGH)
    {
      dst_jf->type = IPA_JF_PASS_THROUGH;
      dst_jf->value.pass_through = src_jf->value.pass_through;
      if (src->caller == dst->caller)
	{
	  /* A speculative edge adds one more controlled use of the passed
	     parameter in the root of the inline tree.  */
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
  else if (src_jf->type == IPA_JF_ANCESTOR)
    {
      dst_jf->type = IPA_JF_ANCESTOR;
      dst_jf->value.ancestor = src_jf->value.ancestor;
    }
  else
    gcc_assert (src_jf->type == IPA_JF_UNKNOWN);
}
5136 :
5137 : /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
5138 : reference count data strucutres accordingly. */
5139 :
5140 : void
5141 1248333 : ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
5142 : ipa_edge_args *old_args, ipa_edge_args *new_args)
5143 : {
5144 1248333 : unsigned int i;
5145 :
5146 1248333 : if (old_args->polymorphic_call_contexts)
5147 140623 : new_args->polymorphic_call_contexts
5148 140623 : = vec_safe_copy (old_args->polymorphic_call_contexts);
5149 :
5150 1248333 : if (!vec_safe_length (old_args->jump_functions))
5151 : {
5152 52856 : new_args->jump_functions = NULL;
5153 52856 : return;
5154 : }
5155 1195477 : vec_safe_grow_cleared (new_args->jump_functions,
5156 1195477 : old_args->jump_functions->length (), true);
5157 :
5158 3938226 : for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
5159 : {
5160 2742749 : struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
5161 2742749 : struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
5162 :
5163 2742749 : ipa_duplicate_jump_function (src, dst, src_jf, dst_jf);
5164 : }
5165 : }
5166 :
5167 : /* Analyze newly added function into callgraph. */
5168 :
5169 : static void
5170 40537 : ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
5171 : {
5172 40537 : if (node->has_gimple_body_p ())
5173 40537 : ipa_analyze_node (node);
5174 40537 : }
5175 :
5176 : /* Hook that is called by summary when a node is duplicated. */
5177 :
5178 : void
5179 757565 : ipa_node_params_t::duplicate(cgraph_node *, cgraph_node *,
5180 : ipa_node_params *old_info,
5181 : ipa_node_params *new_info)
5182 : {
5183 757565 : new_info->descriptors = vec_safe_copy (old_info->descriptors);
5184 757565 : gcc_assert (new_info->lattices.is_empty ());
5185 757565 : new_info->ipcp_orig_node = old_info->ipcp_orig_node;
5186 757565 : new_info->known_csts = old_info->known_csts.copy ();
5187 757565 : new_info->known_contexts = old_info->known_contexts.copy ();
5188 :
5189 757565 : new_info->analysis_done = old_info->analysis_done;
5190 757565 : new_info->node_enqueued = old_info->node_enqueued;
5191 757565 : new_info->versionable = old_info->versionable;
5192 757565 : }
5193 :
5194 : /* Duplication of ipcp transformation summaries. */
5195 :
5196 : void
5197 59718 : ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
5198 : ipcp_transformation *src_trans,
5199 : ipcp_transformation *dst_trans)
5200 : {
5201 : /* Avoid redundant work of duplicating vectors we will never use. */
5202 59718 : if (dst->inlined_to)
5203 : return;
5204 8551 : dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
5205 15813 : dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
5206 : }
5207 :
5208 : /* Register our cgraph hooks if they are not already there. */
5209 :
5210 : void
5211 381827 : ipa_register_cgraph_hooks (void)
5212 : {
5213 381827 : ipa_check_create_node_params ();
5214 381827 : ipa_check_create_edge_args ();
5215 :
5216 763654 : function_insertion_hook_holder =
5217 381827 : symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
5218 381827 : }
5219 :
5220 : /* Unregister our cgraph hooks if they are not already there. */
5221 :
5222 : static void
5223 463971 : ipa_unregister_cgraph_hooks (void)
5224 : {
5225 463971 : if (function_insertion_hook_holder)
5226 232040 : symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
5227 463971 : function_insertion_hook_holder = NULL;
5228 463971 : }
5229 :
5230 : /* Free all ipa_node_params and all ipa_edge_args structures if they are no
5231 : longer needed after ipa-cp. */
5232 :
5233 : void
5234 128668 : ipa_free_all_structures_after_ipa_cp (void)
5235 : {
5236 128668 : if (!optimize && !in_lto_p)
5237 : {
5238 0 : ipa_free_all_edge_args ();
5239 0 : ipa_free_all_node_params ();
5240 0 : ipcp_sources_pool.release ();
5241 0 : ipcp_cst_values_pool.release ();
5242 0 : ipcp_poly_ctx_values_pool.release ();
5243 0 : ipcp_agg_lattice_pool.release ();
5244 0 : ipa_unregister_cgraph_hooks ();
5245 0 : ipa_refdesc_pool.release ();
5246 : }
5247 128668 : }
5248 :
5249 : /* Free all ipa_node_params and all ipa_edge_args structures if they are no
5250 : longer needed after indirect inlining. */
5251 :
5252 : void
5253 463971 : ipa_free_all_structures_after_iinln (void)
5254 : {
5255 463971 : ipa_free_all_edge_args ();
5256 463971 : ipa_free_all_node_params ();
5257 463971 : ipa_unregister_cgraph_hooks ();
5258 463971 : ipcp_sources_pool.release ();
5259 463971 : ipcp_cst_values_pool.release ();
5260 463971 : ipcp_poly_ctx_values_pool.release ();
5261 463971 : ipcp_agg_lattice_pool.release ();
5262 463971 : ipa_refdesc_pool.release ();
5263 463971 : }
5264 :
5265 : /* Print ipa_tree_map data structures of all functions in the
5266 : callgraph to F. */
5267 :
5268 : void
5269 240 : ipa_print_node_params (FILE *f, struct cgraph_node *node)
5270 : {
5271 240 : int i, count;
5272 240 : class ipa_node_params *info;
5273 :
5274 240 : if (!node->definition)
5275 : return;
5276 181 : info = ipa_node_params_sum->get (node);
5277 181 : fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
5278 181 : if (!info)
5279 : {
5280 0 : fprintf (f, " no params return\n");
5281 0 : return;
5282 : }
5283 181 : count = ipa_get_param_count (info);
5284 367 : for (i = 0; i < count; i++)
5285 : {
5286 186 : int c;
5287 :
5288 186 : fprintf (f, " ");
5289 186 : ipa_dump_param (f, info, i);
5290 186 : if (ipa_is_param_used (info, i))
5291 178 : fprintf (f, " used");
5292 186 : if (ipa_is_param_used_by_ipa_predicates (info, i))
5293 108 : fprintf (f, " used_by_ipa_predicates");
5294 186 : if (ipa_is_param_used_by_indirect_call (info, i))
5295 10 : fprintf (f, " used_by_indirect_call");
5296 186 : if (ipa_is_param_used_by_polymorphic_call (info, i))
5297 0 : fprintf (f, " used_by_polymorphic_call");
5298 186 : c = ipa_get_controlled_uses (info, i);
5299 186 : if (c == IPA_UNDESCRIBED_USE)
5300 108 : fprintf (f, " undescribed_use");
5301 : else
5302 134 : fprintf (f, " controlled_uses=%i %s", c,
5303 78 : ipa_get_param_load_dereferenced (info, i)
5304 : ? "(load_dereferenced)" : "");
5305 186 : fprintf (f, "\n");
5306 : }
5307 : }
5308 :
5309 : /* Print ipa_tree_map data structures of all functions in the
5310 : callgraph to F. */
5311 :
5312 : void
5313 48 : ipa_print_all_params (FILE * f)
5314 : {
5315 48 : struct cgraph_node *node;
5316 :
5317 48 : fprintf (f, "\nFunction parameters:\n");
5318 274 : FOR_EACH_FUNCTION (node)
5319 226 : ipa_print_node_params (f, node);
5320 48 : }
5321 :
5322 : /* Stream out jump function JUMP_FUNC to OB. */
5323 :
5324 : static void
5325 608813 : ipa_write_jump_function (struct output_block *ob,
5326 : struct ipa_jump_func *jump_func)
5327 : {
5328 608813 : struct ipa_agg_jf_item *item;
5329 608813 : struct bitpack_d bp;
5330 608813 : int i, count;
5331 608813 : int flag = 0;
5332 :
 5333                 :   /* ADDR_EXPRs are very common IP invariants; save some streamer data
 5334                 :      as well as WPA memory by handling them specially.  */
5335 608813 : if (jump_func->type == IPA_JF_CONST
5336 464831 : && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
5337 608813 : flag = 1;
5338 :
5339 608813 : streamer_write_uhwi (ob, jump_func->type * 2 + flag);
5340 608813 : switch (jump_func->type)
5341 : {
5342 : case IPA_JF_UNKNOWN:
5343 : break;
5344 464831 : case IPA_JF_CONST:
5345 464831 : gcc_assert (
5346 : EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
5347 464831 : stream_write_tree (ob,
5348 : flag
5349 : ? TREE_OPERAND (jump_func->value.constant.value, 0)
5350 : : jump_func->value.constant.value, true);
5351 464831 : break;
5352 76693 : case IPA_JF_PASS_THROUGH:
5353 76693 : streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
5354 76693 : if (jump_func->value.pass_through.operation == NOP_EXPR)
5355 : {
5356 75871 : streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
5357 75871 : bp = bitpack_create (ob->main_stream);
5358 75871 : bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
5359 75871 : gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
5360 75871 : streamer_write_bitpack (&bp);
5361 : }
5362 822 : else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
5363 : == tcc_unary)
5364 : {
5365 39 : stream_write_tree (ob, jump_func->value.pass_through.op_type, true);
5366 39 : streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
5367 : }
5368 : else
5369 : {
5370 783 : stream_write_tree (ob, jump_func->value.pass_through.op_type, true);
5371 783 : stream_write_tree (ob, jump_func->value.pass_through.operand, true);
5372 783 : streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
5373 : }
5374 : break;
5375 1313 : case IPA_JF_ANCESTOR:
5376 1313 : streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
5377 1313 : streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
5378 1313 : bp = bitpack_create (ob->main_stream);
5379 1313 : bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
5380 1313 : bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
5381 1313 : streamer_write_bitpack (&bp);
5382 1313 : break;
5383 0 : default:
5384 0 : fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
5385 : }
5386 :
5387 608813 : count = vec_safe_length (jump_func->agg.items);
5388 608813 : streamer_write_uhwi (ob, count);
5389 608813 : if (count)
5390 : {
5391 3287 : bp = bitpack_create (ob->main_stream);
5392 3287 : bp_pack_value (&bp, jump_func->agg.by_ref, 1);
5393 3287 : streamer_write_bitpack (&bp);
5394 : }
5395 :
5396 615419 : FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
5397 : {
5398 6606 : stream_write_tree (ob, item->type, true);
5399 6606 : streamer_write_uhwi (ob, item->offset);
5400 6606 : streamer_write_uhwi (ob, item->jftype);
5401 6606 : switch (item->jftype)
5402 : {
5403 : case IPA_JF_UNKNOWN:
5404 : break;
5405 6081 : case IPA_JF_CONST:
5406 6081 : stream_write_tree (ob, item->value.constant, true);
5407 6081 : break;
5408 525 : case IPA_JF_PASS_THROUGH:
5409 525 : case IPA_JF_LOAD_AGG:
5410 525 : streamer_write_uhwi (ob, item->value.pass_through.operation);
5411 525 : streamer_write_uhwi (ob, item->value.pass_through.formal_id);
5412 525 : if (item->value.pass_through.operation != NOP_EXPR)
5413 4 : stream_write_tree (ob, item->value.pass_through.op_type, true);
5414 525 : if (TREE_CODE_CLASS (item->value.pass_through.operation)
5415 : != tcc_unary)
5416 4 : stream_write_tree (ob, item->value.pass_through.operand, true);
5417 525 : if (item->jftype == IPA_JF_LOAD_AGG)
5418 : {
5419 85 : stream_write_tree (ob, item->value.load_agg.type, true);
5420 85 : streamer_write_uhwi (ob, item->value.load_agg.offset);
5421 85 : bp = bitpack_create (ob->main_stream);
5422 85 : bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
5423 85 : streamer_write_bitpack (&bp);
5424 : }
5425 : break;
5426 0 : default:
5427 0 : fatal_error (UNKNOWN_LOCATION,
5428 : "invalid jump function in LTO stream");
5429 : }
5430 : }
5431 :
5432 608813 : bp = bitpack_create (ob->main_stream);
5433 608813 : if (jump_func->m_vr)
5434 423150 : jump_func->m_vr->streamer_write (ob);
5435 : else
5436 : {
5437 185663 : bp_pack_value (&bp, false, 1);
5438 185663 : streamer_write_bitpack (&bp);
5439 : }
5440 608813 : }
5441 :
/* Read in jump function JUMP_FUNC from IB.  CS is the call graph edge the
   jump function belongs to, DATA_IN provides tree/reference decoding state.
   PREVAILS says whether the function body this jump function belongs to
   prevails; when false, the data is still consumed from the stream (to keep
   the stream position correct) but not stored.  Must mirror
   ipa_write_jump_function exactly.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  /* Type and ADDR_EXPR flag were packed into one uhwi by the writer.  */
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	/* The writer streamed only the ADDR_EXPR operand; rebuild the
	   ADDR_EXPR here.  */
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  tree op_type = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation,
					 op_type);
	}
      else
	{
	  /* Binary arithmetic pass-through carries an extra operand.  */
	  tree op_type = stream_read_tree (ib, data_in);
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation, op_type);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool keep_null = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
			     keep_null);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate jump function items.  */
  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  /* op_type was only streamed for non-NOP operations, the extra
	     operand only for non-unary ones; see the writer.  */
	  if (operation != NOP_EXPR)
	    item.value.pass_through.op_type = stream_read_tree (ib, data_in);
	  else
	    item.value.pass_through.op_type = NULL_TREE;
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      /* Space was reserved above, so quick_push cannot reallocate.  */
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  /* The trailing value range; always read it so the stream stays in sync,
     but only attach it to JUMP_FUNC if this body prevails.  */
  ipa_vr vr;
  vr.streamer_read (ib, data_in);
  if (vr.known_p ())
    {
      if (prevails)
	ipa_set_jfunc_vr (jump_func, vr);
    }
  else
    jump_func->m_vr = NULL;
}
5576 :
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  The kind tag is written first so that
   the reader can verify it matches the edge it rebuilt; the rest depends on
   whether the info is polymorphic, simple, or neither.  Must be kept in sync
   with ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct bitpack_d bp;

  bp = bitpack_create (ob->main_stream);
  bp_pack_enum (&bp, cgraph_indirect_info_kind, CIIK_N_KINDS,
		cs->indirect_info->kind);
  streamer_write_bitpack (&bp);

  if (cgraph_polymorphic_indirect_info *pii
      = dyn_cast <cgraph_polymorphic_indirect_info *> (cs->indirect_info))
    {
      /* Polymorphic (virtual) call: stream the devirtualization context.  */
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, pii->vptr_changed, 1);
      streamer_write_bitpack (&bp);

      streamer_write_hwi (ob, pii->param_index);
      pii->context.stream_out (ob);
      streamer_write_hwi (ob, pii->otr_token);
      stream_write_tree (ob, pii->otr_type, true);
      streamer_write_hwi (ob, pii->offset);
    }
  else if (cgraph_simple_indirect_info *sii
	   = dyn_cast <cgraph_simple_indirect_info *> (cs->indirect_info))
    {
      /* Simple indirect call through a function pointer.  */
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, sii->agg_contents, 1);
      bp_pack_value (&bp, sii->member_ptr, 1);
      bp_pack_value (&bp, sii->fnptr_loaded_from_record, 1);
      bp_pack_value (&bp, sii->by_ref, 1);
      bp_pack_value (&bp, sii->guaranteed_unmodified, 1);
      streamer_write_bitpack (&bp);

      streamer_write_hwi (ob, sii->param_index);
      /* The offset is only meaningful for aggregate contents.  */
      if (sii->agg_contents)
	streamer_write_hwi (ob, sii->offset);
      else
	gcc_assert (sii->offset == 0);
      if (sii->fnptr_loaded_from_record)
	{
	  stream_write_tree (ob, sii->rec_type, true);
	  streamer_write_uhwi (ob, sii->fld_offset);
	}
    }
  else
    /* Neither kind: nothing to stream beyond the tag.  */
    gcc_assert (cs->indirect_info->param_index == -1);
}
5629 :
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  DATA_IN holds decoding state; INFO,
   if non-NULL, is the caller's ipa_node_params in which parameter-use flags
   are updated.  Must mirror ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  struct bitpack_d bp;

  /* The streamed kind must agree with the kind of the already-rebuilt
     edge.  */
  bp = streamer_read_bitpack (ib);
  enum cgraph_indirect_info_kind ii_kind
    = bp_unpack_enum (&bp, cgraph_indirect_info_kind, CIIK_N_KINDS);
  gcc_assert (ii_kind == cs->indirect_info->kind);

  if (cgraph_polymorphic_indirect_info *pii
      = dyn_cast <cgraph_polymorphic_indirect_info *> (cs->indirect_info))
    {
      /* Polymorphic (virtual) call.  */
      bp = streamer_read_bitpack (ib);
      pii->vptr_changed = bp_unpack_value (&bp, 1);

      pii->param_index = (int) streamer_read_hwi (ib);
      pii->context.stream_in (ib, data_in);
      pii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      pii->otr_type = stream_read_tree (ib, data_in);
      pii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);

      if (info && pii->param_index >= 0)
	{
	  ipa_set_param_used_by_polymorphic_call (info, pii->param_index, true);
	  ipa_set_param_used_by_indirect_call (info, pii->param_index, true);
	}
    }
  else if (cgraph_simple_indirect_info *sii
	   = dyn_cast <cgraph_simple_indirect_info *> (cs->indirect_info))
    {
      /* Simple indirect call through a function pointer.  */
      bp = streamer_read_bitpack (ib);
      sii->agg_contents = bp_unpack_value (&bp, 1);
      sii->member_ptr = bp_unpack_value (&bp, 1);
      sii->fnptr_loaded_from_record = bp_unpack_value (&bp, 1);
      sii->by_ref = bp_unpack_value (&bp, 1);
      sii->guaranteed_unmodified = bp_unpack_value (&bp, 1);

      sii->param_index = (int) streamer_read_hwi (ib);
      /* The offset was only streamed for aggregate contents.  */
      if (sii->agg_contents)
	sii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
      else
	sii->offset = 0;
      if (sii->fnptr_loaded_from_record)
	{
	  sii->rec_type = stream_read_tree (ib, data_in);
	  sii->fld_offset = (unsigned) streamer_read_uhwi (ib);
	}
      if (info && sii->param_index >= 0)
	ipa_set_param_used_by_indirect_call (info, sii->param_index, true);
    }
  else
    cs->indirect_info->param_index = -1;
}
5690 :
/* Stream out IPA-prop info of NODE (parameter descriptors and the jump
   functions of all outgoing call edges) to OB.  Must be kept in sync with
   ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Parameter count and per-parameter move costs.  */
  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  /* Bitpacked per-parameter flags: load_dereferenced and used.  */
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      /* TODO: We could just not stream the bit in the undescribed case.  */
      bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
	       ? ipa_get_param_load_dereferenced (info, j) : true;
      bp_pack_value (&bp, d, 1);
      bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
    }
  streamer_write_bitpack (&bp);
  /* Per-parameter controlled-use counts and types.  */
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  /* Jump functions of direct call edges.  The argument count is doubled
     and its low bit records whether polymorphic call contexts follow each
     jump function; zero means "no summary for this edge".  */
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  /* Indirect call edges: same encoding, followed by the indirect-edge
     info.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
5769 :
/* Stream in the jump functions (and any polymorphic call contexts) of edge E
   from IB.  DATA_IN holds decoding state; PREVAILS says whether the caller's
   body prevails.  The leading uhwi is argument-count * 2 with the low bit
   flagging the presence of contexts; zero means no summary was streamed.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
	  /* Also stream in jump functions to builtins in hope that they
	     will get fnspecs.  */
	  || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
	      (ib, data_in);
	}
    }
  else
    {
      /* The data is not wanted, but it must still be consumed so the stream
	 position stays correct; read into throw-away objects.  */
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
5817 :
/* Stream in IPA-prop info of NODE (parameter descriptors and edge jump
   functions) from IB.  DATA_IN holds decoding state.  Must mirror
   ipa_write_node_info; when NODE does not prevail, all data is still read
   (to keep the stream position correct) but discarded.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  /* Parameter count and per-parameter move costs.  */
  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    /* Consume the move costs without storing them.  */
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  /* Bitpacked per-parameter flags: load_dereferenced and used.  */
  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool load_dereferenced = bp_unpack_value (&bp, 1);
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	{
	  ipa_set_param_load_dereferenced (info, k, load_dereferenced);
	  ipa_set_param_used (info, k, used);
	}
    }
  /* Per-parameter controlled-use counts and types.  */
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  /* Edge summaries, in the same order the writer emitted them.  */
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
5876 :
/* Stream out return value summaries (known return value ranges) of all
   encoded definitions to OB.  The format is a count followed by
   (node reference, range) pairs; an absent summary table streams a zero
   count.  Read back by ipa_read_return_summaries.  */
static void
ipa_write_return_summaries (output_block *ob)
{
  if (!ipa_return_value_sum)
    {
      streamer_write_uhwi (ob, 0);
      return;
    }

  /* First pass: count the nodes that have a recorded return range, since
     the count must precede the entries in the stream.  */
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  unsigned int count = 0;
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
      ipa_return_value_summary *v;

      if (cnode && cnode->definition && !cnode->alias
	  && (v = ipa_return_value_sum->get (cnode))
	  && v->vr)
	count++;
    }
  streamer_write_uhwi (ob, count);

  /* Second pass: stream the entries, using the same filter as above.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
      ipa_return_value_summary *v;

      if (cnode && cnode->definition && !cnode->alias
	  && (v = ipa_return_value_sum->get (cnode))
	  && v->vr)
	{
	  streamer_write_uhwi
		  (ob,
		   lto_symtab_encoder_encode (encoder, cnode));
	  v->vr->streamer_write (ob);
	}
    }
}
5919 :
5920 : /* Write jump functions for nodes in SET. */
5921 :
5922 : void
5923 23187 : ipa_prop_write_jump_functions (void)
5924 : {
5925 23187 : struct output_block *ob;
5926 23187 : unsigned int count = 0;
5927 23187 : lto_symtab_encoder_iterator lsei;
5928 23187 : lto_symtab_encoder_t encoder;
5929 :
5930 23187 : if (!ipa_node_params_sum || !ipa_edge_args_sum)
5931 0 : return;
5932 :
5933 23187 : ob = create_output_block (LTO_section_jump_functions);
5934 23187 : encoder = ob->decl_state->symtab_node_encoder;
5935 23187 : ob->symbol = NULL;
5936 128147 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5937 104960 : lsei_next_function_in_partition (&lsei))
5938 : {
5939 104960 : cgraph_node *node = lsei_cgraph_node (lsei);
5940 104960 : if (node->has_gimple_body_p ()
5941 104960 : && ipa_node_params_sum->get (node) != NULL)
5942 92945 : count++;
5943 : }
5944 :
5945 23187 : streamer_write_uhwi (ob, count);
5946 :
5947 : /* Process all of the functions. */
5948 128147 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5949 104960 : lsei_next_function_in_partition (&lsei))
5950 : {
5951 104960 : cgraph_node *node = lsei_cgraph_node (lsei);
5952 104960 : if (node->has_gimple_body_p ()
5953 104960 : && ipa_node_params_sum->get (node) != NULL)
5954 92945 : ipa_write_node_info (ob, node);
5955 : }
5956 23187 : ipa_write_return_summaries (ob);
5957 :
5958 23187 : if (noted_fnptrs_in_records)
5959 : {
5960 340 : count = 0;
5961 1069 : for (auto iter = noted_fnptrs_in_records->begin ();
5962 1069 : iter != noted_fnptrs_in_records->end();
5963 729 : ++iter)
5964 729 : if ((*iter)->fn)
5965 721 : count++;
5966 340 : streamer_write_uhwi (ob, count);
5967 :
5968 1069 : for (auto iter = noted_fnptrs_in_records->begin ();
5969 1409 : iter != noted_fnptrs_in_records->end();
5970 729 : ++iter)
5971 729 : if ((*iter)->fn)
5972 : {
5973 721 : stream_write_tree (ob, (*iter)->rec_type, true);
5974 721 : stream_write_tree (ob, (*iter)->fn, true);
5975 721 : streamer_write_uhwi (ob, (*iter)->fld_offset);
5976 : }
5977 : }
5978 : else
5979 22847 : streamer_write_uhwi (ob, 0);
5980 :
5981 23187 : produce_asm (ob);
5982 23187 : destroy_output_block (ob);
5983 : }
5984 :
5985 : /* Record that return value range of N is VAL. */
5986 :
5987 : static void
5988 758965 : ipa_record_return_value_range_1 (cgraph_node *n, value_range val)
5989 : {
5990 758965 : if (!ipa_return_value_sum)
5991 : {
5992 86655 : if (!ipa_vr_hash_table)
5993 75895 : ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
5994 86655 : ipa_return_value_sum = new (ggc_alloc_no_dtor <ipa_return_value_sum_t> ())
5995 86655 : ipa_return_value_sum_t (symtab, true);
5996 86655 : ipa_return_value_sum->disable_insertion_hook ();
5997 : }
5998 758965 : ipa_return_value_sum->get_create (n)->vr = ipa_get_value_range (val);
5999 758965 : if (dump_file && (dump_flags & TDF_DETAILS))
6000 : {
6001 21 : fprintf (dump_file, "Recording return range of %s:", n->dump_name ());
6002 21 : val.dump (dump_file);
6003 21 : fprintf (dump_file, "\n");
6004 : }
6005 758965 : }
6006 :
6007 : /* Stream out ipa_return_summary. */
6008 : static void
6009 21574 : ipa_read_return_summaries (lto_input_block *ib,
6010 : struct lto_file_decl_data *file_data,
6011 : class data_in *data_in)
6012 : {
6013 21574 : unsigned int f_count = streamer_read_uhwi (ib);
6014 43285 : for (unsigned int i = 0; i < f_count; i++)
6015 : {
6016 21711 : unsigned int index = streamer_read_uhwi (ib);
6017 21711 : lto_symtab_encoder_t encoder = file_data->symtab_node_encoder;
6018 21711 : struct cgraph_node *node
6019 : = dyn_cast <cgraph_node *>
6020 21711 : (lto_symtab_encoder_deref (encoder, index));
6021 21711 : ipa_vr rvr;
6022 21711 : rvr.streamer_read (ib, data_in);
6023 21711 : if (node->prevailing_p ())
6024 : {
6025 21709 : value_range tmp;
6026 21709 : rvr.get_vrange (tmp);
6027 21709 : ipa_record_return_value_range_1 (node, tmp);
6028 21709 : }
6029 : }
6030 21574 : }
6031 :
6032 : /* Read section in file FILE_DATA of length LEN with data DATA. */
6033 :
6034 : static void
6035 13339 : ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
6036 : size_t len)
6037 : {
6038 13339 : const struct lto_function_header *header =
6039 : (const struct lto_function_header *) data;
6040 13339 : const int cfg_offset = sizeof (struct lto_function_header);
6041 13339 : const int main_offset = cfg_offset + header->cfg_size;
6042 13339 : const int string_offset = main_offset + header->main_size;
6043 13339 : class data_in *data_in;
6044 13339 : unsigned int i;
6045 13339 : unsigned int count;
6046 :
6047 13339 : lto_input_block ib_main ((const char *) data + main_offset,
6048 13339 : header->main_size, file_data);
6049 :
6050 13339 : data_in =
6051 26678 : lto_data_in_create (file_data, (const char *) data + string_offset,
6052 13339 : header->string_size, vNULL);
6053 13339 : count = streamer_read_uhwi (&ib_main);
6054 :
6055 90946 : for (i = 0; i < count; i++)
6056 : {
6057 77607 : unsigned int index;
6058 77607 : struct cgraph_node *node;
6059 77607 : lto_symtab_encoder_t encoder;
6060 :
6061 77607 : index = streamer_read_uhwi (&ib_main);
6062 77607 : encoder = file_data->symtab_node_encoder;
6063 77607 : node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
6064 : index));
6065 77607 : gcc_assert (node->definition);
6066 77607 : ipa_read_node_info (&ib_main, node, data_in);
6067 : }
6068 13339 : ipa_read_return_summaries (&ib_main, file_data, data_in);
6069 :
6070 13339 : count = streamer_read_uhwi (&ib_main);
6071 14007 : for (i = 0; i < count; i++)
6072 : {
6073 668 : tree rec_type = stream_read_tree (&ib_main, data_in);
6074 668 : tree fn = stream_read_tree (&ib_main, data_in);
6075 668 : unsigned fld_offset = (unsigned) streamer_read_uhwi (&ib_main);
6076 668 : note_fnptr_in_record (rec_type, fld_offset, fn);
6077 : }
6078 :
6079 13339 : lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
6080 : len);
6081 13339 : lto_data_in_delete (data_in);
6082 13339 : }
6083 :
6084 : /* Read ipcp jump functions. */
6085 :
6086 : void
6087 12284 : ipa_prop_read_jump_functions (void)
6088 : {
6089 12284 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
6090 12284 : struct lto_file_decl_data *file_data;
6091 12284 : unsigned int j = 0;
6092 :
6093 12284 : ipa_check_create_node_params ();
6094 12284 : ipa_check_create_edge_args ();
6095 12284 : ipa_register_cgraph_hooks ();
6096 :
6097 37907 : while ((file_data = file_data_vec[j++]))
6098 : {
6099 13339 : size_t len;
6100 13339 : const char *data
6101 13339 : = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
6102 : &len);
6103 13339 : if (data)
6104 13339 : ipa_prop_read_section (file_data, data, len);
6105 : }
6106 12284 : }
6107 :
6108 : /* Return true if the IPA-CP transformation summary TS is non-NULL and contains
6109 : useful info. */
6110 : static bool
6111 166140 : useful_ipcp_transformation_info_p (ipcp_transformation *ts)
6112 : {
6113 166140 : if (!ts)
6114 : return false;
6115 24264 : if (!vec_safe_is_empty (ts->m_agg_values)
6116 23860 : || !vec_safe_is_empty (ts->m_vr))
6117 23982 : return true;
6118 : return false;
6119 : }
6120 :
6121 : /* Write into OB IPA-CP transfromation summary TS describing NODE. */
6122 :
6123 : void
6124 11973 : write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
6125 : ipcp_transformation *ts)
6126 : {
6127 11973 : lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
6128 11973 : int node_ref = lto_symtab_encoder_encode (encoder, node);
6129 11973 : streamer_write_uhwi (ob, node_ref);
6130 :
6131 12172 : streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
6132 13604 : for (const ipa_argagg_value &av : ts->m_agg_values)
6133 : {
6134 1233 : struct bitpack_d bp;
6135 :
6136 1233 : stream_write_tree (ob, av.value, true);
6137 1233 : streamer_write_uhwi (ob, av.unit_offset);
6138 1233 : streamer_write_uhwi (ob, av.index);
6139 :
6140 1233 : bp = bitpack_create (ob->main_stream);
6141 1233 : bp_pack_value (&bp, av.by_ref, 1);
6142 1233 : bp_pack_value (&bp, av.killed, 1);
6143 1233 : streamer_write_bitpack (&bp);
6144 : }
6145 :
6146 : /* If all instances of this node are inlined, ipcp info is not useful. */
6147 11973 : if (!lto_symtab_encoder_only_for_inlining_p (encoder, node))
6148 : {
6149 21752 : streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
6150 53077 : for (const ipa_vr &parm_vr : ts->m_vr)
6151 20451 : parm_vr.streamer_write (ob);
6152 : }
6153 : else
6154 1095 : streamer_write_uhwi (ob, 0);
6155 11973 : }
6156 :
6157 : /* Stream in the aggregate value replacement chain for NODE from IB. */
6158 :
6159 : static void
6160 11973 : read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
6161 : data_in *data_in)
6162 : {
6163 11973 : unsigned int count, i;
6164 11973 : ipcp_transformation_initialize ();
6165 11973 : ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
6166 :
6167 11973 : count = streamer_read_uhwi (ib);
6168 11973 : if (count > 0)
6169 : {
6170 199 : vec_safe_grow_cleared (ts->m_agg_values, count, true);
6171 1432 : for (i = 0; i <count; i++)
6172 : {
6173 1233 : ipa_argagg_value *av = &(*ts->m_agg_values)[i];;
6174 :
6175 1233 : av->value = stream_read_tree (ib, data_in);
6176 1233 : av->unit_offset = streamer_read_uhwi (ib);
6177 1233 : av->index = streamer_read_uhwi (ib);
6178 :
6179 1233 : bitpack_d bp = streamer_read_bitpack (ib);
6180 1233 : av->by_ref = bp_unpack_value (&bp, 1);
6181 1233 : av->killed = bp_unpack_value (&bp, 1);
6182 : }
6183 : }
6184 :
6185 11973 : count = streamer_read_uhwi (ib);
6186 11973 : if (count > 0)
6187 : {
6188 10874 : vec_safe_grow_cleared (ts->m_vr, count, true);
6189 31325 : for (i = 0; i < count; i++)
6190 : {
6191 20451 : ipa_vr *parm_vr;
6192 20451 : parm_vr = &(*ts->m_vr)[i];
6193 20451 : parm_vr->streamer_read (ib, data_in);
6194 : }
6195 : }
6196 11973 : }
6197 :
6198 :
6199 : /* Write all aggregate replacement for nodes in set. */
6200 :
6201 : void
6202 8235 : ipcp_write_transformation_summaries (void)
6203 : {
6204 8235 : struct output_block *ob;
6205 8235 : unsigned int count = 0;
6206 8235 : lto_symtab_encoder_t encoder;
6207 :
6208 8235 : ob = create_output_block (LTO_section_ipcp_transform);
6209 8235 : encoder = ob->decl_state->symtab_node_encoder;
6210 8235 : ob->symbol = NULL;
6211 :
6212 232346 : for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
6213 : {
6214 107942 : toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
6215 107942 : cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
6216 107942 : if (!cnode)
6217 24872 : continue;
6218 83070 : ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
6219 83070 : if (useful_ipcp_transformation_info_p (ts)
6220 83070 : && lto_symtab_encoder_encode_body_p (encoder, cnode))
6221 11973 : count++;
6222 : }
6223 :
6224 8235 : streamer_write_uhwi (ob, count);
6225 :
6226 232346 : for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
6227 : {
6228 107942 : toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
6229 107942 : cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
6230 107942 : if (!cnode)
6231 24872 : continue;
6232 83070 : ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
6233 83070 : if (useful_ipcp_transformation_info_p (ts)
6234 83070 : && lto_symtab_encoder_encode_body_p (encoder, cnode))
6235 11973 : write_ipcp_transformation_info (ob, cnode, ts);
6236 : }
6237 8235 : ipa_write_return_summaries (ob);
6238 8235 : produce_asm (ob);
6239 8235 : destroy_output_block (ob);
6240 8235 : }
6241 :
6242 : /* Read replacements section in file FILE_DATA of length LEN with data
6243 : DATA. */
6244 :
6245 : static void
6246 8235 : read_replacements_section (struct lto_file_decl_data *file_data,
6247 : const char *data,
6248 : size_t len)
6249 : {
6250 8235 : const struct lto_function_header *header =
6251 : (const struct lto_function_header *) data;
6252 8235 : const int cfg_offset = sizeof (struct lto_function_header);
6253 8235 : const int main_offset = cfg_offset + header->cfg_size;
6254 8235 : const int string_offset = main_offset + header->main_size;
6255 8235 : class data_in *data_in;
6256 8235 : unsigned int i;
6257 8235 : unsigned int count;
6258 :
6259 8235 : lto_input_block ib_main ((const char *) data + main_offset,
6260 8235 : header->main_size, file_data);
6261 :
6262 8235 : data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6263 8235 : header->string_size, vNULL);
6264 8235 : count = streamer_read_uhwi (&ib_main);
6265 :
6266 20208 : for (i = 0; i < count; i++)
6267 : {
6268 11973 : unsigned int index;
6269 11973 : struct cgraph_node *node;
6270 11973 : lto_symtab_encoder_t encoder;
6271 :
6272 11973 : index = streamer_read_uhwi (&ib_main);
6273 11973 : encoder = file_data->symtab_node_encoder;
6274 11973 : node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
6275 : index));
6276 11973 : read_ipcp_transformation_info (&ib_main, node, data_in);
6277 : }
6278 8235 : ipa_read_return_summaries (&ib_main, file_data, data_in);
6279 8235 : lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
6280 : len);
6281 8235 : lto_data_in_delete (data_in);
6282 8235 : }
6283 :
6284 : /* Read IPA-CP aggregate replacements. */
6285 :
6286 : void
6287 8235 : ipcp_read_transformation_summaries (void)
6288 : {
6289 8235 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
6290 8235 : struct lto_file_decl_data *file_data;
6291 8235 : unsigned int j = 0;
6292 :
6293 24705 : while ((file_data = file_data_vec[j++]))
6294 : {
6295 8235 : size_t len;
6296 8235 : const char *data
6297 8235 : = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
6298 : &len);
6299 8235 : if (data)
6300 8235 : read_replacements_section (file_data, data, len);
6301 : }
6302 8235 : }
6303 :
6304 : /* Adjust the aggregate replacements in TS to reflect any parameter removals
6305 : which might have already taken place. If after adjustments there are no
6306 : aggregate replacements left, the m_agg_values will be set to NULL. In other
6307 : cases, it may be shrunk. */
6308 :
6309 : static void
6310 1895 : adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
6311 : {
6312 1895 : clone_info *cinfo = clone_info::get (node);
6313 1895 : if (!cinfo || !cinfo->param_adjustments)
6314 : return;
6315 :
6316 919 : auto_vec<int, 16> new_indices;
6317 919 : cinfo->param_adjustments->get_updated_indices (&new_indices);
6318 919 : bool removed_item = false;
6319 919 : unsigned dst_index = 0;
6320 919 : unsigned count = ts->m_agg_values->length ();
6321 5070 : for (unsigned i = 0; i < count; i++)
6322 : {
6323 4151 : ipa_argagg_value *v = &(*ts->m_agg_values)[i];
6324 4151 : gcc_checking_assert (v->index >= 0);
6325 :
6326 4151 : int new_idx = -1;
6327 4151 : if ((unsigned) v->index < new_indices.length ())
6328 2501 : new_idx = new_indices[v->index];
6329 :
6330 2501 : if (new_idx >= 0)
6331 : {
6332 1674 : v->index = new_idx;
6333 1674 : if (removed_item)
6334 23 : (*ts->m_agg_values)[dst_index] = *v;
6335 1674 : dst_index++;
6336 : }
6337 : else
6338 : removed_item = true;
6339 : }
6340 :
6341 919 : if (dst_index == 0)
6342 : {
6343 548 : ggc_free (ts->m_agg_values);
6344 548 : ts->m_agg_values = NULL;
6345 : }
6346 371 : else if (removed_item)
6347 35 : ts->m_agg_values->truncate (dst_index);
6348 :
6349 919 : return;
6350 919 : }
6351 :
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body, DESCS are its parameter descriptors,
     TS holds the IPA-CP transformation summary and *SC is set to true
     whenever a statement gets modified during the walk.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 ipcp_transformation *ts, bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_ts (ts), m_something_changed (sc) {}

  edge before_dom_children (basic_block) final override;
  /* Purge EH edges that became dead in the blocks recorded during the walk;
     return true if any edge was removed.  */
  bool cleanup_eh ()
  { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  /* Function body information used when analyzing loads from aggregates.  */
  struct ipa_func_body_info *m_fbi;
  /* Descriptors of the function's formal parameters.  */
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  /* Transformation summary with the known aggregate constant values.  */
  ipcp_transformation *m_ts;
  /* Out-flag set to true whenever a statement is modified.  */
  bool *m_something_changed;
  /* Indices of basic blocks that may need EH cleanup after the walk.  */
  auto_bitmap m_need_eh_cleanup;
};
6374 :
/* Visit BB and replace loads from aggregate parameter pieces whose value
   IPA-CP has determined to be constant with the constant itself.  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT bit_offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only scalar loads from memory are candidates.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Determine which parameter piece (INDEX, offset, size) the load
	 reads, if it can be proven to read from a parameter at all.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &bit_offset, &size, &by_ref))
	continue;
      unsigned unit_offset = bit_offset / BITS_PER_UNIT;
      ipa_argagg_value_list avl (m_ts);
      tree v = avl.get_value (index, unit_offset, by_ref);

      /* The recorded constant must exist and match the loaded size exactly.  */
      if (!v
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
	{
	  /* Try a value-preserving conversion first, then a bit-preserving
	     VIEW_CONVERT_EXPR when the sizes agree; otherwise give up.  */
	  if (fold_convertible_p (TREE_TYPE (rhs), v))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " const ");
		  print_generic_expr (dump_file, v);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* Replacing a load may make a throwing statement non-throwing;
	 remember the block so dead EH edges can be purged afterwards.  */
      if (maybe_clean_eh_stmt (stmt))
	bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
6466 :
6467 : /* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
6468 : - whether passed by reference or not is given by BY_REF - return that
6469 : constant. Otherwise return NULL_TREE. The is supposed to be used only
6470 : after clone materialization and transformation is done (because it asserts
6471 : that killed constants have been pruned). */
6472 :
6473 : tree
6474 4204110 : ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
6475 : HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
6476 : {
6477 4204110 : cgraph_node *node = cgraph_node::get (func->decl);
6478 4204110 : ipcp_transformation *ts = ipcp_get_transformation_summary (node);
6479 :
6480 4204110 : if (!ts || !ts->m_agg_values)
6481 : return NULL_TREE;
6482 :
6483 10314 : int index = ts->get_param_index (func->decl, parm);
6484 10314 : if (index < 0)
6485 : return NULL_TREE;
6486 :
6487 10261 : ipa_argagg_value_list avl (ts);
6488 10261 : unsigned unit_offset = bit_offset / BITS_PER_UNIT;
6489 10261 : const ipa_argagg_value *av = avl.get_elt (index, unit_offset);
6490 10261 : if (!av || av->by_ref != by_ref)
6491 : return NULL_TREE;
6492 1924 : gcc_assert (!av->killed);
6493 1924 : tree v = av->value;
6494 1924 : if (!v
6495 1924 : || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
6496 688 : return NULL_TREE;
6497 :
6498 : return v;
6499 : }
6500 :
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts
      || vec_safe_length (ts->m_vr) == 0
      || !ipa_vr_supported_type_p (TREE_TYPE (parm)))
    return false;

  int i = ts->get_param_index (current_function_decl, parm);
  if (i < 0)
    return false;
  /* In a clone, map the parameter back to the index the summary was
     computed for in the original function.  */
  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  if (!vr[i].known_p ())
    return false;
  value_range tmp;
  vr[i].get_vrange (tmp);
  if (tmp.undefined_p () || tmp.varying_p ())
    return false;
  /* Extract the known-bits information from the range and convert it to
     the caller's representation.  */
  irange_bitmask bm;
  bm = tmp.get_bitmask ();
  *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
  *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
  return true;
}
6538 :
/* Update value range of formal parameters of NODE as described in TS.  */

static void
ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
{
  if (vec_safe_is_empty (ts->m_vr))
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  /* If NODE is a clone with removed or changed parameters, translate the
     summary's indices (which refer to the original function) into the
     clone's current parameter indices.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  /* Parameter no longer present in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      /* Range info can only be attached to SSA names; skip parameters that
	 are not gimple registers or have no default definition.  */
      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known_p ())
	{
	  value_range tmp;
	  vr[i].get_vrange (tmp);

	  if (!tmp.undefined_p () && !tmp.varying_p ())
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  tmp.dump (dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, tmp);

	      /* For pointer parameters, additionally derive alignment
		 information from the known bits and attach it to the
		 pointer's SSA name.  */
	      if (POINTER_TYPE_P (TREE_TYPE (parm))
		  && opt_for_fn (node->decl, flag_ipa_bit_cp))
		{
		  irange_bitmask bm = tmp.get_bitmask ();
		  unsigned tem = bm.mask ().to_uhwi ();
		  unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
		  /* The lowest set bit of the mask bounds the alignment.  */
		  unsigned align = tem & -tem;
		  unsigned misalign = bitpos & (align - 1);

		  if (align > 1)
		    {
		      if (dump_file)
			{
			  fprintf (dump_file,
				   "Adjusting mask for param %u to ", i);
			  print_hex (bm.mask (), dump_file);
			  fprintf (dump_file, "\n");
			}

		      if (dump_file)
			fprintf (dump_file,
				 "Adjusting align: %u, misalign: %u\n",
				 align, misalign);

		      unsigned old_align, old_misalign;
		      struct ptr_info_def *pi = get_ptr_info (ddef);
		      bool old_known = get_ptr_info_alignment (pi, &old_align,
							       &old_misalign);

		      /* Never weaken alignment info that is already known
			 to be stronger.  */
		      if (old_known && old_align > align)
			{
			  if (dump_file)
			    {
			      fprintf (dump_file,
				       "But alignment was already %u.\n",
				       old_align);
			      if ((old_misalign & (align - 1)) != misalign)
				fprintf (dump_file,
					 "old_misalign (%u) and misalign "
					 "(%u) mismatch\n",
					 old_misalign, misalign);
			    }
			  continue;
			}

		      if (dump_file
			  && old_known
			  && ((misalign & (old_align - 1)) != old_misalign))
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) "
				 "mismatch\n",
				 old_misalign, misalign);

		      set_ptr_info_alignment (pi, align, misalign);
		    }
		}
	      else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
		{
		  /* Dump-only: report known bits of integral parameters.  */
		  irange &r = as_a<irange> (tmp);
		  irange_bitmask bm = r.get_bitmask ();
		  unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
		  if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
		    {
		      fprintf (dump_file,
			       "Adjusting mask for param %u to ", i);
		      print_hex (bm.mask (), dump_file);
		      fprintf (dump_file, "\n");
		    }
		}
	    }
	}
    }
}
6674 :
/* IPCP transformation phase doing propagation of aggregate values.  Apply
   the transformation summary recorded for NODE to the current function body:
   set parameter value ranges and substitute known aggregate constants for
   loads from parameter memory.  Returns TODO flags for the pass manager
   (TODO_update_ssa_only_virtuals if any memory access was replaced).  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts
      || (vec_safe_is_empty (ts->m_agg_values)
	  && vec_safe_is_empty (ts->m_vr)))
    return 0;

  ts->maybe_create_parm_idx_map (cfun->decl);
  ipcp_update_vr (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;

  /* Drop replacements for parameters that were removed when the clone was
     materialized; this may empty the vector entirely.  */
  adjust_agg_replacement_values (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    {
      if (dump_file)
	fprintf (dump_file, " All affected aggregate parameters were either "
		 "removed or converted into scalars, phase done.\n");
      return 0;
    }
  if (dump_file)
    {
      fprintf (dump_file, " Aggregate replacements:");
      ipa_argagg_value_list avs (ts);
      avs.dump (dump_file);
    }

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  /* Walk the dominator tree replacing loads from constant aggregate
     parameter pieces, then purge EH edges made dead by the replacements.  */
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  /* Killed constants have now served their purpose; prune them so later
     queries (e.g. ipcp_get_aggregate_const) never see them.  */
  ts->remove_argaggs_if ([](const ipa_argagg_value &v)
    {
      return v.killed;
    });

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}
6753 :
6754 : /* Record that current function return value range is VAL. */
6755 :
6756 : void
6757 737256 : ipa_record_return_value_range (value_range val)
6758 : {
6759 737256 : ipa_record_return_value_range_1
6760 737256 : (cgraph_node::get (current_function_decl), val);
6761 737256 : }
6762 :
6763 : /* Return true if value range of DECL is known and if so initialize RANGE. */
6764 :
6765 : bool
6766 11948006 : ipa_return_value_range (value_range &range, tree decl)
6767 : {
6768 11948006 : cgraph_node *n = cgraph_node::get (decl);
6769 11948006 : if (!n || !ipa_return_value_sum)
6770 : return false;
6771 9716401 : enum availability avail;
6772 9716401 : n = n->ultimate_alias_target (&avail);
6773 9716401 : if (avail < AVAIL_AVAILABLE)
6774 : return false;
6775 2089209 : if (n->decl != decl && !useless_type_conversion_p (TREE_TYPE (decl), TREE_TYPE (n->decl)))
6776 : return false;
6777 2089209 : ipa_return_value_summary *v = ipa_return_value_sum->get (n);
6778 2089209 : if (!v)
6779 : return false;
6780 623961 : v->vr->get_vrange (range);
6781 623961 : return true;
6782 : }
6783 :
6784 : /* Reset all state within ipa-prop.cc so that we can rerun the compiler
6785 : within the same process. For use by toplev::finalize. */
6786 :
6787 : void
6788 258746 : ipa_prop_cc_finalize (void)
6789 : {
6790 258746 : if (function_insertion_hook_holder)
6791 12021 : symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
6792 258746 : function_insertion_hook_holder = NULL;
6793 :
6794 258746 : if (ipa_edge_args_sum)
6795 12347 : ggc_delete (ipa_edge_args_sum);
6796 258746 : ipa_edge_args_sum = NULL;
6797 :
6798 258746 : if (ipa_node_params_sum)
6799 12347 : ggc_delete (ipa_node_params_sum);
6800 258746 : ipa_node_params_sum = NULL;
6801 258746 : }
6802 :
6803 : /* Return true if the two pass_through components of two jump functions are
6804 : known to be equivalent. AGG_JF denotes whether they are part of aggregate
6805 : functions or not. The function can be used before the IPA phase of IPA-CP
6806 : or inlining because it cannot cope with refdesc changes these passes can
6807 : carry out. */
6808 :
6809 : static bool
6810 43639 : ipa_agg_pass_through_jf_equivalent_p (ipa_pass_through_data *ipt1,
6811 : ipa_pass_through_data *ipt2,
6812 : bool agg_jf)
6813 :
6814 : {
6815 43639 : gcc_assert (agg_jf ||
6816 : (!ipt1->refdesc_decremented && !ipt2->refdesc_decremented));
6817 43639 : if (ipt1->operation != ipt2->operation
6818 43639 : || ipt1->formal_id != ipt2->formal_id
6819 43639 : || (!agg_jf && (ipt1->agg_preserved != ipt2->agg_preserved)))
6820 : return false;
6821 43639 : if (ipt1->operation != NOP_EXPR
6822 43639 : && (TYPE_MAIN_VARIANT (ipt1->op_type)
6823 6389 : != TYPE_MAIN_VARIANT (ipt2->op_type)))
6824 : return false;
6825 43631 : if (((ipt1->operand != NULL_TREE) != (ipt2->operand != NULL_TREE))
6826 43631 : || (ipt1->operand
6827 6381 : && !values_equal_for_ipcp_p (ipt1->operand, ipt2->operand)))
6828 0 : return false;
6829 : return true;
6830 : }
6831 :
/* Return true if the two aggregate jump functions are known to be equivalent.
   The function can be used before the IPA phase of IPA-CP or inlining because
   it cannot cope with refdesc changes these passes can carry out.  */

static bool
ipa_agg_jump_functions_equivalent_p (ipa_agg_jf_item *ajf1,
				     ipa_agg_jf_item *ajf2)
{
  /* Offset, kind and type must agree before the kind-specific payloads are
     compared.  */
  if (ajf1->offset != ajf2->offset
      || ajf1->jftype != ajf2->jftype
      || !types_compatible_p (ajf1->type, ajf2->type))
    return false;

  switch (ajf1->jftype)
    {
    case IPA_JF_CONST:
      if (!values_equal_for_ipcp_p (ajf1->value.constant,
				    ajf2->value.constant))
	return false;
      break;
    case IPA_JF_PASS_THROUGH:
      {
	ipa_pass_through_data *ipt1 = &ajf1->value.pass_through;
	ipa_pass_through_data *ipt2 = &ajf2->value.pass_through;
	if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, true))
	  return false;
      }
      break;
    case IPA_JF_LOAD_AGG:
      {
	/* A load-aggregate JF embeds a pass-through plus information about
	   the loaded location; both parts must match.  */
	ipa_load_agg_data *ila1 = &ajf1->value.load_agg;
	ipa_load_agg_data *ila2 = &ajf2->value.load_agg;
	if (!ipa_agg_pass_through_jf_equivalent_p (&ila1->pass_through,
						   &ila2->pass_through, true))
	  return false;
	if (ila1->offset != ila2->offset
	    || ila1->by_ref != ila2->by_ref
	    || !types_compatible_p (ila1->type, ila2->type))
	  return false;
      }
      break;
    default:
      gcc_unreachable ();
    }
  return true;
}
6878 :
/* Return true if the two jump functions are known to be equivalent.  The
   function can be used before the IPA phase of IPA-CP or inlining because it
   cannot cope with refdesc changes these passes can carry out.  */

bool
ipa_jump_functions_equivalent_p (ipa_jump_func *jf1, ipa_jump_func *jf2)
{
  if (jf1->type != jf2->type)
    return false;

  switch (jf1->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      {
	tree cst1 = ipa_get_jf_constant (jf1);
	tree cst2 = ipa_get_jf_constant (jf2);
	if (!values_equal_for_ipcp_p (cst1, cst2))
	  return false;

	/* Reference descriptors must be in the pristine, pre-IPA state
	   (single refcount, no duplicates) and present for both or neither;
	   this function cannot reason about modified refdescs.  */
	ipa_cst_ref_desc *rd1 = jfunc_rdesc_usable (jf1);
	ipa_cst_ref_desc *rd2 = jfunc_rdesc_usable (jf2);
	if (rd1 && rd2)
	  {
	    gcc_assert (rd1->refcount == 1
			&& rd2->refcount == 1);
	    gcc_assert (!rd1->next_duplicate && !rd2->next_duplicate);
	  }
	else if (rd1)
	  return false;
	else if (rd2)
	  return false;
      }
      break;
    case IPA_JF_PASS_THROUGH:
      {
	ipa_pass_through_data *ipt1 = &jf1->value.pass_through;
	ipa_pass_through_data *ipt2 = &jf2->value.pass_through;
	if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, false))
	  return false;
      }
      break;
    case IPA_JF_ANCESTOR:
      {
	ipa_ancestor_jf_data *ia1 = &jf1->value.ancestor;
	ipa_ancestor_jf_data *ia2 = &jf2->value.ancestor;

	if (ia1->formal_id != ia2->formal_id
	    || ia1->agg_preserved != ia2->agg_preserved
	    || ia1->keep_null != ia2->keep_null
	    || ia1->offset != ia2->offset)
	  return false;
      }
      break;
    default:
      gcc_unreachable ();
    }

  /* Value ranges must be present for both or neither, and equal.  */
  if (((jf1->m_vr != nullptr) != (jf2->m_vr != nullptr))
      || (jf1->m_vr && !jf1->m_vr->equal_p (*jf2->m_vr)))
    return false;

  /* Finally compare the aggregate parts item by item.  */
  unsigned alen = vec_safe_length (jf1->agg.items);
  if (vec_safe_length (jf2->agg.items) != alen)
    return false;

  if (!alen)
    return true;

  if (jf1->agg.by_ref != jf2->agg.by_ref)
    return false;

  for (unsigned i = 0 ; i < alen; i++)
    if (!ipa_agg_jump_functions_equivalent_p (&(*jf1->agg.items)[i],
					      &(*jf2->agg.items)[i]))
      return false;

  return true;
}
6959 :
6960 : #include "gt-ipa-prop.h"
|