Line data Source code
1 : /* Interprocedural analyses.
2 : Copyright (C) 2005-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "rtl.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "alloc-pool.h"
28 : #include "tree-pass.h"
29 : #include "ssa.h"
30 : #include "tree-streamer.h"
31 : #include "cgraph.h"
32 : #include "diagnostic.h"
33 : #include "fold-const.h"
34 : #include "gimple-iterator.h"
35 : #include "gimple-fold.h"
36 : #include "tree-eh.h"
37 : #include "calls.h"
38 : #include "stor-layout.h"
39 : #include "print-tree.h"
40 : #include "gimplify.h"
41 : #include "gimplify-me.h"
42 : #include "gimple-walk.h"
43 : #include "symbol-summary.h"
44 : #include "sreal.h"
45 : #include "ipa-cp.h"
46 : #include "ipa-prop.h"
47 : #include "tree-cfg.h"
48 : #include "tree-dfa.h"
49 : #include "tree-inline.h"
50 : #include "ipa-fnsummary.h"
51 : #include "gimple-pretty-print.h"
52 : #include "ipa-utils.h"
53 : #include "dbgcnt.h"
54 : #include "domwalk.h"
55 : #include "builtins.h"
56 : #include "tree-cfgcleanup.h"
57 : #include "options.h"
58 : #include "symtab-clones.h"
59 : #include "attr-fnspec.h"
60 : #include "gimple-range.h"
61 : #include "value-range-storage.h"
62 : #include "vr-values.h"
63 : #include "lto-streamer.h"
64 : #include "attribs.h"
65 : #include "attr-callback.h"
66 :
67 : /* Function summary where the parameter infos are actually stored. */
68 : ipa_node_params_t *ipa_node_params_sum = NULL;
69 :
70 : function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
71 :
72 : /* Edge summary for IPA-CP edge information. */
73 : ipa_edge_args_sum_t *ipa_edge_args_sum;
74 :
75 : /* Traits for a hash table for reusing ranges. */
76 :
77 : struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
78 : {
79 : typedef ipa_vr *value_type;
80 : typedef const vrange *compare_type;
81 : static hashval_t
82 17386263 : hash (const ipa_vr *p)
83 : {
84 : // This never get called, except in the verification code, as
85 : // ipa_get_value_range() calculates the hash itself. This
86 : // function is mostly here for completness' sake.
87 17386263 : value_range vr;
88 17386263 : p->get_vrange (vr);
89 17386263 : inchash::hash hstate;
90 17386263 : add_vrange (vr, hstate);
91 17386263 : return hstate.end ();
92 17386263 : }
93 : static bool
94 24109724 : equal (const ipa_vr *a, const vrange *b)
95 : {
96 24109724 : return a->equal_p (*b);
97 : }
98 : static const bool empty_zero_p = true;
99 : static void
100 1816 : mark_empty (ipa_vr *&p)
101 : {
102 1816 : p = NULL;
103 : }
104 : static bool
105 : is_empty (const ipa_vr *p)
106 : {
107 : return p == NULL;
108 : }
109 : static bool
110 44319679 : is_deleted (const ipa_vr *p)
111 : {
112 44319679 : return p == reinterpret_cast<const ipa_vr *> (1);
113 : }
114 : static void
115 149064 : mark_deleted (ipa_vr *&p)
116 : {
117 149064 : p = reinterpret_cast<ipa_vr *> (1);
118 : }
119 : };
120 :
121 : /* Hash table for avoid repeated allocations of equal ranges. */
122 : static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
123 :
124 : /* Holders of ipa cgraph hooks: */
125 : static struct cgraph_node_hook_list *function_insertion_hook_holder;
126 :
127 : /* Description of a reference to an IPA constant. */
128 : struct ipa_cst_ref_desc
129 : {
130 : /* Edge that corresponds to the statement which took the reference. */
131 : struct cgraph_edge *cs;
132 : /* Linked list of duplicates created when call graph edges are cloned. */
133 : struct ipa_cst_ref_desc *next_duplicate;
134 : /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
135 : is out of control. */
136 : int refcount;
137 : };
138 :
139 : /* Allocation pool for reference descriptions. */
140 :
141 : static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
142 : ("IPA-PROP ref descriptions");
143 :
144 656350 : ipa_vr::ipa_vr ()
145 656350 : : m_storage (NULL),
146 656350 : m_type (NULL)
147 : {
148 656350 : }
149 :
150 757978 : ipa_vr::ipa_vr (const vrange &r)
151 757978 : : m_storage (ggc_alloc_vrange_storage (r)),
152 757978 : m_type (r.type ())
153 : {
154 757978 : }
155 :
156 : bool
157 24109724 : ipa_vr::equal_p (const vrange &r) const
158 : {
159 24109724 : gcc_checking_assert (!r.undefined_p ());
160 24109724 : return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
161 : }
162 :
163 : bool
164 85205 : ipa_vr::equal_p (const ipa_vr &o) const
165 : {
166 85205 : if (!known_p ())
167 0 : return !o.known_p ();
168 :
169 85205 : if (!types_compatible_p (m_type, o.m_type))
170 : return false;
171 :
172 85205 : value_range r;
173 85205 : o.get_vrange (r);
174 85205 : return m_storage->equal_p (r);
175 85205 : }
176 :
177 : void
178 28988752 : ipa_vr::get_vrange (value_range &r) const
179 : {
180 28988752 : r.set_type (m_type);
181 28988752 : m_storage->get_vrange (r, m_type);
182 28988752 : }
183 :
184 : void
185 1956 : ipa_vr::set_unknown ()
186 : {
187 1956 : if (m_storage)
188 1956 : ggc_free (m_storage);
189 :
190 1956 : m_storage = NULL;
191 1956 : }
192 :
193 : void
194 611802 : ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
195 : {
196 611802 : struct bitpack_d bp = streamer_read_bitpack (ib);
197 611802 : bool known = bp_unpack_value (&bp, 1);
198 611802 : if (known)
199 : {
200 437999 : value_range vr;
201 437999 : streamer_read_value_range (ib, data_in, vr);
202 437999 : if (!m_storage || !m_storage->fits_p (vr))
203 : {
204 437999 : if (m_storage)
205 0 : ggc_free (m_storage);
206 437999 : m_storage = ggc_alloc_vrange_storage (vr);
207 : }
208 437999 : m_storage->set_vrange (vr);
209 437999 : m_type = vr.type ();
210 437999 : }
211 : else
212 : {
213 173803 : m_storage = NULL;
214 173803 : m_type = NULL;
215 : }
216 611802 : }
217 :
218 : void
219 468831 : ipa_vr::streamer_write (output_block *ob) const
220 : {
221 468831 : struct bitpack_d bp = bitpack_create (ob->main_stream);
222 468831 : bp_pack_value (&bp, !!m_storage, 1);
223 468831 : streamer_write_bitpack (&bp);
224 468831 : if (m_storage)
225 : {
226 466321 : value_range vr (m_type);
227 466321 : m_storage->get_vrange (vr, m_type);
228 466321 : streamer_write_vrange (ob, vr);
229 466321 : }
230 468831 : }
231 :
232 : void
233 723 : ipa_vr::dump (FILE *out) const
234 : {
235 723 : if (known_p ())
236 : {
237 723 : value_range vr (m_type);
238 723 : m_storage->get_vrange (vr, m_type);
239 723 : vr.dump (out);
240 723 : }
241 : else
242 0 : fprintf (out, "NO RANGE");
243 723 : }
244 :
245 : // These stubs are because we use an ipa_vr in a hash_traits and
246 : // hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
247 : // picking up the gt_ggc_mx (T *) version.
248 : void
249 0 : gt_pch_nx (ipa_vr *&x)
250 : {
251 0 : return gt_pch_nx ((ipa_vr *) x);
252 : }
253 :
254 : void
255 0 : gt_ggc_mx (ipa_vr *&x)
256 : {
257 0 : return gt_ggc_mx ((ipa_vr *) x);
258 : }
259 :
260 : /* Analysis summery of function call return value. */
261 : struct GTY(()) ipa_return_value_summary
262 : {
263 : /* Known value range.
264 : This needs to be wrapped in struccture due to specific way
265 : we allocate ipa_vr. */
266 : ipa_vr *vr;
267 : };
268 :
269 : /* Function summary for return values. */
270 : class ipa_return_value_sum_t : public function_summary <ipa_return_value_summary *>
271 : {
272 : public:
273 86805 : ipa_return_value_sum_t (symbol_table *table, bool ggc):
274 173610 : function_summary <ipa_return_value_summary *> (table, ggc) { }
275 :
276 : /* Hook that is called by summary when a node is duplicated. */
277 585000 : void duplicate (cgraph_node *,
278 : cgraph_node *,
279 : ipa_return_value_summary *data,
280 : ipa_return_value_summary *data2) final override
281 : {
282 585000 : *data2=*data;
283 585000 : }
284 : };
285 :
286 : /* Structure holding the information that all stores to FLD_OFFSET (measured in
287 : bytes) of a particular record type REC_TYPE was storing a pointer to
288 : function FN or that there were multiple functions, which is denoted by fn
289 : being nullptr. */
290 :
291 : struct GTY((for_user)) noted_fnptr_store
292 : {
293 : tree rec_type;
294 : tree fn;
295 : unsigned fld_offset;
296 : };
297 :
298 : /* Hash traits to have a hash table of noted_fnptr_stores. */
299 :
300 : struct noted_fnptr_hasher : ggc_ptr_hash <noted_fnptr_store>
301 : {
302 : static hashval_t hash (noted_fnptr_store *);
303 : static bool equal (noted_fnptr_store *,
304 : noted_fnptr_store *);
305 : };
306 :
307 : hashval_t
308 602775 : noted_fnptr_hasher::hash (noted_fnptr_store *val)
309 : {
310 1205550 : return iterative_hash_host_wide_int (val->fld_offset,
311 602775 : TYPE_UID (val->rec_type));
312 : }
313 :
314 : bool
315 522562 : noted_fnptr_hasher::equal (noted_fnptr_store *v1,
316 : noted_fnptr_store *v2)
317 : {
318 522562 : return (v1->rec_type == v2->rec_type
319 522562 : && v1->fld_offset == v2->fld_offset);
320 : }
321 :
322 :
323 : /* Structore holding the information that all stores to OFFSET of a particular
324 : record type RECTYPE was storing a pointer to specific function or that there
325 : were multiple such functions. */
326 :
327 : static GTY(()) hash_table <noted_fnptr_hasher> *noted_fnptrs_in_records;
328 :
329 : /* Variable hoding the return value summary. */
330 : static GTY(()) function_summary <ipa_return_value_summary *> *ipa_return_value_sum;
331 :
332 :
333 : /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
334 : with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
335 :
336 : static bool
337 4039970 : ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
338 : {
339 4039970 : tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
340 :
341 4039970 : if (!fs_opts)
342 : return false;
343 549890 : return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
344 : }
345 :
346 : /* Return index of the formal whose tree is PTREE in function which corresponds
347 : to INFO. */
348 :
349 : static int
350 42287314 : ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
351 : tree ptree)
352 : {
353 42287314 : int i, count;
354 :
355 42287314 : count = vec_safe_length (descriptors);
356 86566059 : for (i = 0; i < count; i++)
357 76152110 : if ((*descriptors)[i].decl_or_type == ptree)
358 : return i;
359 :
360 : return -1;
361 : }
362 :
363 : /* Return index of the formal whose tree is PTREE in function which corresponds
364 : to INFO. */
365 :
366 : int
367 26333704 : ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
368 : {
369 26333704 : return ipa_get_param_decl_index_1 (info->descriptors, ptree);
370 : }
371 :
372 : static void
373 : ipa_duplicate_jump_function (cgraph_edge *src, cgraph_edge *dst,
374 : ipa_jump_func *src_jf, ipa_jump_func *dst_jf);
375 :
376 : /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
377 : NODE. */
378 :
379 : static void
380 5479269 : ipa_populate_param_decls (struct cgraph_node *node,
381 : vec<ipa_param_descriptor, va_gc> &descriptors)
382 : {
383 5479269 : tree fndecl;
384 5479269 : tree fnargs;
385 5479269 : tree parm;
386 5479269 : int param_num;
387 :
388 5479269 : fndecl = node->decl;
389 5479269 : gcc_assert (gimple_has_body_p (fndecl));
390 5479269 : fnargs = DECL_ARGUMENTS (fndecl);
391 5479269 : param_num = 0;
392 18034602 : for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
393 : {
394 12555333 : descriptors[param_num].decl_or_type = parm;
395 12555333 : unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
396 12555333 : descriptors[param_num].move_cost = cost;
397 : /* Watch overflow, move_cost is a bitfield. */
398 12555333 : gcc_checking_assert (cost == descriptors[param_num].move_cost);
399 12555333 : param_num++;
400 : }
401 5479269 : }
402 :
403 : /* Return how many formal parameters FNDECL has. */
404 :
405 : int
406 15052778 : count_formal_params (tree fndecl)
407 : {
408 15052778 : tree parm;
409 15052778 : int count = 0;
410 15052778 : gcc_assert (gimple_has_body_p (fndecl));
411 :
412 46471126 : for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
413 31418348 : count++;
414 :
415 15052778 : return count;
416 : }
417 :
418 : /* Return the declaration of Ith formal parameter of the function corresponding
419 : to INFO. Note there is no setter function as this array is built just once
420 : using ipa_initialize_node_params. */
421 :
422 : void
423 609 : ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
424 : {
425 609 : fprintf (file, "param #%i", i);
426 609 : if ((*info->descriptors)[i].decl_or_type)
427 : {
428 609 : fprintf (file, " ");
429 609 : print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
430 : }
431 609 : }
432 :
433 : /* If necessary, allocate vector of parameter descriptors in info of NODE.
434 : Return true if they were allocated, false if not. */
435 :
436 : static bool
437 6357983 : ipa_alloc_node_params (struct cgraph_node *node, int param_count)
438 : {
439 6357983 : ipa_node_params *info = ipa_node_params_sum->get_create (node);
440 :
441 6357983 : if (!info->descriptors && param_count)
442 : {
443 5531035 : vec_safe_grow_cleared (info->descriptors, param_count, true);
444 5531035 : return true;
445 : }
446 : else
447 : return false;
448 : }
449 :
450 : /* Initialize the ipa_node_params structure associated with NODE by counting
451 : the function parameters, creating the descriptors and populating their
452 : param_decls. */
453 :
454 : void
455 6280414 : ipa_initialize_node_params (struct cgraph_node *node)
456 : {
457 6280414 : ipa_node_params *info = ipa_node_params_sum->get_create (node);
458 :
459 6280414 : if (!info->descriptors
460 6280414 : && ipa_alloc_node_params (node, count_formal_params (node->decl)))
461 5477920 : ipa_populate_param_decls (node, *info->descriptors);
462 6280414 : }
463 :
464 : /* Print VAL which is extracted from a jump function to F. */
465 :
466 : void
467 1173 : ipa_print_constant_value (FILE *f, tree val)
468 : {
469 1173 : print_generic_expr (f, val);
470 :
471 : /* This is in keeping with values_equal_for_ipcp_p. */
472 1173 : if (TREE_CODE (val) == ADDR_EXPR
473 1173 : && (TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL
474 249 : || (TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
475 102 : && DECL_IN_CONSTANT_POOL (TREE_OPERAND (val, 0)))))
476 : {
477 0 : fputs (" -> ", f);
478 0 : print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
479 : }
480 1173 : }
481 :
482 : /* Print contents of JFUNC to F. If CTX is non-NULL, dump it too. */
483 :
484 : DEBUG_FUNCTION void
485 1023 : ipa_dump_jump_function (FILE *f, ipa_jump_func *jump_func,
486 : class ipa_polymorphic_call_context *ctx)
487 : {
488 1023 : enum jump_func_type type = jump_func->type;
489 :
490 1023 : if (type == IPA_JF_UNKNOWN)
491 251 : fprintf (f, "UNKNOWN\n");
492 772 : else if (type == IPA_JF_CONST)
493 : {
494 303 : fprintf (f, "CONST: ");
495 303 : ipa_print_constant_value (f, jump_func->value.constant.value);
496 303 : fprintf (f, "\n");
497 : }
498 469 : else if (type == IPA_JF_PASS_THROUGH)
499 : {
500 408 : fprintf (f, "PASS THROUGH: ");
501 408 : fprintf (f, "%d, op %s",
502 : jump_func->value.pass_through.formal_id,
503 : get_tree_code_name(jump_func->value.pass_through.operation));
504 408 : if (jump_func->value.pass_through.operation != NOP_EXPR)
505 : {
506 31 : fprintf (f, " ");
507 31 : if (jump_func->value.pass_through.operand)
508 27 : print_generic_expr (f, jump_func->value.pass_through.operand);
509 31 : fprintf (f, " (in type ");
510 31 : print_generic_expr (f, jump_func->value.pass_through.op_type);
511 31 : fprintf (f, ")");
512 : }
513 408 : if (jump_func->value.pass_through.agg_preserved)
514 134 : fprintf (f, ", agg_preserved");
515 408 : if (jump_func->value.pass_through.refdesc_decremented)
516 0 : fprintf (f, ", refdesc_decremented");
517 408 : fprintf (f, "\n");
518 : }
519 61 : else if (type == IPA_JF_ANCESTOR)
520 : {
521 61 : fprintf (f, "ANCESTOR: ");
522 61 : fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
523 : jump_func->value.ancestor.formal_id,
524 : jump_func->value.ancestor.offset);
525 61 : if (jump_func->value.ancestor.agg_preserved)
526 29 : fprintf (f, ", agg_preserved");
527 61 : if (jump_func->value.ancestor.keep_null)
528 4 : fprintf (f, ", keep_null");
529 61 : fprintf (f, "\n");
530 : }
531 :
532 1023 : if (jump_func->agg.items)
533 : {
534 91 : struct ipa_agg_jf_item *item;
535 91 : int j;
536 :
537 182 : fprintf (f, " Aggregate passed by %s:\n",
538 91 : jump_func->agg.by_ref ? "reference" : "value");
539 377 : FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
540 : {
541 195 : fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
542 : item->offset);
543 195 : fprintf (f, "type: ");
544 195 : print_generic_expr (f, item->type);
545 195 : fprintf (f, ", ");
546 195 : if (item->jftype == IPA_JF_PASS_THROUGH)
547 7 : fprintf (f, "PASS THROUGH: %d,",
548 : item->value.pass_through.formal_id);
549 188 : else if (item->jftype == IPA_JF_LOAD_AGG)
550 : {
551 11 : fprintf (f, "LOAD AGG: %d",
552 : item->value.pass_through.formal_id);
553 11 : fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
554 : item->value.load_agg.offset,
555 11 : item->value.load_agg.by_ref ? "reference"
556 : : "value");
557 : }
558 :
559 195 : if (item->jftype == IPA_JF_PASS_THROUGH
560 195 : || item->jftype == IPA_JF_LOAD_AGG)
561 : {
562 18 : fprintf (f, " op %s",
563 : get_tree_code_name (item->value.pass_through.operation));
564 18 : if (item->value.pass_through.operation != NOP_EXPR)
565 : {
566 9 : fprintf (f, " ");
567 9 : if (item->value.pass_through.operand)
568 8 : print_generic_expr (f, item->value.pass_through.operand);
569 9 : fprintf (f, " (in type ");
570 9 : print_generic_expr (f, jump_func->value.pass_through.op_type);
571 9 : fprintf (f, ")");
572 : }
573 : }
574 177 : else if (item->jftype == IPA_JF_CONST)
575 : {
576 177 : fprintf (f, "CONST: ");
577 177 : ipa_print_constant_value (f, item->value.constant);
578 : }
579 0 : else if (item->jftype == IPA_JF_UNKNOWN)
580 0 : fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
581 0 : tree_to_uhwi (TYPE_SIZE (item->type)));
582 195 : fprintf (f, "\n");
583 : }
584 : }
585 :
586 1023 : if (ctx && !ctx->useless_p ())
587 : {
588 378 : fprintf (f, " Context: ");
589 378 : ctx->dump (dump_file);
590 : }
591 :
592 1023 : if (jump_func->m_vr)
593 : {
594 723 : fprintf (f, " ");
595 723 : jump_func->m_vr->dump (f);
596 723 : fprintf (f, "\n");
597 : }
598 : else
599 300 : fprintf (f, " Unknown VR\n");
600 1023 : }
601 :
602 : /* Print the jump functions associated with call graph edge CS to file F. */
603 :
604 : static void
605 725 : ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
606 : {
607 725 : ipa_edge_args *args = ipa_edge_args_sum->get (cs);
608 725 : int count = ipa_get_cs_argument_count (args);
609 :
610 1748 : for (int i = 0; i < count; i++)
611 : {
612 1023 : struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
613 1023 : class ipa_polymorphic_call_context *ctx
614 1023 : = ipa_get_ith_polymorhic_call_context (args, i);
615 :
616 1023 : fprintf (f, " param %d: ", i);
617 1023 : ipa_dump_jump_function (f, jump_func, ctx);
618 : }
619 725 : }
620 :
621 :
622 : /* Print the jump functions of all arguments on all call graph edges going from
623 : NODE to file F. */
624 :
625 : void
626 1069 : ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
627 : {
628 1069 : struct cgraph_edge *cs;
629 :
630 1069 : fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
631 2134 : for (cs = node->callees; cs; cs = cs->next_callee)
632 : {
633 :
634 1065 : fprintf (f, " callsite %s -> %s : \n",
635 : node->dump_name (),
636 1065 : cs->callee->dump_name ());
637 1065 : if (!ipa_edge_args_info_available_for_edge_p (cs))
638 487 : fprintf (f, " no arg info\n");
639 : else
640 578 : ipa_print_node_jump_functions_for_edge (f, cs);
641 : }
642 :
643 1216 : for (cs = node->indirect_calls; cs; cs = cs->next_callee)
644 : {
645 147 : fprintf (f, " ");
646 147 : cs->indirect_info->dump (f, false);
647 147 : if (cs->call_stmt)
648 : {
649 131 : fprintf (f, ", for stmt ");
650 131 : print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
651 : }
652 : else
653 16 : fprintf (f, "\n");
654 147 : if (!ipa_edge_args_info_available_for_edge_p (cs))
655 0 : fprintf (f, " no arg info\n");
656 : else
657 147 : ipa_print_node_jump_functions_for_edge (f, cs);
658 : }
659 1069 : }
660 :
661 : /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
662 :
663 : void
664 161 : ipa_print_all_jump_functions (FILE *f)
665 : {
666 161 : struct cgraph_node *node;
667 :
668 161 : fprintf (f, "\nJump functions:\n");
669 1216 : FOR_EACH_FUNCTION (node)
670 : {
671 1055 : ipa_print_node_jump_functions (f, node);
672 : }
673 161 : }
674 :
675 : /* Set jfunc to be a know-really nothing jump function. */
676 :
677 : static void
678 525960 : ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
679 : {
680 525960 : jfunc->type = IPA_JF_UNKNOWN;
681 511887 : }
682 :
683 : /* Set DST to be a copy of another SRC. The two functions will share their
684 : rdesc. */
685 :
686 : static void
687 881139 : ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
688 : struct ipa_jump_func *src)
689 :
690 : {
691 881139 : gcc_checking_assert (src->type == IPA_JF_CONST);
692 881139 : dst->type = IPA_JF_CONST;
693 881139 : dst->value.constant = src->value.constant;
694 881139 : }
695 :
696 : /* Set DST to be a copy of another jump function SRC but possibly adjust it to
697 : a new passed type PARM_TYPE. If the adjustment fails, the jump function can
698 : end up being set to the unknown type. If the conversion is not necessary or
699 : it succeeds and if the destination rdesc has not been already used, the two
700 : functions will share their rdesc. */
701 :
702 : static void
703 157788 : ipa_convert_prop_cst_jf (struct ipa_jump_func *dst,
704 : struct ipa_jump_func *src,
705 : tree parm_type)
706 :
707 : {
708 157788 : gcc_checking_assert (src->type == IPA_JF_CONST);
709 157788 : tree new_val = ipacp_value_safe_for_type (parm_type,
710 : ipa_get_jf_constant (src));
711 157788 : if (new_val)
712 : {
713 157788 : bool rd = ipa_get_jf_pass_through_refdesc_decremented (dst);
714 :
715 157788 : dst->type = IPA_JF_CONST;
716 157788 : dst->value.constant.value = new_val;
717 157788 : if (!rd)
718 157763 : dst->value.constant.rdesc = src->value.constant.rdesc;
719 : else
720 25 : ipa_zap_jf_refdesc (dst);
721 : }
722 : else
723 0 : ipa_set_jf_unknown (dst);
724 157788 : }
725 :
726 : /* Set JFUNC to be a constant jmp function. */
727 :
728 : static void
729 2759503 : ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
730 : struct cgraph_edge *cs)
731 : {
732 2759503 : jfunc->type = IPA_JF_CONST;
733 2759503 : jfunc->value.constant.value = unshare_expr_without_location (constant);
734 :
735 2759503 : if (TREE_CODE (constant) == ADDR_EXPR
736 2759503 : && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
737 988008 : || (VAR_P (TREE_OPERAND (constant, 0))
738 349752 : && TREE_STATIC (TREE_OPERAND (constant, 0)))))
739 : {
740 387243 : struct ipa_cst_ref_desc *rdesc;
741 :
742 387243 : rdesc = ipa_refdesc_pool.allocate ();
743 387243 : rdesc->cs = cs;
744 387243 : rdesc->next_duplicate = NULL;
745 387243 : rdesc->refcount = 1;
746 387243 : jfunc->value.constant.rdesc = rdesc;
747 : }
748 : else
749 2372260 : jfunc->value.constant.rdesc = NULL;
750 2759503 : }
751 :
752 : /* Set JFUNC to be a simple pass-through jump function. */
753 : static void
754 1378151 : ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
755 : bool agg_preserved)
756 : {
757 1378151 : jfunc->type = IPA_JF_PASS_THROUGH;
758 1378151 : jfunc->value.pass_through.operand = NULL_TREE;
759 1378151 : jfunc->value.pass_through.op_type = NULL_TREE;
760 1378151 : jfunc->value.pass_through.formal_id = formal_id;
761 1378151 : jfunc->value.pass_through.operation = NOP_EXPR;
762 1378151 : jfunc->value.pass_through.agg_preserved = agg_preserved;
763 1378151 : jfunc->value.pass_through.refdesc_decremented = false;
764 1244533 : }
765 :
766 : /* Set JFUNC to be an unary pass through jump function. */
767 :
768 : static void
769 1021 : ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
770 : enum tree_code operation, tree op_type)
771 : {
772 1021 : jfunc->type = IPA_JF_PASS_THROUGH;
773 1021 : jfunc->value.pass_through.operand = NULL_TREE;
774 1021 : jfunc->value.pass_through.op_type = op_type;
775 1021 : jfunc->value.pass_through.formal_id = formal_id;
776 1021 : jfunc->value.pass_through.operation = operation;
777 1021 : jfunc->value.pass_through.agg_preserved = false;
778 1021 : jfunc->value.pass_through.refdesc_decremented = false;
779 1021 : }
780 : /* Set JFUNC to be an arithmetic pass through jump function. */
781 :
782 : static void
783 42234 : ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
784 : tree operand, enum tree_code operation,
785 : tree op_type)
786 : {
787 42234 : jfunc->type = IPA_JF_PASS_THROUGH;
788 0 : jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
789 42234 : jfunc->value.pass_through.op_type = op_type;
790 42234 : jfunc->value.pass_through.formal_id = formal_id;
791 42234 : jfunc->value.pass_through.operation = operation;
792 42234 : jfunc->value.pass_through.agg_preserved = false;
793 42234 : jfunc->value.pass_through.refdesc_decremented = false;
794 42234 : }
795 :
796 : /* Set JFUNC to be an ancestor jump function. */
797 :
798 : static void
799 183872 : ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
800 : int formal_id, bool agg_preserved, bool keep_null)
801 : {
802 183872 : jfunc->type = IPA_JF_ANCESTOR;
803 183872 : jfunc->value.ancestor.formal_id = formal_id;
804 183872 : jfunc->value.ancestor.offset = offset;
805 183872 : jfunc->value.ancestor.agg_preserved = agg_preserved;
806 183872 : jfunc->value.ancestor.keep_null = keep_null;
807 183204 : }
808 :
809 : /* Get IPA BB information about the given BB. FBI is the context of analyzis
810 : of this function body. */
811 :
812 : static struct ipa_bb_info *
813 33374150 : ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
814 : {
815 27838721 : gcc_checking_assert (fbi);
816 33374150 : return &fbi->bb_infos[bb->index];
817 : }
818 :
819 : /* Structure to be passed in between detect_type_change and
820 : check_stmt_for_type_change. */
821 :
822 : struct prop_type_change_info
823 : {
824 : /* Offset into the object where there is the virtual method pointer we are
825 : looking for. */
826 : HOST_WIDE_INT offset;
827 : /* The declaration or SSA_NAME pointer of the base that we are checking for
828 : type change. */
829 : tree object;
830 : /* Set to true if dynamic type change has been detected. */
831 : bool type_maybe_changed;
832 : };
833 :
834 : /* Return true if STMT can modify a virtual method table pointer.
835 :
836 : This function makes special assumptions about both constructors and
837 : destructors which are all the functions that are allowed to alter the VMT
838 : pointers. It assumes that destructors begin with assignment into all VMT
839 : pointers and that constructors essentially look in the following way:
840 :
841 : 1) The very first thing they do is that they call constructors of ancestor
842 : sub-objects that have them.
843 :
844 : 2) Then VMT pointers of this and all its ancestors is set to new values
845 : corresponding to the type corresponding to the constructor.
846 :
847 : 3) Only afterwards, other stuff such as constructor of member sub-objects
848 : and the code written by the user is run. Only this may include calling
849 : virtual functions, directly or indirectly.
850 :
851 : There is no way to call a constructor of an ancestor sub-object in any
852 : other way.
853 :
854 : This means that we do not have to care whether constructors get the correct
855 : type information because they will always change it (in fact, if we define
856 : the type to be given by the VMT pointer, it is undefined).
857 :
858 : The most important fact to derive from the above is that if, for some
859 : statement in the section 3, we try to detect whether the dynamic type has
860 : changed, we can safely ignore all calls as we examine the function body
861 : backwards until we reach statements in section 2 because these calls cannot
862 : be ancestor constructors or destructors (if the input is not bogus) and so
863 : do not change the dynamic type (this holds true only for automatically
864 : allocated objects but at the moment we devirtualize only these). We then
865 : must detect that statements in section 2 change the dynamic type and can try
866 : to derive the new type. That is enough and we can stop, we will never see
867 : the calls into constructors of sub-objects in this code. Therefore we can
868 : safely ignore all call statements that we traverse.
869 : */
870 :
871 : static bool
872 9 : stmt_may_be_vtbl_ptr_store (gimple *stmt)
873 : {
874 9 : if (is_gimple_call (stmt))
875 : return false;
876 0 : if (gimple_clobber_p (stmt))
877 : return false;
878 0 : else if (is_gimple_assign (stmt))
879 : {
880 0 : tree lhs = gimple_assign_lhs (stmt);
881 :
882 0 : if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
883 : {
884 0 : if (flag_strict_aliasing
885 0 : && !POINTER_TYPE_P (TREE_TYPE (lhs)))
886 : return false;
887 :
888 0 : if (TREE_CODE (lhs) == COMPONENT_REF
889 0 : && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
890 0 : return false;
891 : /* In the future we might want to use get_ref_base_and_extent to find
892 : if there is a field corresponding to the offset and if so, proceed
893 : almost like if it was a component ref. */
894 : }
895 : }
896 : return true;
897 : }
898 :
899 : /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
900 : to check whether a particular statement may modify the virtual table
901 : pointerIt stores its result into DATA, which points to a
902 : prop_type_change_info structure. */
903 :
904 : static bool
905 9 : check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
906 : {
907 9 : gimple *stmt = SSA_NAME_DEF_STMT (vdef);
908 9 : struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
909 :
910 9 : if (stmt_may_be_vtbl_ptr_store (stmt))
911 : {
912 0 : tci->type_maybe_changed = true;
913 0 : return true;
914 : }
915 : else
916 : return false;
917 : }
918 :
919 : /* See if ARG is PARAM_DECl describing instance passed by pointer
920 : or reference in FUNCTION. Return false if the dynamic type may change
921 : in between beggining of the function until CALL is invoked.
922 :
923 : Generally functions are not allowed to change type of such instances,
924 : but they call destructors. We assume that methods cannot destroy the THIS
925 : pointer. Also as a special cases, constructor and destructors may change
926 : type of the THIS pointer. */
927 :
928 : static bool
929 9616 : param_type_may_change_p (tree function, tree arg, gimple *call)
930 : {
931 : /* Pure functions cannot do any changes on the dynamic type;
932 : that require writting to memory. */
933 9616 : if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
934 : return false;
935 : /* We need to check if we are within inlined consturctor
936 : or destructor (ideally we would have way to check that the
937 : inline cdtor is actually working on ARG, but we don't have
938 : easy tie on this, so punt on all non-pure cdtors.
939 : We may also record the types of cdtors and once we know type
940 : of the instance match them.
941 :
942 : Also code unification optimizations may merge calls from
943 : different blocks making return values unreliable. So
944 : do nothing during late optimization. */
945 9616 : if (DECL_STRUCT_FUNCTION (function)->after_inlining)
946 : return true;
947 9616 : if (TREE_CODE (arg) == SSA_NAME
948 9616 : && SSA_NAME_IS_DEFAULT_DEF (arg)
949 19232 : && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
950 : {
951 : /* Normal (non-THIS) argument. */
952 9616 : if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
953 8949 : || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
954 : /* THIS pointer of an method - here we want to watch constructors
955 : and destructors as those definitely may change the dynamic
956 : type. */
957 17974 : || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
958 8358 : && !DECL_CXX_CONSTRUCTOR_P (function)
959 8357 : && !DECL_CXX_DESTRUCTOR_P (function)
960 8356 : && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
961 : {
962 : /* Walk the inline stack and watch out for ctors/dtors. */
963 41238 : for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
964 15812 : block = BLOCK_SUPERCONTEXT (block))
965 15827 : if (inlined_polymorphic_ctor_dtor_block_p (block, false))
966 : return true;
967 : return false;
968 : }
969 : }
970 : return true;
971 : }
972 :
973 : /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
974 : callsite CALL) by looking for assignments to its virtual table pointer. If
975 : it is, return true. ARG is the object itself (not a pointer
976 : to it, unless dereferenced). BASE is the base of the memory access as
977 : returned by get_ref_base_and_extent, as is the offset.
978 :
979 : This is helper function for detect_type_change and detect_type_change_ssa
980 : that does the heavy work which is usually unnecesary. */
981 :
982 : static bool
983 17 : detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
984 : tree base, tree comp_type, gcall *call,
985 : HOST_WIDE_INT offset)
986 : {
987 17 : struct prop_type_change_info tci;
988 17 : ao_ref ao;
989 :
990 17 : gcc_checking_assert (DECL_P (arg)
991 : || TREE_CODE (arg) == MEM_REF
992 : || handled_component_p (arg));
993 :
994 17 : comp_type = TYPE_MAIN_VARIANT (comp_type);
995 :
996 : /* Const calls cannot call virtual methods through VMT and so type changes do
997 : not matter. */
998 17 : if (!flag_devirtualize || !gimple_vuse (call)
999 : /* Be sure expected_type is polymorphic. */
1000 17 : || !comp_type
1001 17 : || TREE_CODE (comp_type) != RECORD_TYPE
1002 17 : || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
1003 34 : || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
1004 : return true;
1005 :
1006 17 : if (fbi->aa_walk_budget == 0)
1007 : return false;
1008 :
1009 17 : ao_ref_init (&ao, arg);
1010 17 : ao.base = base;
1011 17 : ao.offset = offset;
1012 17 : ao.size = POINTER_SIZE;
1013 17 : ao.max_size = ao.size;
1014 :
1015 17 : tci.offset = offset;
1016 17 : tci.object = get_base_address (arg);
1017 17 : tci.type_maybe_changed = false;
1018 :
1019 17 : int walked
1020 34 : = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
1021 : &tci, NULL, NULL, fbi->aa_walk_budget);
1022 17 : if (walked >= 0)
1023 17 : fbi->aa_walk_budget -= walked;
1024 : else
1025 0 : fbi->aa_walk_budget = 0;
1026 :
1027 17 : if (walked >= 0 && !tci.type_maybe_changed)
1028 : return false;
1029 :
1030 : return true;
1031 : }
1032 :
1033 : /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
1034 : If it is, return true. ARG is the object itself (not a pointer
1035 : to it, unless dereferenced). BASE is the base of the memory access as
1036 : returned by get_ref_base_and_extent, as is the offset. */
1037 :
1038 : static bool
1039 749 : detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
1040 : tree comp_type, gcall *call,
1041 : HOST_WIDE_INT offset)
1042 : {
1043 749 : if (!flag_devirtualize)
1044 : return false;
1045 :
1046 749 : if (TREE_CODE (base) == MEM_REF
1047 1498 : && !param_type_may_change_p (current_function_decl,
1048 749 : TREE_OPERAND (base, 0),
1049 : call))
1050 : return false;
1051 12 : return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
1052 12 : call, offset);
1053 : }
1054 :
1055 : /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
1056 : SSA name (its dereference will become the base and the offset is assumed to
1057 : be zero). */
1058 :
1059 : static bool
1060 8867 : detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
1061 : gcall *call)
1062 : {
1063 8867 : gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
1064 8867 : if (!flag_devirtualize
1065 8867 : || !POINTER_TYPE_P (TREE_TYPE (arg)))
1066 : return false;
1067 :
1068 8867 : if (!param_type_may_change_p (current_function_decl, arg, call))
1069 : return false;
1070 :
1071 5 : arg = build2 (MEM_REF, ptr_type_node, arg,
1072 : build_int_cst (ptr_type_node, 0));
1073 :
1074 5 : return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
1075 5 : call, 0);
1076 : }
1077 :
1078 : /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
1079 : boolean variable pointed to by DATA. */
1080 :
1081 : static bool
1082 1572613 : mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
1083 : void *data)
1084 : {
1085 1572613 : bool *b = (bool *) data;
1086 1572613 : *b = true;
1087 1572613 : return true;
1088 : }
1089 :
1090 : /* Find the nearest valid aa status for parameter specified by INDEX that
1091 : dominates BB. */
1092 :
1093 : static struct ipa_param_aa_status *
1094 4382784 : find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
1095 : int index)
1096 : {
1097 13194911 : while (true)
1098 : {
1099 13194911 : bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1100 13194911 : if (!bb)
1101 : return NULL;
1102 10245192 : struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1103 12325221 : if (!bi->param_aa_statuses.is_empty ()
1104 2080029 : && bi->param_aa_statuses[index].valid)
1105 1433065 : return &bi->param_aa_statuses[index];
1106 : }
1107 : }
1108 :
1109 : /* Get AA status structure for the given BB and parameter with INDEX. Allocate
1110 : structures and/or intialize the result with a dominating description as
1111 : necessary. */
1112 :
1113 : static struct ipa_param_aa_status *
1114 6582515 : parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
1115 : int index)
1116 : {
1117 6582515 : gcc_checking_assert (fbi);
1118 6582515 : struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1119 6582515 : if (bi->param_aa_statuses.is_empty ())
1120 3776557 : bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
1121 6582515 : struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
1122 6582515 : if (!paa->valid)
1123 : {
1124 4382784 : gcc_checking_assert (!paa->parm_modified
1125 : && !paa->ref_modified
1126 : && !paa->pt_modified);
1127 4382784 : struct ipa_param_aa_status *dom_paa;
1128 4382784 : dom_paa = find_dominating_aa_status (fbi, bb, index);
1129 4382784 : if (dom_paa)
1130 1433065 : *paa = *dom_paa;
1131 : else
1132 2949719 : paa->valid = true;
1133 : }
1134 :
1135 6582515 : return paa;
1136 : }
1137 :
1138 : /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
1139 : a value known not to be modified in this function before reaching the
1140 : statement STMT. FBI holds information about the function we have so far
1141 : gathered but do not survive the summary building stage. */
1142 :
1143 : static bool
1144 1157158 : parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
1145 : gimple *stmt, tree parm_load)
1146 : {
1147 1157158 : struct ipa_param_aa_status *paa;
1148 1157158 : bool modified = false;
1149 1157158 : ao_ref refd;
1150 :
1151 1157158 : tree base = get_base_address (parm_load);
1152 1157158 : gcc_assert (TREE_CODE (base) == PARM_DECL);
1153 1157158 : if (TREE_READONLY (base))
1154 : return true;
1155 :
1156 1076029 : gcc_checking_assert (fbi);
1157 1076029 : paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1158 1076029 : if (paa->parm_modified || fbi->aa_walk_budget == 0)
1159 : return false;
1160 :
1161 1836250 : gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
1162 918125 : ao_ref_init (&refd, parm_load);
1163 1836250 : int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1164 : &modified, NULL, NULL,
1165 : fbi->aa_walk_budget);
1166 918125 : if (walked < 0)
1167 : {
1168 8 : modified = true;
1169 8 : fbi->aa_walk_budget = 0;
1170 : }
1171 : else
1172 918117 : fbi->aa_walk_budget -= walked;
1173 918125 : if (paa && modified)
1174 103294 : paa->parm_modified = true;
1175 918125 : return !modified;
1176 : }
1177 :
1178 : /* If STMT is an assignment that loads a value from an parameter declaration,
1179 : return the index of the parameter in ipa_node_params which has not been
1180 : modified. Otherwise return -1. */
1181 :
1182 : static int
1183 6464732 : load_from_unmodified_param (struct ipa_func_body_info *fbi,
1184 : vec<ipa_param_descriptor, va_gc> *descriptors,
1185 : gimple *stmt)
1186 : {
1187 6464732 : int index;
1188 6464732 : tree op1;
1189 :
1190 6464732 : if (!gimple_assign_single_p (stmt))
1191 : return -1;
1192 :
1193 4306023 : op1 = gimple_assign_rhs1 (stmt);
1194 4306023 : if (TREE_CODE (op1) != PARM_DECL)
1195 : return -1;
1196 :
1197 70825 : index = ipa_get_param_decl_index_1 (descriptors, op1);
1198 70825 : if (index < 0
1199 70825 : || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1200 25451 : return -1;
1201 :
1202 : return index;
1203 : }
1204 :
1205 : /* Return true if memory reference REF (which must be a load through parameter
1206 : with INDEX) loads data that are known to be unmodified in this function
1207 : before reaching statement STMT. */
1208 :
1209 : static bool
1210 4686961 : parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1211 : int index, gimple *stmt, tree ref)
1212 : {
1213 4686961 : struct ipa_param_aa_status *paa;
1214 4686961 : bool modified = false;
1215 4686961 : ao_ref refd;
1216 :
1217 4686961 : gcc_checking_assert (fbi);
1218 4686961 : paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1219 4686961 : if (paa->ref_modified || fbi->aa_walk_budget == 0)
1220 : return false;
1221 :
1222 7327330 : gcc_checking_assert (gimple_vuse (stmt));
1223 3663665 : ao_ref_init (&refd, ref);
1224 7327330 : int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1225 : &modified, NULL, NULL,
1226 : fbi->aa_walk_budget);
1227 3663665 : if (walked < 0)
1228 : {
1229 8 : modified = true;
1230 8 : fbi->aa_walk_budget = 0;
1231 : }
1232 : else
1233 3663657 : fbi->aa_walk_budget -= walked;
1234 3663665 : if (modified)
1235 635305 : paa->ref_modified = true;
1236 3663665 : return !modified;
1237 : }
1238 :
1239 : /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1240 : is known to be unmodified in this function before reaching call statement
1241 : CALL into which it is passed. FBI describes the function body. */
1242 :
1243 : static bool
1244 1152886 : parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1245 : gimple *call, tree parm)
1246 : {
1247 1152886 : bool modified = false;
1248 1152886 : ao_ref refd;
1249 :
1250 : /* It's unnecessary to calculate anything about memory contnets for a const
1251 : function because it is not goin to use it. But do not cache the result
1252 : either. Also, no such calculations for non-pointers. */
1253 1600594 : if (!gimple_vuse (call)
1254 1152886 : || !POINTER_TYPE_P (TREE_TYPE (parm)))
1255 : return false;
1256 :
1257 819525 : struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1258 : gimple_bb (call),
1259 : index);
1260 819525 : if (paa->pt_modified || fbi->aa_walk_budget == 0)
1261 : return false;
1262 :
1263 705178 : ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1264 1410356 : int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1265 : &modified, NULL, NULL,
1266 : fbi->aa_walk_budget);
1267 705178 : if (walked < 0)
1268 : {
1269 0 : fbi->aa_walk_budget = 0;
1270 0 : modified = true;
1271 : }
1272 : else
1273 705178 : fbi->aa_walk_budget -= walked;
1274 705178 : if (modified)
1275 261062 : paa->pt_modified = true;
1276 705178 : return !modified;
1277 : }
1278 :
1279 : /* Return true if we can prove that OP is a memory reference loading
1280 : data from an aggregate passed as a parameter.
1281 :
1282 : The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1283 : false if it cannot prove that the value has not been modified before the
1284 : load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1285 : if it cannot prove the value has not been modified, in that case it will
1286 : store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1287 :
1288 : INFO and PARMS_AINFO describe parameters of the current function (but the
1289 : latter can be NULL), STMT is the load statement. If function returns true,
1290 : *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1291 : within the aggregate and whether it is a load from a value passed by
1292 : reference respectively.
1293 :
1294 : Return false if the offset divided by BITS_PER_UNIT would not fit into an
1295 : unsigned int. */
1296 :
1297 : bool
1298 25200592 : ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1299 : vec<ipa_param_descriptor, va_gc> *descriptors,
1300 : gimple *stmt, tree op, int *index_p,
1301 : HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1302 : bool *by_ref_p, bool *guaranteed_unmodified)
1303 : {
1304 25200592 : int index;
1305 25200592 : HOST_WIDE_INT size;
1306 25200592 : bool reverse;
1307 25200592 : tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1308 :
1309 25200592 : if (!base
1310 23823877 : || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
1311 : return false;
1312 :
1313 : /* We can not propagate across volatile loads. */
1314 23823846 : if (TREE_THIS_VOLATILE (op))
1315 : return false;
1316 :
1317 22613223 : if (DECL_P (base))
1318 : {
1319 11081353 : int index = ipa_get_param_decl_index_1 (descriptors, base);
1320 11081353 : if (index >= 0
1321 11081353 : && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1322 : {
1323 786213 : *index_p = index;
1324 786213 : *by_ref_p = false;
1325 786213 : if (size_p)
1326 24056 : *size_p = size;
1327 786213 : if (guaranteed_unmodified)
1328 135 : *guaranteed_unmodified = true;
1329 786213 : return true;
1330 : }
1331 10295140 : return false;
1332 : }
1333 :
1334 11531870 : if (TREE_CODE (base) != MEM_REF
1335 10743137 : || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1336 22271433 : || !integer_zerop (TREE_OPERAND (base, 1)))
1337 1739155 : return false;
1338 :
1339 9792715 : if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1340 : {
1341 4801432 : tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1342 4801432 : index = ipa_get_param_decl_index_1 (descriptors, parm);
1343 : }
1344 : else
1345 : {
1346 : /* This branch catches situations where a pointer parameter is not a
1347 : gimple register, for example:
1348 :
1349 : void hip7(S*) (struct S * p)
1350 : {
1351 : void (*<T2e4>) (struct S *) D.1867;
1352 : struct S * p.1;
1353 :
1354 : <bb 2>:
1355 : p.1_1 = p;
1356 : D.1867_2 = p.1_1->f;
1357 : D.1867_2 ();
1358 : gdp = &p;
1359 : */
1360 :
1361 4991283 : gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1362 4991283 : index = load_from_unmodified_param (fbi, descriptors, def);
1363 : }
1364 :
1365 9792715 : if (index >= 0)
1366 : {
1367 4686662 : bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1368 4686662 : if (!data_preserved && !guaranteed_unmodified)
1369 : return false;
1370 :
1371 3028575 : *index_p = index;
1372 3028575 : *by_ref_p = true;
1373 3028575 : if (size_p)
1374 28532 : *size_p = size;
1375 3028575 : if (guaranteed_unmodified)
1376 2044 : *guaranteed_unmodified = data_preserved;
1377 3028575 : return true;
1378 : }
1379 : return false;
1380 : }
1381 :
1382 : /* If STMT is an assignment that loads a value from a parameter declaration,
1383 : or from an aggregate passed as the parameter either by value or reference,
1384 : return the index of the parameter in ipa_node_params. Otherwise return -1.
1385 :
1386 : FBI holds gathered information about the function. INFO describes
1387 : parameters of the function, STMT is the assignment statement. If it is a
1388 : memory load from an aggregate, *OFFSET_P is filled with offset within the
1389 : aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1390 : reference. */
1391 :
1392 : static int
1393 347901 : load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1394 : class ipa_node_params *info,
1395 : gimple *stmt,
1396 : HOST_WIDE_INT *offset_p,
1397 : bool *by_ref_p)
1398 : {
1399 347901 : int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1400 347901 : poly_int64 size;
1401 :
1402 : /* Load value from a parameter declaration. */
1403 347901 : if (index >= 0)
1404 : {
1405 244 : *offset_p = -1;
1406 244 : return index;
1407 : }
1408 :
1409 347657 : if (!gimple_assign_load_p (stmt))
1410 : return -1;
1411 :
1412 178449 : tree rhs = gimple_assign_rhs1 (stmt);
1413 :
1414 : /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1415 324480 : for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1416 146088 : if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1417 : return -1;
1418 :
1419 : /* Skip memory reference containing bit-field. */
1420 178392 : if (TREE_CODE (rhs) == BIT_FIELD_REF
1421 178392 : || contains_bitfld_component_ref_p (rhs))
1422 0 : return -1;
1423 :
1424 178392 : if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1425 : offset_p, &size, by_ref_p))
1426 : return -1;
1427 :
1428 49510 : gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1429 : size));
1430 49510 : if (!*by_ref_p)
1431 : {
1432 23364 : tree param_type = ipa_get_type (info, index);
1433 :
1434 23364 : if (!param_type || !AGGREGATE_TYPE_P (param_type))
1435 : return -1;
1436 : }
1437 26146 : else if (TREE_THIS_VOLATILE (rhs))
1438 : return -1;
1439 :
1440 47878 : return index;
1441 : }
1442 :
1443 : /* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1444 : to find original pointer. Initialize RET to the pointer which results from
1445 : the walk.
1446 : If offset is known return true and initialize OFFSET_RET. */
1447 :
1448 : bool
1449 15608924 : unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1450 : {
1451 15608924 : poly_int64 offset = 0;
1452 15608924 : bool offset_known = true;
1453 15608924 : int i;
1454 :
1455 20155899 : for (i = 0; i < param_ipa_jump_function_lookups; i++)
1456 : {
1457 20154885 : if (TREE_CODE (op) == ADDR_EXPR)
1458 : {
1459 1573138 : poly_int64 extra_offset;
1460 1573138 : tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1461 : &extra_offset);
1462 1573138 : if (!base)
1463 : {
1464 27668 : base = get_base_address (TREE_OPERAND (op, 0));
1465 27668 : if (TREE_CODE (base) != MEM_REF)
1466 : break;
1467 : offset_known = false;
1468 : }
1469 : else
1470 : {
1471 1545470 : if (TREE_CODE (base) != MEM_REF)
1472 : break;
1473 260108 : offset += extra_offset;
1474 : }
1475 260108 : op = TREE_OPERAND (base, 0);
1476 260108 : if (mem_ref_offset (base).to_shwi (&extra_offset))
1477 260108 : offset += extra_offset;
1478 : else
1479 : offset_known = false;
1480 : }
1481 18581747 : else if (TREE_CODE (op) == SSA_NAME
1482 18581747 : && !SSA_NAME_IS_DEFAULT_DEF (op))
1483 : {
1484 6407858 : gimple *pstmt = SSA_NAME_DEF_STMT (op);
1485 :
1486 6407858 : if (gimple_assign_single_p (pstmt))
1487 3086403 : op = gimple_assign_rhs1 (pstmt);
1488 3321455 : else if (is_gimple_assign (pstmt)
1489 3321455 : && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1490 : {
1491 1200464 : poly_int64 extra_offset = 0;
1492 1200464 : if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1493 : &extra_offset))
1494 1200464 : offset += extra_offset;
1495 : else
1496 : offset_known = false;
1497 1200464 : op = gimple_assign_rhs1 (pstmt);
1498 : }
1499 : else
1500 : break;
1501 : }
1502 : else
1503 : break;
1504 : }
1505 15608924 : *ret = op;
1506 15608924 : *offset_ret = offset;
1507 15608924 : return offset_known;
1508 : }
1509 :
1510 : /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1511 : of an assignment statement STMT, try to determine whether we are actually
1512 : handling any of the following cases and construct an appropriate jump
1513 : function into JFUNC if so:
1514 :
1515 : 1) The passed value is loaded from a formal parameter which is not a gimple
1516 : register (most probably because it is addressable, the value has to be
1517 : scalar) and we can guarantee the value has not changed. This case can
1518 : therefore be described by a simple pass-through jump function. For example:
1519 :
1520 : foo (int a)
1521 : {
1522 : int a.0;
1523 :
1524 : a.0_2 = a;
1525 : bar (a.0_2);
1526 :
1527 : 2) The passed value can be described by a simple arithmetic pass-through
1528 : jump function. E.g.
1529 :
1530 : foo (int a)
1531 : {
1532 : int D.2064;
1533 :
1534 : D.2064_4 = a.1(D) + 4;
1535 : bar (D.2064_4);
1536 :
1537 : This case can also occur in combination of the previous one, e.g.:
1538 :
1539 : foo (int a, int z)
1540 : {
1541 : int a.0;
1542 : int D.2064;
1543 :
1544 : a.0_3 = a;
1545 : D.2064_4 = a.0_3 + 4;
1546 : foo (D.2064_4);
1547 :
1548 : 3) The passed value is an address of an object within another one (which
1549 : also passed by reference). Such situations are described by an ancestor
1550 : jump function and describe situations such as:
1551 :
1552 : B::foo() (struct B * const this)
1553 : {
1554 : struct A * D.1845;
1555 :
1556 : D.1845_2 = &this_1(D)->D.1748;
1557 : A::bar (D.1845_2);
1558 :
1559 : INFO is the structure describing individual parameters access different
1560 : stages of IPA optimizations. PARMS_AINFO contains the information that is
1561 : only needed for intraprocedural analysis. */
1562 :
1563 : static void
1564 1236856 : compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1565 : class ipa_node_params *info,
1566 : struct ipa_jump_func *jfunc,
1567 : gcall *call, gimple *stmt, tree name,
1568 : tree param_type)
1569 : {
1570 1236856 : HOST_WIDE_INT offset, size;
1571 1236856 : tree op1, tc_ssa, base, ssa;
1572 1236856 : bool reverse;
1573 1236856 : int index;
1574 :
1575 1236856 : op1 = gimple_assign_rhs1 (stmt);
1576 :
1577 1236856 : if (TREE_CODE (op1) == SSA_NAME)
1578 : {
1579 388909 : if (SSA_NAME_IS_DEFAULT_DEF (op1))
1580 111308 : index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1581 : else
1582 277601 : index = load_from_unmodified_param (fbi, info->descriptors,
1583 277601 : SSA_NAME_DEF_STMT (op1));
1584 : tc_ssa = op1;
1585 : }
1586 : else
1587 : {
1588 847947 : index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1589 847947 : tc_ssa = gimple_assign_lhs (stmt);
1590 : }
1591 :
1592 1236856 : if (index >= 0)
1593 : {
1594 112648 : if (lto_variably_modified_type_p (TREE_TYPE (name)))
1595 1073734 : return;
1596 :
1597 112602 : switch (gimple_assign_rhs_class (stmt))
1598 : {
1599 64475 : case GIMPLE_BINARY_RHS:
1600 64475 : {
1601 64475 : tree op2 = gimple_assign_rhs2 (stmt);
1602 64475 : if (!is_gimple_ip_invariant (op2)
1603 64475 : || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1604 : != tcc_comparison)
1605 32609 : && !useless_type_conversion_p (TREE_TYPE (name),
1606 32609 : TREE_TYPE (op1))))
1607 23709 : return;
1608 :
1609 40766 : ipa_set_jf_arith_pass_through (jfunc, index, op2,
1610 : gimple_assign_rhs_code (stmt),
1611 40766 : TREE_TYPE (name));
1612 40766 : break;
1613 : }
1614 1119 : case GIMPLE_SINGLE_RHS:
1615 1119 : {
1616 1119 : bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1617 : tc_ssa);
1618 1119 : ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1619 1119 : break;
1620 : }
1621 47006 : case GIMPLE_UNARY_RHS:
1622 47006 : if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1623 994 : ipa_set_jf_unary_pass_through (jfunc, index,
1624 : gimple_assign_rhs_code (stmt),
1625 994 : TREE_TYPE (name));
1626 88893 : default:;
1627 : }
1628 88893 : return;
1629 : }
1630 :
1631 1124208 : if (TREE_CODE (op1) != ADDR_EXPR)
1632 : return;
1633 237159 : op1 = TREE_OPERAND (op1, 0);
1634 237159 : base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1635 237159 : offset_int mem_offset;
1636 237159 : if (!base
1637 210103 : || TREE_CODE (base) != MEM_REF
1638 436361 : || !mem_ref_offset (base).is_constant (&mem_offset))
1639 37957 : return;
1640 199202 : offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1641 199202 : ssa = TREE_OPERAND (base, 0);
1642 199202 : if (TREE_CODE (ssa) != SSA_NAME
1643 199202 : || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1644 362423 : || offset < 0)
1645 : return;
1646 :
1647 : /* Dynamic types are changed in constructors and destructors. */
1648 163122 : index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1649 163122 : if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1650 160238 : ipa_set_ancestor_jf (jfunc, offset, index,
1651 160238 : parm_ref_data_pass_through_p (fbi, index, call, ssa),
1652 : false);
1653 : }
1654 :
1655 : /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1656 : it looks like:
1657 :
1658 : iftmp.1_3 = &obj_2(D)->D.1762;
1659 :
1660 : The base of the MEM_REF must be a default definition SSA NAME of a
1661 : parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1662 : whole MEM_REF expression is returned and the offset calculated from any
1663 : handled components and the MEM_REF itself is stored into *OFFSET. The whole
1664 : RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1665 :
1666 : static tree
1667 15503 : get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1668 : {
1669 15503 : HOST_WIDE_INT size;
1670 15503 : tree expr, parm, obj;
1671 15503 : bool reverse;
1672 :
1673 15503 : if (!gimple_assign_single_p (assign))
1674 : return NULL_TREE;
1675 8568 : expr = gimple_assign_rhs1 (assign);
1676 :
1677 8568 : if (TREE_CODE (expr) != ADDR_EXPR)
1678 : return NULL_TREE;
1679 4405 : expr = TREE_OPERAND (expr, 0);
1680 4405 : obj = expr;
1681 4405 : expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1682 :
1683 4405 : offset_int mem_offset;
1684 4405 : if (!expr
1685 4403 : || TREE_CODE (expr) != MEM_REF
1686 8808 : || !mem_ref_offset (expr).is_constant (&mem_offset))
1687 2 : return NULL_TREE;
1688 4403 : parm = TREE_OPERAND (expr, 0);
1689 4403 : if (TREE_CODE (parm) != SSA_NAME
1690 4403 : || !SSA_NAME_IS_DEFAULT_DEF (parm)
1691 5178 : || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1692 : return NULL_TREE;
1693 :
1694 775 : *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1695 775 : *obj_p = obj;
1696 775 : return expr;
1697 : }
1698 :
1699 :
1700 : /* Given that an actual argument is an SSA_NAME that is a result of a phi
1701 : statement PHI, try to find out whether NAME is in fact a
1702 : multiple-inheritance typecast from a descendant into an ancestor of a formal
1703 : parameter and thus can be described by an ancestor jump function and if so,
1704 : write the appropriate function into JFUNC.
1705 :
1706 : Essentially we want to match the following pattern:
1707 :
1708 : if (obj_2(D) != 0B)
1709 : goto <bb 3>;
1710 : else
1711 : goto <bb 4>;
1712 :
1713 : <bb 3>:
1714 : iftmp.1_3 = &obj_2(D)->D.1762;
1715 :
1716 : <bb 4>:
1717 : # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1718 : D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1719 : return D.1879_6; */
1720 :
1721 : static void
1722 89426 : compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1723 : class ipa_node_params *info,
1724 : struct ipa_jump_func *jfunc,
1725 : gcall *call, gphi *phi)
1726 : {
1727 89426 : HOST_WIDE_INT offset;
1728 89426 : gimple *assign;
1729 89426 : basic_block phi_bb, assign_bb, cond_bb;
1730 89426 : tree tmp, parm, expr, obj;
1731 89426 : int index, i;
1732 :
1733 89426 : if (gimple_phi_num_args (phi) != 2)
1734 89412 : return;
1735 :
1736 75985 : if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1737 4081 : tmp = PHI_ARG_DEF (phi, 0);
1738 71904 : else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1739 13276 : tmp = PHI_ARG_DEF (phi, 1);
1740 : else
1741 : return;
1742 17357 : if (TREE_CODE (tmp) != SSA_NAME
1743 15231 : || SSA_NAME_IS_DEFAULT_DEF (tmp)
1744 15120 : || !POINTER_TYPE_P (TREE_TYPE (tmp))
1745 20566 : || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1746 : return;
1747 :
1748 1129 : assign = SSA_NAME_DEF_STMT (tmp);
1749 1129 : assign_bb = gimple_bb (assign);
1750 90123 : if (!single_pred_p (assign_bb))
1751 : return;
1752 711 : expr = get_ancestor_addr_info (assign, &obj, &offset);
1753 711 : if (!expr)
1754 : return;
1755 26 : parm = TREE_OPERAND (expr, 0);
1756 26 : index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1757 26 : if (index < 0)
1758 : return;
1759 :
1760 26 : cond_bb = single_pred (assign_bb);
1761 52 : gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
1762 26 : if (!cond
1763 26 : || gimple_cond_code (cond) != NE_EXPR
1764 26 : || gimple_cond_lhs (cond) != parm
1765 14 : || !integer_zerop (gimple_cond_rhs (cond)))
1766 12 : return;
1767 :
1768 14 : phi_bb = gimple_bb (phi);
1769 42 : for (i = 0; i < 2; i++)
1770 : {
1771 28 : basic_block pred = EDGE_PRED (phi_bb, i)->src;
1772 28 : if (pred != assign_bb && pred != cond_bb)
1773 : return;
1774 : }
1775 :
1776 14 : ipa_set_ancestor_jf (jfunc, offset, index,
1777 14 : parm_ref_data_pass_through_p (fbi, index, call, parm),
1778 : true);
1779 : }
1780 :
1781 : /* Inspect the given TYPE and return true iff it has the same structure (the
1782 : same number of fields of the same types) as a C++ member pointer. If
1783 : METHOD_PTR and DELTA are non-NULL, store the trees representing the
1784 : corresponding fields there. */
1785 :
1786 : static bool
1787 876 : type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1788 : {
1789 876 : tree fld;
1790 :
1791 876 : if (TREE_CODE (type) != RECORD_TYPE)
1792 : return false;
1793 :
1794 876 : fld = TYPE_FIELDS (type);
1795 876 : if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1796 876 : || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1797 1752 : || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1798 : return false;
1799 :
1800 876 : if (method_ptr)
1801 876 : *method_ptr = fld;
1802 :
1803 876 : fld = DECL_CHAIN (fld);
1804 876 : if (!fld || INTEGRAL_TYPE_P (fld)
1805 1752 : || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1806 : return false;
1807 876 : if (delta)
1808 876 : *delta = fld;
1809 :
1810 876 : if (DECL_CHAIN (fld))
1811 : return false;
1812 :
1813 : return true;
1814 : }
1815 :
1816 : /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1817 : return the rhs of its defining statement, and this statement is stored in
1818 : *RHS_STMT. Otherwise return RHS as it is. */
1819 :
1820 : static inline tree
1821 126883 : get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1822 : {
1823 168599 : while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1824 : {
1825 91555 : gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1826 :
1827 91555 : if (gimple_assign_single_p (def_stmt))
1828 41716 : rhs = gimple_assign_rhs1 (def_stmt);
1829 : else
1830 : break;
1831 41716 : *rhs_stmt = def_stmt;
1832 : }
1833 126883 : return rhs;
1834 : }
1835 :
1836 : /* Simple linked list, describing contents of an aggregate before call. */
1837 :
1838 : struct ipa_known_agg_contents_list
1839 : {
1840 : /* Offset and size of the described part of the aggregate. */
1841 : HOST_WIDE_INT offset, size;
1842 :
1843 : /* Type of the described part of the aggregate. */
1844 : tree type;
1845 :
1846 : /* Known constant value or jump function data describing contents. */
1847 : struct ipa_load_agg_data value;
1848 :
1849 : /* Pointer to the next structure in the list. */
1850 : struct ipa_known_agg_contents_list *next;
1851 : };
1852 :
1853 : /* Add an aggregate content item into a linked list of
1854 : ipa_known_agg_contents_list structure, in which all elements
1855 : are sorted ascendingly by offset. */
1856 :
1857 : static inline void
1858 2709158 : add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1859 : struct ipa_known_agg_contents_list *item)
1860 : {
1861 2709158 : struct ipa_known_agg_contents_list *list = *plist;
1862 :
1863 5024894 : for (; list; list = list->next)
1864 : {
1865 3830808 : if (list->offset >= item->offset)
1866 : break;
1867 :
1868 2315736 : plist = &list->next;
1869 : }
1870 :
1871 2709158 : item->next = list;
1872 2709158 : *plist = item;
1873 : }
1874 :
1875 : /* Check whether a given aggregate content is clobbered by certain element in
1876 : a linked list of ipa_known_agg_contents_list. */
1877 :
1878 : static inline bool
1879 1055405 : clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1880 : struct ipa_known_agg_contents_list *item)
1881 : {
1882 2253826 : for (; list; list = list->next)
1883 : {
1884 1822304 : if (list->offset >= item->offset)
1885 611283 : return list->offset < item->offset + item->size;
1886 :
1887 1211021 : if (list->offset + list->size > item->offset)
1888 : return true;
1889 : }
1890 :
1891 : return false;
1892 : }
1893 :
1894 : /* Build aggregate jump function from LIST, assuming there are exactly
1895 : VALUE_COUNT entries there and that offset of the passed argument
1896 : is ARG_OFFSET and store it into JFUNC. */
1897 :
1898 : static void
1899 341136 : build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1900 : int value_count, HOST_WIDE_INT arg_offset,
1901 : struct ipa_jump_func *jfunc)
1902 : {
1903 341136 : vec_safe_reserve (jfunc->agg.items, value_count, true);
1904 1335816 : for (; list; list = list->next)
1905 : {
1906 994680 : struct ipa_agg_jf_item item;
1907 994680 : tree operand = list->value.pass_through.operand;
1908 :
1909 994680 : if (list->value.pass_through.formal_id >= 0)
1910 : {
1911 : /* Content value is derived from some formal paramerter. */
1912 87416 : if (list->value.offset >= 0)
1913 46339 : item.jftype = IPA_JF_LOAD_AGG;
1914 : else
1915 41077 : item.jftype = IPA_JF_PASS_THROUGH;
1916 :
1917 87416 : item.value.load_agg = list->value;
1918 87416 : if (operand)
1919 8954 : item.value.pass_through.operand
1920 8954 : = unshare_expr_without_location (operand);
1921 : }
1922 907264 : else if (operand)
1923 : {
1924 : /* Content value is known constant. */
1925 907264 : item.jftype = IPA_JF_CONST;
1926 907264 : item.value.constant = unshare_expr_without_location (operand);
1927 : }
1928 : else
1929 0 : continue;
1930 :
1931 994680 : item.type = list->type;
1932 994680 : gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1933 :
1934 994680 : item.offset = list->offset - arg_offset;
1935 994680 : gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1936 :
1937 994680 : jfunc->agg.items->quick_push (item);
1938 : }
1939 341136 : }
1940 :
1941 : /* Given an assignment statement STMT, try to collect information into
1942 : AGG_VALUE that will be used to construct jump function for RHS of the
1943 : assignment, from which content value of an aggregate part comes.
1944 :
1945 : Besides constant and simple pass-through jump functions, also try to
1946 : identify whether it matches the following pattern that can be described by
1947 : a load-value-from-aggregate jump function, which is a derivative of simple
1948 : pass-through jump function.
1949 :
1950 : foo (int *p)
1951 : {
1952 : ...
1953 :
1954 : *(q_5 + 4) = *(p_3(D) + 28) op 1;
1955 : bar (q_5);
1956 : }
1957 :
1958 : Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1959 : constant, simple pass-through and load-vale-from-aggregate. If value
1960 : is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1961 : set to -1. For simple pass-through and load-value-from-aggregate, field
1962 : FORMAL_ID specifies the related formal parameter index, and field
1963 : OFFSET can be used to distinguish them, -1 means simple pass-through,
1964 : otherwise means load-value-from-aggregate. */
1965 :
1966 : static void
1967 1716709 : analyze_agg_content_value (struct ipa_func_body_info *fbi,
1968 : struct ipa_load_agg_data *agg_value,
1969 : gimple *stmt)
1970 : {
1971 1716709 : tree lhs = gimple_assign_lhs (stmt);
1972 1716709 : tree rhs1 = gimple_assign_rhs1 (stmt);
1973 1716709 : enum tree_code code;
1974 1716709 : int index = -1;
1975 :
1976 : /* Initialize jump function data for the aggregate part. */
1977 1716709 : memset (agg_value, 0, sizeof (*agg_value));
1978 1716709 : agg_value->pass_through.operation = NOP_EXPR;
1979 1716709 : agg_value->pass_through.formal_id = -1;
1980 1716709 : agg_value->offset = -1;
1981 :
1982 1716709 : if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1983 1528917 : || TREE_THIS_VOLATILE (lhs)
1984 1528249 : || TREE_CODE (lhs) == BIT_FIELD_REF
1985 1528241 : || contains_bitfld_component_ref_p (lhs))
1986 197288 : return;
1987 :
1988 : /* Skip SSA copies. */
1989 1795086 : while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1990 : {
1991 1707529 : if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1992 : break;
1993 :
1994 368507 : stmt = SSA_NAME_DEF_STMT (rhs1);
1995 368507 : if (!is_gimple_assign (stmt))
1996 : break;
1997 :
1998 275665 : lhs = gimple_assign_lhs (stmt);
1999 275665 : rhs1 = gimple_assign_rhs1 (stmt);
2000 : }
2001 :
2002 1519421 : if (gphi *phi = dyn_cast<gphi *> (stmt))
2003 : {
2004 : /* Also special case like the following (a is a formal parameter):
2005 :
2006 : _12 = *a_11(D).dim[0].stride;
2007 : ...
2008 : # iftmp.22_9 = PHI <_12(2), 1(3)>
2009 : ...
2010 : parm.6.dim[0].stride = iftmp.22_9;
2011 : ...
2012 : __x_MOD_foo (&parm.6, b_31(D));
2013 :
2014 : The aggregate function describing parm.6.dim[0].stride is encoded as a
2015 : PASS-THROUGH jump function with ASSERT_EXPR operation whith operand 1
2016 : (the constant from the PHI node). */
2017 :
2018 33955 : if (gimple_phi_num_args (phi) != 2
2019 33955 : || lto_variably_modified_type_p (TREE_TYPE (lhs)))
2020 6696 : return;
2021 27259 : tree arg0 = gimple_phi_arg_def (phi, 0);
2022 27259 : tree arg1 = gimple_phi_arg_def (phi, 1);
2023 27259 : tree operand;
2024 :
2025 27259 : if (is_gimple_ip_invariant (arg1))
2026 : {
2027 : operand = arg1;
2028 : rhs1 = arg0;
2029 : }
2030 22378 : else if (is_gimple_ip_invariant (arg0))
2031 : {
2032 : operand = arg0;
2033 : rhs1 = arg1;
2034 : }
2035 : else
2036 : return;
2037 :
2038 10263 : rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
2039 10263 : if (!is_gimple_assign (stmt))
2040 : return;
2041 :
2042 4463 : code = ASSERT_EXPR;
2043 4463 : agg_value->pass_through.operand = operand;
2044 4463 : agg_value->pass_through.op_type = TREE_TYPE (lhs);
2045 : }
2046 1485466 : else if (is_gimple_assign (stmt))
2047 : {
2048 1426579 : code = gimple_assign_rhs_code (stmt);
2049 1426579 : switch (gimple_assign_rhs_class (stmt))
2050 : {
2051 1339022 : case GIMPLE_SINGLE_RHS:
2052 1339022 : if (is_gimple_ip_invariant (rhs1))
2053 : {
2054 966165 : agg_value->pass_through.operand = rhs1;
2055 966165 : return;
2056 : }
2057 : code = NOP_EXPR;
2058 : break;
2059 :
2060 29820 : case GIMPLE_UNARY_RHS:
2061 : /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
2062 : (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
2063 : tcc_binary, this subtleness is somewhat misleading.
2064 :
2065 : Since tcc_unary is widely used in IPA-CP code to check an operation
2066 : with one operand, here we only allow tc_unary operation to avoid
2067 : possible problem. Then we can use (opclass == tc_unary) or not to
2068 : distinguish unary and binary. */
2069 29820 : if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code)
2070 31958 : || lto_variably_modified_type_p (TREE_TYPE (lhs)))
2071 27682 : return;
2072 :
2073 2138 : rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
2074 2138 : agg_value->pass_through.op_type = TREE_TYPE (lhs);
2075 2138 : break;
2076 :
2077 57245 : case GIMPLE_BINARY_RHS:
2078 57245 : {
2079 57245 : gimple *rhs1_stmt = stmt;
2080 57245 : gimple *rhs2_stmt = stmt;
2081 57245 : tree rhs2 = gimple_assign_rhs2 (stmt);
2082 :
2083 57245 : if (lto_variably_modified_type_p (TREE_TYPE (lhs)))
2084 29784 : return;
2085 :
2086 57241 : rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
2087 57241 : rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
2088 :
2089 57241 : if (is_gimple_ip_invariant (rhs2))
2090 : {
2091 27461 : agg_value->pass_through.operand = rhs2;
2092 27461 : agg_value->pass_through.op_type = TREE_TYPE (lhs);
2093 27461 : stmt = rhs1_stmt;
2094 : }
2095 29780 : else if (is_gimple_ip_invariant (rhs1))
2096 : {
2097 2190 : if (TREE_CODE_CLASS (code) == tcc_comparison)
2098 0 : code = swap_tree_comparison (code);
2099 2190 : else if (!commutative_tree_code (code))
2100 : return;
2101 :
2102 0 : agg_value->pass_through.operand = rhs1;
2103 0 : agg_value->pass_through.op_type = TREE_TYPE (lhs);
2104 0 : stmt = rhs2_stmt;
2105 0 : rhs1 = rhs2;
2106 : }
2107 : else
2108 : return;
2109 :
2110 27461 : if (TREE_CODE_CLASS (code) != tcc_comparison
2111 54382 : && !useless_type_conversion_p (TREE_TYPE (lhs),
2112 26921 : TREE_TYPE (rhs1)))
2113 : return;
2114 : }
2115 27461 : break;
2116 :
2117 : default:
2118 : return;
2119 : }
2120 : }
2121 : else
2122 : return;
2123 :
2124 406919 : if (TREE_CODE (rhs1) != SSA_NAME)
2125 347901 : index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
2126 : &agg_value->offset,
2127 : &agg_value->by_ref);
2128 59018 : else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
2129 44863 : index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
2130 :
2131 392764 : if (index >= 0)
2132 : {
2133 89240 : if (agg_value->offset >= 0)
2134 47878 : agg_value->type = TREE_TYPE (rhs1);
2135 89240 : agg_value->pass_through.formal_id = index;
2136 89240 : agg_value->pass_through.operation = code;
2137 : }
2138 : else
2139 317679 : agg_value->pass_through.operand = NULL_TREE;
2140 : }
2141 :
2142 : /* If STMT is a memory store to the object whose address is BASE, extract
2143 : information (offset, size, and value) into CONTENT, and return true,
2144 : otherwise we conservatively assume the whole object is modified with
2145 : unknown content, and return false. CHECK_REF means that access to object
2146 : is expected to be in form of MEM_REF expression. */
2147 :
2148 : static bool
2149 2810884 : extract_mem_content (struct ipa_func_body_info *fbi,
2150 : gimple *stmt, tree base, bool check_ref,
2151 : struct ipa_known_agg_contents_list *content)
2152 : {
2153 2810884 : HOST_WIDE_INT lhs_offset, lhs_size;
2154 2810884 : bool reverse;
2155 :
2156 2810884 : if (!is_gimple_assign (stmt))
2157 : return false;
2158 :
2159 1828814 : tree lhs = gimple_assign_lhs (stmt);
2160 1828814 : tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
2161 : &reverse);
2162 1828814 : if (!lhs_base)
2163 : return false;
2164 :
2165 1827149 : if (check_ref)
2166 : {
2167 146724 : if (TREE_CODE (lhs_base) != MEM_REF
2168 118274 : || TREE_OPERAND (lhs_base, 0) != base
2169 193347 : || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
2170 106400 : return false;
2171 : }
2172 1680425 : else if (lhs_base != base)
2173 : return false;
2174 :
2175 1716709 : content->offset = lhs_offset;
2176 1716709 : content->size = lhs_size;
2177 1716709 : content->type = TREE_TYPE (lhs);
2178 1716709 : content->next = NULL;
2179 :
2180 1716709 : analyze_agg_content_value (fbi, &content->value, stmt);
2181 1716709 : return true;
2182 : }
2183 :
2184 : /* Traverse statements from CALL backwards, scanning whether an aggregate given
2185 : in ARG is filled in constants or values that are derived from caller's
2186 : formal parameter in the way described by some kinds of jump functions. FBI
2187 : is the context of the caller function for interprocedural analysis. ARG can
2188 : either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2189 : the type of the aggregate, JFUNC is the jump function for the aggregate. */
2190 :
2191 : static void
2192 3366124 : determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
2193 : gcall *call, tree arg,
2194 : tree arg_type,
2195 : struct ipa_jump_func *jfunc)
2196 : {
2197 3366124 : struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
2198 3366124 : bitmap visited = NULL;
2199 3366124 : int item_count = 0, value_count = 0;
2200 3366124 : HOST_WIDE_INT arg_offset, arg_size;
2201 3366124 : tree arg_base;
2202 3366124 : bool check_ref, by_ref;
2203 3366124 : ao_ref r;
2204 3366124 : int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
2205 :
2206 3366124 : if (max_agg_items == 0)
2207 846337 : return;
2208 :
2209 : /* The function operates in three stages. First, we prepare check_ref, r,
2210 : arg_base and arg_offset based on what is actually passed as an actual
2211 : argument. */
2212 :
2213 3366124 : if (POINTER_TYPE_P (arg_type))
2214 : {
2215 3007482 : by_ref = true;
2216 3007482 : if (TREE_CODE (arg) == SSA_NAME)
2217 : {
2218 1097560 : tree type_size;
2219 1097560 : if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
2220 1097560 : || !POINTER_TYPE_P (TREE_TYPE (arg)))
2221 : return;
2222 780963 : check_ref = true;
2223 780963 : arg_base = arg;
2224 780963 : arg_offset = 0;
2225 780963 : type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2226 780963 : arg_size = tree_to_uhwi (type_size);
2227 780963 : ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2228 : }
2229 1909922 : else if (TREE_CODE (arg) == ADDR_EXPR)
2230 : {
2231 1833976 : bool reverse;
2232 :
2233 1833976 : arg = TREE_OPERAND (arg, 0);
2234 1833976 : arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2235 : &arg_size, &reverse);
2236 1833976 : if (!arg_base)
2237 453718 : return;
2238 1832761 : if (DECL_P (arg_base))
2239 : {
2240 1380258 : check_ref = false;
2241 1380258 : ao_ref_init (&r, arg_base);
2242 : }
2243 : else
2244 : return;
2245 : }
2246 : else
2247 : return;
2248 : }
2249 : else
2250 : {
2251 358642 : bool reverse;
2252 :
2253 358642 : gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2254 :
2255 358642 : by_ref = false;
2256 358642 : check_ref = false;
2257 358642 : arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2258 : &arg_size, &reverse);
2259 358642 : if (!arg_base)
2260 76 : return;
2261 :
2262 358566 : ao_ref_init (&r, arg);
2263 : }
2264 :
2265 : /* Second stage traverses virtual SSA web backwards starting from the call
2266 : statement, only looks at individual dominating virtual operand (its
2267 : definition dominates the call), as long as it is confident that content
2268 : of the aggregate is affected by definition of the virtual operand, it
2269 : builds a sorted linked list of ipa_agg_jf_list describing that. */
2270 :
2271 2519787 : for (tree dom_vuse = gimple_vuse (call);
2272 24721171 : dom_vuse && fbi->aa_walk_budget > 0;)
2273 : {
2274 24216948 : gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2275 :
2276 24216948 : if (gphi *phi = dyn_cast <gphi *> (stmt))
2277 : {
2278 2365824 : dom_vuse = get_continuation_for_phi (phi, &r, true,
2279 1182912 : fbi->aa_walk_budget,
2280 : &visited, false, NULL, NULL);
2281 1182912 : continue;
2282 : }
2283 :
2284 23034036 : fbi->aa_walk_budget--;
2285 23034036 : if (stmt_may_clobber_ref_p_1 (stmt, &r))
2286 : {
2287 2810884 : struct ipa_known_agg_contents_list *content
2288 2810884 : = XALLOCA (struct ipa_known_agg_contents_list);
2289 :
2290 2810884 : if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2291 : break;
2292 :
2293 : /* Now we get a dominating virtual operand, and need to check
2294 : whether its value is clobbered any other dominating one. */
2295 1716709 : if ((content->value.pass_through.formal_id >= 0
2296 1627469 : || content->value.pass_through.operand)
2297 1055405 : && !clobber_by_agg_contents_list_p (all_list, content)
2298 : /* Since IPA-CP stores results with unsigned int offsets, we can
2299 : discard those which would not fit now before we stream them to
2300 : WPA. */
2301 2711527 : && (content->offset + content->size - arg_offset
2302 : <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
2303 : {
2304 994680 : struct ipa_known_agg_contents_list *copy
2305 994680 : = XALLOCA (struct ipa_known_agg_contents_list);
2306 :
2307 : /* Add to the list consisting of only dominating virtual
2308 : operands, whose definitions can finally reach the call. */
2309 994680 : add_to_agg_contents_list (&list, (*copy = *content, copy));
2310 :
2311 994680 : if (++value_count == max_agg_items)
2312 : break;
2313 : }
2314 :
2315 : /* Add to the list consisting of all dominating virtual operands. */
2316 1714478 : add_to_agg_contents_list (&all_list, content);
2317 :
2318 1714478 : if (++item_count == 2 * max_agg_items)
2319 : break;
2320 : }
2321 42954902 : dom_vuse = gimple_vuse (stmt);
2322 : }
2323 :
2324 2519787 : if (visited)
2325 615717 : BITMAP_FREE (visited);
2326 :
2327 : /* Third stage just goes over the list and creates an appropriate vector of
2328 : ipa_agg_jf_item structures out of it, of course only if there are
2329 : any meaningful items to begin with. */
2330 :
2331 2519787 : if (value_count)
2332 : {
2333 341136 : jfunc->agg.by_ref = by_ref;
2334 341136 : build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2335 : }
2336 : }
2337 :
2338 :
2339 : /* Return the Ith param type of callee associated with call graph
2340 : edge E. */
2341 :
2342 : tree
2343 6236502 : ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2344 : {
2345 6236502 : int n;
2346 6236502 : tree type = (e->callee
2347 6236502 : ? TREE_TYPE (e->callee->decl)
2348 6236502 : : gimple_call_fntype (e->call_stmt));
2349 6236502 : tree t = TYPE_ARG_TYPES (type);
2350 :
2351 12812795 : for (n = 0; n < i; n++)
2352 : {
2353 6829204 : if (!t)
2354 : break;
2355 6576293 : t = TREE_CHAIN (t);
2356 : }
2357 6236502 : if (t && t != void_list_node)
2358 5890357 : return TREE_VALUE (t);
2359 346145 : if (!e->callee)
2360 : return NULL;
2361 324359 : t = DECL_ARGUMENTS (e->callee->decl);
2362 851026 : for (n = 0; n < i; n++)
2363 : {
2364 806529 : if (!t)
2365 : return NULL;
2366 526667 : t = TREE_CHAIN (t);
2367 : }
2368 44497 : if (t)
2369 2193 : return TREE_TYPE (t);
2370 : return NULL;
2371 : }
2372 :
2373 : /* Return a pointer to an ipa_vr just like TMP, but either find it in
2374 : ipa_vr_hash_table or allocate it in GC memory. */
2375 :
2376 : static ipa_vr *
2377 5755749 : ipa_get_value_range (const vrange &tmp)
2378 : {
2379 5755749 : inchash::hash hstate;
2380 5755749 : inchash::add_vrange (tmp, hstate);
2381 5755749 : hashval_t hash = hstate.end ();
2382 5755749 : ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
2383 5755749 : if (*slot)
2384 : return *slot;
2385 :
2386 638362 : ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
2387 638362 : *slot = vr;
2388 638362 : return vr;
2389 : }
2390 :
2391 : /* Assign to JF a pointer to a range just like TMP but either fetch a
2392 : copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2393 :
2394 : static void
2395 4989066 : ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
2396 : {
2397 1901319 : jf->m_vr = ipa_get_value_range (tmp);
2398 3087747 : }
2399 :
2400 : static void
2401 618159 : ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
2402 : {
2403 618159 : value_range tmp;
2404 618159 : vr.get_vrange (tmp);
2405 618159 : ipa_set_jfunc_vr (jf, tmp);
2406 618159 : }
2407 :
2408 : /* Given VAL that conforms to is_gimple_ip_invariant, produce a VRANGE that
2409 : represents it as a range. CONTEXT_NODE is the call graph node representing
2410 : the function for which optimization flags should be evaluated. */
2411 :
2412 : void
2413 1425675 : ipa_get_range_from_ip_invariant (vrange &r, tree val, cgraph_node *context_node)
2414 : {
2415 1425675 : if (TREE_CODE (val) == ADDR_EXPR)
2416 : {
2417 1023 : symtab_node *symbol;
2418 1023 : tree base = TREE_OPERAND (val, 0);
2419 1023 : if (!DECL_P (base))
2420 : {
2421 182 : r.set_varying (TREE_TYPE (val));
2422 182 : return;
2423 : }
2424 841 : if (!decl_in_symtab_p (base))
2425 : {
2426 0 : r.set_nonzero (TREE_TYPE (val));
2427 0 : return;
2428 : }
2429 841 : if (!(symbol = symtab_node::get (base)))
2430 : {
2431 0 : r.set_varying (TREE_TYPE (val));
2432 0 : return;
2433 : }
2434 :
2435 841 : bool delete_null_pointer_checks
2436 841 : = opt_for_fn (context_node->decl, flag_delete_null_pointer_checks);
2437 841 : if (symbol->nonzero_address (delete_null_pointer_checks))
2438 841 : r.set_nonzero (TREE_TYPE (val));
2439 : else
2440 0 : r.set_varying (TREE_TYPE (val));
2441 : }
2442 : else
2443 1424652 : r.set (val, val);
2444 : }
2445 :
2446 : /* If T is an SSA_NAME that is the result of a simple type conversion statement
2447 : from an integer type to another integer type which is known to be able to
2448 : represent the values the operand of the conversion can hold, return the
2449 : operand of that conversion, otherwise return T. */
2450 :
2451 : static tree
2452 6236502 : skip_a_safe_conversion_op (tree t)
2453 : {
2454 6236502 : if (TREE_CODE (t) != SSA_NAME
2455 6236502 : || SSA_NAME_IS_DEFAULT_DEF (t))
2456 : return t;
2457 :
2458 1527604 : gimple *def = SSA_NAME_DEF_STMT (t);
2459 1527604 : if (!is_gimple_assign (def)
2460 1259455 : || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))
2461 244780 : || !INTEGRAL_TYPE_P (TREE_TYPE (t))
2462 1707101 : || !INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def))))
2463 : return t;
2464 :
2465 171479 : tree rhs1 = gimple_assign_rhs1 (def);
2466 171479 : if (TYPE_PRECISION (TREE_TYPE (t))
2467 171479 : >= TYPE_PRECISION (TREE_TYPE (rhs1)))
2468 : return gimple_assign_rhs1 (def);
2469 :
2470 8279 : value_range vr (TREE_TYPE (rhs1));
2471 16558 : if (!get_range_query (cfun)->range_of_expr (vr, rhs1, def)
2472 8279 : || vr.undefined_p ())
2473 : return t;
2474 :
2475 8261 : irange &ir = as_a <irange> (vr);
2476 8261 : if (range_fits_type_p (&ir, TYPE_PRECISION (TREE_TYPE (t)),
2477 8261 : TYPE_SIGN (TREE_TYPE (t))))
2478 4008 : return gimple_assign_rhs1 (def);
2479 :
2480 : return t;
2481 8279 : }
2482 :
2483 : /* Initializes ipa_edge_args summary of CBE given its callback-carrying edge.
2484 : This primarily means allocating the correct amount of jump functions. */
2485 :
2486 : static inline void
2487 15083 : init_callback_edge_summary (struct cgraph_edge *cbe, tree attr)
2488 : {
2489 15083 : ipa_edge_args *cb_args = ipa_edge_args_sum->get_create (cbe);
2490 15083 : size_t jf_vec_length = callback_num_args(attr);
2491 15083 : vec_safe_grow_cleared (cb_args->jump_functions,
2492 : jf_vec_length, true);
2493 15083 : }
2494 :
2495 : /* Compute jump function for all arguments of callsite CS and insert the
2496 : information in the jump_functions array in the ipa_edge_args corresponding
2497 : to this callsite. */
2498 :
2499 : static void
2500 2952634 : ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2501 : struct cgraph_edge *cs)
2502 : {
2503 2952634 : ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2504 2952634 : ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2505 2952634 : gcall *call = cs->call_stmt;
2506 2952634 : int n, arg_num = gimple_call_num_args (call);
2507 2952634 : bool useful_context = false;
2508 :
2509 2952634 : if (arg_num == 0 || args->jump_functions)
2510 273080 : return;
2511 2679554 : vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2512 2679554 : if (flag_devirtualize)
2513 2498721 : vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2514 :
2515 2679554 : if (gimple_call_internal_p (call))
2516 : return;
2517 2679554 : if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2518 : return;
2519 :
2520 2679554 : auto_vec<cgraph_edge*> callback_edges;
2521 8916056 : for (n = 0; n < arg_num; n++)
2522 : {
2523 6236502 : struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2524 6236502 : tree arg = gimple_call_arg (call, n);
2525 6236502 : tree param_type = ipa_get_callee_param_type (cs, n);
2526 6236502 : if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2527 : {
2528 3216218 : tree instance;
2529 3216218 : class ipa_polymorphic_call_context context (cs->caller->decl,
2530 3216218 : arg, cs->call_stmt,
2531 3216218 : &instance);
2532 3216218 : context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2533 : &fbi->aa_walk_budget);
2534 3216218 : *ipa_get_ith_polymorhic_call_context (args, n) = context;
2535 6432436 : if (!context.useless_p ())
2536 : useful_context = true;
2537 : }
2538 :
2539 6236502 : value_range vr (TREE_TYPE (arg));
2540 6236502 : if (POINTER_TYPE_P (TREE_TYPE (arg)))
2541 : {
2542 6944262 : if (!get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
2543 3472131 : || vr.varying_p ()
2544 6035703 : || vr.undefined_p ())
2545 : {
2546 908971 : bool strict_overflow = false;
2547 908971 : if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2548 0 : vr.set_nonzero (TREE_TYPE (arg));
2549 : else
2550 908971 : vr.set_varying (TREE_TYPE (arg));
2551 : }
2552 3472131 : gcc_assert (!vr.undefined_p ());
2553 3472131 : unsigned HOST_WIDE_INT bitpos;
2554 3472131 : unsigned align = BITS_PER_UNIT;
2555 :
2556 3472131 : if (!vr.singleton_p ())
2557 3396602 : get_pointer_alignment_1 (arg, &align, &bitpos);
2558 :
2559 3472131 : if (align > BITS_PER_UNIT
2560 3472131 : && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
2561 : {
2562 1283160 : unsigned prec = TYPE_PRECISION (TREE_TYPE (arg));
2563 1283160 : wide_int mask
2564 2566320 : = wi::bit_and_not (wi::mask (prec, false, prec),
2565 1283160 : wide_int::from (align / BITS_PER_UNIT - 1,
2566 1283160 : prec, UNSIGNED));
2567 1283160 : wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
2568 1283160 : UNSIGNED);
2569 1283160 : irange_bitmask bm (value, mask);
2570 1283160 : vr.update_bitmask (bm);
2571 1283160 : ipa_set_jfunc_vr (jfunc, vr);
2572 1283160 : }
2573 2188971 : else if (!vr.varying_p ())
2574 1342986 : ipa_set_jfunc_vr (jfunc, vr);
2575 : else
2576 845985 : gcc_assert (!jfunc->m_vr);
2577 : }
2578 : else
2579 : {
2580 2764371 : if (param_type
2581 2453951 : && ipa_vr_supported_type_p (TREE_TYPE (arg))
2582 2764509 : && ipa_vr_supported_type_p (param_type)
2583 3657866 : && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
2584 4593304 : && !vr.undefined_p ())
2585 : {
2586 1828795 : value_range resvr (vr);
2587 1828795 : range_cast (resvr, param_type);
2588 1828795 : if (!resvr.undefined_p () && !resvr.varying_p ())
2589 1459881 : ipa_set_jfunc_vr (jfunc, resvr);
2590 : else
2591 368914 : gcc_assert (!jfunc->m_vr);
2592 1828795 : }
2593 : else
2594 935576 : gcc_assert (!jfunc->m_vr);
2595 : }
2596 :
2597 6236502 : arg = skip_a_safe_conversion_op (arg);
2598 6236502 : if (is_gimple_ip_invariant (arg)
2599 6236502 : || (VAR_P (arg) && is_global_var (arg) && TREE_READONLY (arg)))
2600 : {
2601 2311053 : ipa_set_jf_constant (jfunc, arg, cs);
2602 2311053 : if (TREE_CODE (arg) == ADDR_EXPR)
2603 : {
2604 872404 : tree pointee = TREE_OPERAND (arg, 0);
2605 872404 : if (TREE_CODE (pointee) == FUNCTION_DECL && !cs->callback
2606 44150 : && cs->callee)
2607 : {
2608 : /* Argument is a pointer to a function. Look for a callback
2609 : attribute describing this argument. */
2610 43800 : tree callback_attr
2611 43800 : = lookup_attribute (CALLBACK_ATTR_IDENT,
2612 43800 : DECL_ATTRIBUTES (cs->callee->decl));
2613 87600 : for (; callback_attr;
2614 : callback_attr
2615 0 : = lookup_attribute (CALLBACK_ATTR_IDENT,
2616 0 : TREE_CHAIN (callback_attr)))
2617 13313 : if (callback_get_fn_index (callback_attr) == n)
2618 : break;
2619 :
2620 : /* If no callback attribute is found, check if the function is
2621 : a special case. */
2622 43800 : if (!callback_attr
2623 43800 : && callback_is_special_cased (cs->callee->decl, call))
2624 : {
2625 1770 : callback_attr
2626 1770 : = callback_special_case_attr (cs->callee->decl);
2627 : /* Check if the special attribute describes the correct
2628 : attribute, as a special cased function might have
2629 : multiple callbacks. */
2630 1770 : if (callback_get_fn_index (callback_attr) != n)
2631 : callback_attr = NULL;
2632 : }
2633 :
2634 : /* If a callback attribute describing this pointer is found,
2635 : create a callback edge to the pointee function to
2636 : allow for further optimizations. */
2637 43800 : if (callback_attr)
2638 : {
2639 15083 : cgraph_node *kernel_node
2640 15083 : = cgraph_node::get_create (pointee);
2641 15083 : unsigned callback_id = n;
2642 15083 : cgraph_edge *cbe
2643 15083 : = cs->make_callback (kernel_node, callback_id);
2644 15083 : init_callback_edge_summary (cbe, callback_attr);
2645 15083 : callback_edges.safe_push (cbe);
2646 : }
2647 : }
2648 : }
2649 : }
2650 3925449 : else if (!is_gimple_reg_type (TREE_TYPE (arg))
2651 3925449 : && TREE_CODE (arg) == PARM_DECL)
2652 : {
2653 79450 : int index = ipa_get_param_decl_index (info, arg);
2654 :
2655 79450 : gcc_assert (index >=0);
2656 : /* Aggregate passed by value, check for pass-through, otherwise we
2657 : will attempt to fill in aggregate contents later in this
2658 : for cycle. */
2659 79450 : if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2660 : {
2661 64234 : ipa_set_jf_simple_pass_through (jfunc, index, false);
2662 64234 : continue;
2663 : }
2664 : }
2665 3845999 : else if (TREE_CODE (arg) == SSA_NAME)
2666 : {
2667 2518450 : if (SSA_NAME_IS_DEFAULT_DEF (arg))
2668 : {
2669 1001632 : int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2670 1001632 : if (index >= 0)
2671 : {
2672 991515 : bool agg_p;
2673 991515 : agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2674 991515 : ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2675 : }
2676 : }
2677 : else
2678 : {
2679 1516818 : gimple *stmt = SSA_NAME_DEF_STMT (arg);
2680 1516818 : if (is_gimple_assign (stmt))
2681 1236856 : compute_complex_assign_jump_func (fbi, info, jfunc,
2682 : call, stmt, arg, param_type);
2683 279962 : else if (gimple_code (stmt) == GIMPLE_PHI)
2684 89426 : compute_complex_ancestor_jump_func (fbi, info, jfunc,
2685 : call,
2686 : as_a <gphi *> (stmt));
2687 : }
2688 : }
2689 :
2690 : /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2691 : passed (because type conversions are ignored in gimple). Usually we can
2692 : safely get type from function declaration, but in case of K&R prototypes or
2693 : variadic functions we can try our luck with type of the pointer passed.
2694 : TODO: Since we look for actual initialization of the memory object, we may better
2695 : work out the type based on the memory stores we find. */
2696 6172268 : if (!param_type)
2697 343950 : param_type = TREE_TYPE (arg);
2698 :
2699 6172268 : if ((jfunc->type != IPA_JF_PASS_THROUGH
2700 1034394 : || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2701 5787982 : && (jfunc->type != IPA_JF_ANCESTOR
2702 160252 : || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2703 11900420 : && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2704 5369450 : || POINTER_TYPE_P (param_type)))
2705 3366124 : determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2706 6236502 : }
2707 :
2708 2679554 : if (!callback_edges.is_empty ())
2709 : {
2710 : /* For every callback edge, fetch jump functions of arguments
2711 : passed to them and copy them over to their respective summaries.
2712 : This avoids recalculating them for every callback edge, since their
2713 : arguments are just passed through. */
2714 : unsigned j;
2715 30166 : for (j = 0; j < callback_edges.length (); j++)
2716 : {
2717 15083 : cgraph_edge *callback_edge = callback_edges[j];
2718 15083 : ipa_edge_args *cb_summary
2719 15083 : = ipa_edge_args_sum->get_create (callback_edge);
2720 15083 : auto_vec<int> arg_mapping
2721 15083 : = callback_get_arg_mapping (callback_edge, cs);
2722 15083 : unsigned i;
2723 30166 : for (i = 0; i < arg_mapping.length (); i++)
2724 : {
2725 15083 : if (arg_mapping[i] == -1)
2726 0 : continue;
2727 15083 : class ipa_jump_func *src
2728 15083 : = ipa_get_ith_jump_func (args, arg_mapping[i]);
2729 15083 : class ipa_jump_func *dst = ipa_get_ith_jump_func (cb_summary, i);
2730 15083 : ipa_duplicate_jump_function (cs, callback_edge, src, dst);
2731 : }
2732 15083 : }
2733 : }
2734 :
2735 2679554 : if (!useful_context)
2736 4654387 : vec_free (args->polymorphic_call_contexts);
2737 2679554 : }
2738 :
2739 : /* Compute jump functions for all edges - both direct and indirect - outgoing
2740 : from BB. */
2741 :
2742 : static void
2743 11011014 : ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2744 : {
2745 11011014 : struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2746 11011014 : int i;
2747 11011014 : struct cgraph_edge *cs;
2748 :
2749 20592194 : FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2750 : {
2751 5535429 : struct cgraph_node *callee = cs->callee;
2752 :
2753 5535429 : if (callee)
2754 : {
2755 5396619 : callee = callee->ultimate_alias_target ();
2756 : /* We do not need to bother analyzing calls to unknown functions
2757 : unless they may become known during lto/whopr. */
2758 3539461 : if (!callee->definition && !flag_lto
2759 5411689 : && !gimple_call_fnspec (cs->call_stmt).known_p ()
2760 7994484 : && !callback_edge_callee_has_attr (cs))
2761 2582795 : continue;
2762 : }
2763 2952634 : ipa_compute_jump_functions_for_edge (fbi, cs);
2764 : }
2765 11011014 : }
2766 :
2767 : /* If REF is a memory access that loads a function pointer (but not a method
2768 : pointer) from a RECORD_TYPE, return true and store the type of the RECORD to
2769 : *REC_TYPE and the byte offset of the field to *FLD_OFFSET. Otherwise return
2770 : false. OHS es the "other hand side" which is used to check type
2771 : compatibility with field in question, when possible. */
2772 :
2773 : static bool
2774 118404 : is_func_ptr_from_record (tree ref, tree *rec_type, unsigned *fld_offset,
2775 : tree ohs)
2776 : {
2777 118416 : if (!POINTER_TYPE_P (TREE_TYPE (ref))
2778 118416 : || TREE_CODE (TREE_TYPE (TREE_TYPE (ref))) != FUNCTION_TYPE)
2779 : return false;
2780 :
2781 103286 : if (TREE_CODE (ref) == COMPONENT_REF
2782 103286 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
2783 : {
2784 55412 : gcc_assert (POINTER_TYPE_P (TREE_TYPE (ohs)));
2785 55412 : ohs = TREE_TYPE (TREE_TYPE (ohs));
2786 55412 : tree ftype = TREE_TYPE (TREE_OPERAND (ref, 1));
2787 55412 : if (!POINTER_TYPE_P (ftype))
2788 : return false;
2789 55412 : ftype = TREE_TYPE (ftype);
2790 55412 : if (!types_compatible_p (ohs, ftype))
2791 : return false;
2792 :
2793 55283 : tree tree_off = bit_position (TREE_OPERAND (ref, 1));
2794 55283 : if (!tree_fits_shwi_p (tree_off))
2795 : return false;
2796 55283 : HOST_WIDE_INT bit_offset = tree_to_shwi (tree_off);
2797 55283 : if (bit_offset % BITS_PER_UNIT)
2798 : return false;
2799 55283 : HOST_WIDE_INT unit_offset = bit_offset / BITS_PER_UNIT;
2800 55283 : if (unit_offset > UINT_MAX)
2801 : return false;
2802 55283 : *rec_type = TREE_TYPE (TREE_OPERAND (ref, 0));
2803 55283 : *fld_offset = unit_offset;
2804 55283 : return true;
2805 : }
2806 47874 : else if (TREE_CODE (ref) == MEM_REF
2807 4980 : && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
2808 4980 : && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0))))
2809 : == RECORD_TYPE)
2810 50023 : && tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
2811 : {
2812 2149 : HOST_WIDE_INT unit_offset = tree_to_shwi (TREE_OPERAND (ref, 1));
2813 2149 : if (unit_offset > UINT_MAX)
2814 : return false;
2815 2149 : *rec_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
2816 2149 : *fld_offset = unit_offset;
2817 2149 : return true;
2818 : }
2819 : return false;
2820 : }
2821 :
2822 : /* If STMT looks like a statement loading a value from a member pointer formal
2823 : parameter, return that parameter and store the offset of the field to
2824 : *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2825 : might be clobbered). If USE_DELTA, then we look for a use of the delta
2826 : field rather than the pfn. */
2827 :
2828 : static tree
2829 2164 : ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2830 : HOST_WIDE_INT *offset_p)
2831 : {
2832 2164 : tree rhs, fld, ptr_field, delta_field;
2833 2164 : tree ref_field = NULL_TREE;
2834 2164 : tree ref_offset = NULL_TREE;
2835 :
2836 2164 : if (!gimple_assign_single_p (stmt))
2837 : return NULL_TREE;
2838 :
2839 2164 : rhs = gimple_assign_rhs1 (stmt);
2840 2164 : if (TREE_CODE (rhs) == COMPONENT_REF)
2841 : {
2842 1301 : ref_field = TREE_OPERAND (rhs, 1);
2843 1301 : rhs = TREE_OPERAND (rhs, 0);
2844 : }
2845 :
2846 2164 : if (TREE_CODE (rhs) == MEM_REF)
2847 : {
2848 1463 : ref_offset = TREE_OPERAND (rhs, 1);
2849 1463 : if (ref_field && integer_nonzerop (ref_offset))
2850 : return NULL_TREE;
2851 : }
2852 701 : else if (!ref_field)
2853 : return NULL_TREE;
2854 :
2855 2164 : if (TREE_CODE (rhs) == MEM_REF
2856 1463 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
2857 3627 : && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (rhs, 0)))
2858 : {
2859 598 : rhs = TREE_OPERAND (rhs, 0);
2860 598 : if (TREE_CODE (SSA_NAME_VAR (rhs)) != PARM_DECL
2861 598 : || !type_like_member_ptr_p (TREE_TYPE (TREE_TYPE (rhs)), &ptr_field,
2862 : &delta_field))
2863 0 : return NULL_TREE;
2864 : }
2865 : else
2866 : {
2867 1566 : if (TREE_CODE (rhs) == MEM_REF
2868 1566 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR)
2869 0 : rhs = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
2870 1566 : if (TREE_CODE (rhs) != PARM_DECL
2871 1566 : || !type_like_member_ptr_p (TREE_TYPE (rhs), &ptr_field,
2872 : &delta_field))
2873 1288 : return NULL_TREE;
2874 : }
2875 :
2876 876 : if (use_delta)
2877 0 : fld = delta_field;
2878 : else
2879 876 : fld = ptr_field;
2880 :
2881 876 : if (ref_field)
2882 : {
2883 876 : if (ref_field != fld)
2884 : return NULL_TREE;
2885 : }
2886 0 : else if (!tree_int_cst_equal (byte_position (fld), ref_offset))
2887 : return NULL_TREE;
2888 :
2889 876 : if (offset_p)
2890 438 : *offset_p = int_bit_position (fld);
2891 : return rhs;
2892 : }
2893 :
2894 : /* Returns true iff T is an SSA_NAME defined by a statement. */
2895 :
2896 : static bool
2897 3040 : ipa_is_ssa_with_stmt_def (tree t)
2898 : {
2899 3040 : if (TREE_CODE (t) == SSA_NAME
2900 3040 : && !SSA_NAME_IS_DEFAULT_DEF (t))
2901 : return true;
2902 : else
2903 0 : return false;
2904 : }
2905 :
2906 : /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2907 : (described by INFO). PARMS_AINFO is a pointer to a vector containing
2908 : intermediate information about each formal parameter. Currently it checks
2909 : whether the call calls a pointer that is a formal parameter and if so, the
2910 : parameter is marked with the called flag and an indirect call graph edge
2911 : describing the call is created. This is very simple for ordinary pointers
2912 : represented in SSA but not-so-nice when it comes to member pointers. The
2913 : ugly part of this function does nothing more than trying to match the
2914 : pattern of such a call. Look up the documentation of macro
2915 : TARGET_PTRMEMFUNC_VBIT_LOCATION for details. An example of such a pattern
2916 : is the gimple dump below, the call is on the last line:
2917 :
2918 : <bb 2>:
2919 : f$__delta_5 = f.__delta;
2920 : f$__pfn_24 = f.__pfn;
2921 :
2922 : or
2923 : <bb 2>:
2924 : f$__delta_5 = MEM[(struct *)&f];
2925 : f$__pfn_24 = MEM[(struct *)&f + 4B];
2926 :
2927 : and a few lines below:
2928 :
2929 : <bb 5>
2930 : D.2496_3 = (int) f$__pfn_24;
2931 : D.2497_4 = D.2496_3 & 1;
2932 : if (D.2497_4 != 0)
2933 : goto <bb 3>;
2934 : else
2935 : goto <bb 4>;
2936 :
2937 : <bb 6>:
2938 : D.2500_7 = (unsigned int) f$__delta_5;
2939 : D.2501_8 = &S + D.2500_7;
2940 : D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2941 : D.2503_10 = *D.2502_9;
2942 : D.2504_12 = f$__pfn_24 + -1;
2943 : D.2505_13 = (unsigned int) D.2504_12;
2944 : D.2506_14 = D.2503_10 + D.2505_13;
2945 : D.2507_15 = *D.2506_14;
2946 : iftmp.11_16 = (String:: *) D.2507_15;
2947 :
2948 : <bb 7>:
2949 : # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2950 : D.2500_19 = (unsigned int) f$__delta_5;
2951 : D.2508_20 = &S + D.2500_19;
2952 : D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2953 :
2954 : Such patterns are results of simple calls to a member pointer:
2955 :
2956 : int doprinting (int (MyString::* f)(int) const)
2957 : {
2958 : MyString S ("somestring");
2959 :
2960 : return (S.*f)(4);
2961 : }
2962 :
2963 : Moreover, the function also looks for called pointers loaded from aggregates
2964 : passed by value or reference. */
2965 :
2966 : static void
2967 114450 : ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2968 : tree target)
2969 : {
2970 114450 : class ipa_node_params *info = fbi->info;
2971 114450 : HOST_WIDE_INT offset;
2972 114450 : bool by_ref;
2973 :
2974 114450 : if (SSA_NAME_IS_DEFAULT_DEF (target))
2975 : {
2976 3791 : tree var = SSA_NAME_VAR (target);
2977 3791 : int index = ipa_get_param_decl_index (info, var);
2978 3791 : if (index >= 0)
2979 : {
2980 3789 : cgraph_edge *cs = fbi->node->get_edge (call);
2981 3789 : cgraph_simple_indirect_info *sii =
2982 3789 : as_a <cgraph_simple_indirect_info *> (cs->indirect_info);
2983 3789 : sii->param_index = index;
2984 3789 : gcc_assert (!sii->agg_contents && !sii->member_ptr);
2985 3789 : ipa_set_param_used_by_indirect_call (info, index, true);
2986 : }
2987 3791 : return;
2988 : }
2989 :
2990 110659 : int index;
2991 110659 : gimple *def = SSA_NAME_DEF_STMT (target);
2992 110659 : bool guaranteed_unmodified;
2993 110659 : if (gimple_assign_single_p (def))
2994 : {
2995 88081 : cgraph_edge *cs = fbi->node->get_edge (call);
2996 88081 : cgraph_simple_indirect_info *sii =
2997 88081 : as_a <cgraph_simple_indirect_info *> (cs->indirect_info);
2998 88081 : tree rectype;
2999 88081 : unsigned fldoff;
3000 88081 : if (is_func_ptr_from_record (gimple_assign_rhs1 (def), &rectype, &fldoff,
3001 : target))
3002 : {
3003 46259 : sii->fnptr_loaded_from_record = 1;
3004 46259 : sii->fld_offset = fldoff;
3005 46259 : sii->rec_type = rectype;
3006 : }
3007 88081 : if (ipa_load_from_parm_agg (fbi, info->descriptors, def,
3008 : gimple_assign_rhs1 (def), &index, &offset,
3009 : NULL, &by_ref, &guaranteed_unmodified))
3010 : {
3011 2179 : sii->param_index = index;
3012 2179 : sii->offset = offset;
3013 2179 : sii->agg_contents = 1;
3014 2179 : sii->by_ref = by_ref;
3015 2179 : sii->guaranteed_unmodified = guaranteed_unmodified;
3016 2179 : ipa_set_param_used_by_indirect_call (info, index, true);
3017 2179 : return;
3018 : }
3019 : }
3020 :
3021 : /* Now we need to try to match the complex pattern of calling a member
3022 : pointer. */
3023 108480 : if (gimple_code (def) != GIMPLE_PHI
3024 1143 : || gimple_phi_num_args (def) != 2
3025 1135 : || !POINTER_TYPE_P (TREE_TYPE (target))
3026 109615 : || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
3027 : return;
3028 :
3029 : /* First, we need to check whether one of these is a load from a member
3030 : pointer that is a parameter to this function. */
3031 863 : tree n1 = PHI_ARG_DEF (def, 0);
3032 863 : tree n2 = PHI_ARG_DEF (def, 1);
3033 1726 : if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3034 : return;
3035 863 : gimple *d1 = SSA_NAME_DEF_STMT (n1);
3036 863 : gimple *d2 = SSA_NAME_DEF_STMT (n2);
3037 :
3038 863 : tree rec;
3039 863 : basic_block bb, virt_bb;
3040 863 : basic_block join = gimple_bb (def);
3041 863 : if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3042 : {
3043 0 : if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3044 : return;
3045 :
3046 0 : bb = EDGE_PRED (join, 0)->src;
3047 0 : virt_bb = gimple_bb (d2);
3048 : }
3049 863 : else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3050 : {
3051 438 : bb = EDGE_PRED (join, 1)->src;
3052 438 : virt_bb = gimple_bb (d1);
3053 : }
3054 : else
3055 : return;
3056 :
3057 : /* Second, we need to check that the basic blocks are laid out in the way
3058 : corresponding to the pattern. */
3059 :
3060 876 : if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
3061 876 : || single_succ (virt_bb) != join)
3062 : return;
3063 :
3064 :
3065 438 : if (single_pred (virt_bb) != bb)
3066 : {
3067 : /* In cases when the distinction between a normal and a virtual
3068 : function is encoded in the delta field, the load of the
3069 : actual non-virtual function pointer can be in its own BB. */
3070 :
3071 0 : if (!single_pred_p (bb) || !single_succ_p (bb))
3072 : return;
3073 0 : bb = single_pred (bb);
3074 0 : if (bb != single_pred (virt_bb))
3075 : return;
3076 : }
3077 :
3078 : /* Third, let's see that the branching is done depending on the least
3079 : significant bit of the pfn. */
3080 :
3081 876 : gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
3082 438 : if (!branch)
3083 : return;
3084 :
3085 438 : if ((gimple_cond_code (branch) != NE_EXPR
3086 0 : && gimple_cond_code (branch) != EQ_EXPR)
3087 438 : || !integer_zerop (gimple_cond_rhs (branch)))
3088 0 : return;
3089 :
3090 438 : tree cond = gimple_cond_lhs (branch);
3091 438 : if (!ipa_is_ssa_with_stmt_def (cond))
3092 : return;
3093 :
3094 438 : def = SSA_NAME_DEF_STMT (cond);
3095 438 : if (!is_gimple_assign (def)
3096 438 : || gimple_assign_rhs_code (def) != BIT_AND_EXPR
3097 876 : || !integer_onep (gimple_assign_rhs2 (def)))
3098 0 : return;
3099 :
3100 438 : cond = gimple_assign_rhs1 (def);
3101 438 : if (!ipa_is_ssa_with_stmt_def (cond))
3102 : return;
3103 :
3104 438 : def = SSA_NAME_DEF_STMT (cond);
3105 :
3106 438 : if (is_gimple_assign (def)
3107 438 : && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3108 : {
3109 438 : cond = gimple_assign_rhs1 (def);
3110 438 : if (!ipa_is_ssa_with_stmt_def (cond))
3111 : return;
3112 438 : def = SSA_NAME_DEF_STMT (cond);
3113 : }
3114 :
3115 438 : tree rec2;
3116 438 : rec2 = ipa_get_stmt_member_ptr_load_param (def,
3117 : (TARGET_PTRMEMFUNC_VBIT_LOCATION
3118 : == ptrmemfunc_vbit_in_delta),
3119 : NULL);
3120 438 : if (rec != rec2)
3121 : return;
3122 :
3123 438 : if (TREE_CODE (rec) == SSA_NAME)
3124 : {
3125 299 : index = ipa_get_param_decl_index (info, SSA_NAME_VAR (rec));
3126 299 : if (index < 0
3127 299 : || !parm_ref_data_preserved_p (fbi, index, call,
3128 : gimple_assign_rhs1 (def)))
3129 4 : return;
3130 295 : by_ref = true;
3131 : }
3132 : else
3133 : {
3134 139 : index = ipa_get_param_decl_index (info, rec);
3135 139 : if (index < 0
3136 139 : || !parm_preserved_before_stmt_p (fbi, index, call, rec))
3137 0 : return;
3138 139 : by_ref = false;
3139 : }
3140 :
3141 434 : cgraph_edge *cs = fbi->node->get_edge (call);
3142 434 : cgraph_simple_indirect_info *sii =
3143 434 : as_a <cgraph_simple_indirect_info *> (cs->indirect_info);
3144 434 : sii->param_index = index;
3145 434 : sii->offset = offset;
3146 434 : sii->agg_contents = 1;
3147 434 : sii->member_ptr = 1;
3148 434 : sii->by_ref = by_ref;
3149 434 : sii->guaranteed_unmodified = 1;
3150 434 : ipa_set_param_used_by_indirect_call (info, index, true);
3151 434 : return;
3152 : }
3153 :
3154 : /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
3155 : object referenced in the expression is a formal parameter of the caller
3156 : FBI->node (described by FBI->info), create a call note for the
3157 : statement. */
3158 :
3159 : static void
3160 24288 : ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
3161 : gcall *call, tree target)
3162 : {
3163 24288 : tree obj = OBJ_TYPE_REF_OBJECT (target);
3164 24288 : int index;
3165 24288 : HOST_WIDE_INT anc_offset;
3166 :
3167 24288 : if (!flag_devirtualize)
3168 14672 : return;
3169 :
3170 24018 : if (TREE_CODE (obj) != SSA_NAME)
3171 : return;
3172 :
3173 23659 : class ipa_node_params *info = fbi->info;
3174 23659 : if (SSA_NAME_IS_DEFAULT_DEF (obj))
3175 : {
3176 8867 : if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
3177 : return;
3178 :
3179 8867 : anc_offset = 0;
3180 8867 : index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
3181 8867 : gcc_assert (index >= 0);
3182 8867 : if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
3183 : call))
3184 : return;
3185 : }
3186 : else
3187 : {
3188 14792 : gimple *stmt = SSA_NAME_DEF_STMT (obj);
3189 14792 : tree expr;
3190 :
3191 14792 : expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
3192 14792 : if (!expr)
3193 : return;
3194 749 : index = ipa_get_param_decl_index (info,
3195 749 : SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
3196 749 : gcc_assert (index >= 0);
3197 749 : if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
3198 : call, anc_offset))
3199 : return;
3200 : }
3201 :
3202 9616 : cgraph_edge *cs = fbi->node->get_edge (call);
3203 9616 : cgraph_polymorphic_indirect_info *pii =
3204 9616 : as_a <cgraph_polymorphic_indirect_info *> (cs->indirect_info);
3205 9616 : pii->param_index = index;
3206 9616 : pii->offset = anc_offset;
3207 9616 : gcc_assert (pii->otr_token == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
3208 9616 : gcc_assert (pii->otr_type = obj_type_ref_class (target));
3209 9616 : ipa_set_param_used_by_indirect_call (info, index, true);
3210 9616 : ipa_set_param_used_by_polymorphic_call (info, index, true);
3211 : }
3212 :
3213 : /* Analyze a call statement CALL whether and how it utilizes formal parameters
3214 : of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
3215 : containing intermediate information about each formal parameter. */
3216 :
3217 : static void
3218 5757735 : ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
3219 : {
3220 5757735 : tree target = gimple_call_fn (call);
3221 :
3222 5757735 : if (!target
3223 5757735 : || (TREE_CODE (target) != SSA_NAME
3224 5420979 : && !virtual_method_call_p (target)))
3225 5618995 : return;
3226 :
3227 138740 : struct cgraph_edge *cs = fbi->node->get_edge (call);
3228 : /* If we previously turned the call into a direct call, there is
3229 : no need to analyze. */
3230 138740 : if (cs && !cs->indirect_unknown_callee)
3231 : return;
3232 :
3233 138738 : cgraph_polymorphic_indirect_info *pii;
3234 138738 : if (flag_devirtualize
3235 138738 : && (pii
3236 134474 : = dyn_cast <cgraph_polymorphic_indirect_info *> (cs->indirect_info)))
3237 : {
3238 24018 : tree instance;
3239 24018 : tree target = gimple_call_fn (call);
3240 24018 : ipa_polymorphic_call_context context (current_function_decl,
3241 24018 : target, call, &instance);
3242 :
3243 24018 : gcc_checking_assert (pii->otr_type == obj_type_ref_class (target));
3244 24018 : gcc_checking_assert (pii->otr_token
3245 : == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
3246 :
3247 24018 : pii->vptr_changed
3248 48036 : = !context.get_dynamic_type (instance,
3249 24018 : OBJ_TYPE_REF_OBJECT (target),
3250 : obj_type_ref_class (target), call,
3251 : &fbi->aa_walk_budget);
3252 24018 : pii->context = context;
3253 : }
3254 :
3255 138738 : if (TREE_CODE (target) == SSA_NAME)
3256 114450 : ipa_analyze_indirect_call_uses (fbi, call, target);
3257 24288 : else if (virtual_method_call_p (target))
3258 24288 : ipa_analyze_virtual_call_uses (fbi, call, target);
3259 : }
3260 :
3261 : /* Store that that there was a store of FN to a record of type REC_TYPE and
3262 : FLD_OFFSET. */
3263 :
3264 : static void
3265 63375 : note_fnptr_in_record (tree rec_type, unsigned fld_offset, tree fn)
3266 : {
3267 63375 : gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3268 63375 : gcc_assert (TREE_CODE (rec_type) == RECORD_TYPE);
3269 63375 : if (!noted_fnptrs_in_records)
3270 6752 : noted_fnptrs_in_records = hash_table<noted_fnptr_hasher>::create_ggc (37);
3271 :
3272 63375 : noted_fnptr_store repr;
3273 63375 : repr.rec_type = rec_type;
3274 63375 : repr.fld_offset = fld_offset;
3275 :
3276 63375 : noted_fnptr_store **slot = noted_fnptrs_in_records->find_slot (&repr,
3277 : NO_INSERT);
3278 63375 : if (slot)
3279 : {
3280 7574 : if ((*slot)->fn && (*slot)->fn != fn)
3281 804 : (*slot)->fn = nullptr;
3282 7574 : return;
3283 : }
3284 :
3285 55801 : slot = noted_fnptrs_in_records->find_slot (&repr, INSERT);
3286 55801 : *slot = ggc_cleared_alloc<noted_fnptr_store> ();
3287 55801 : (*slot)->rec_type = rec_type;
3288 55801 : (*slot)->fn = fn;
3289 55801 : (*slot)->fld_offset = fld_offset;
3290 :
3291 55801 : return;
3292 : }
3293 :
3294 : /* Dump contents of noted_fnptrs_in_records to F in humad readable form. */
3295 :
3296 : void DEBUG_FUNCTION
3297 41 : ipa_dump_noted_record_fnptrs (FILE *f)
3298 : {
3299 41 : if (!noted_fnptrs_in_records)
3300 : {
3301 38 : fprintf (f, "No noted function pointers stored in records.\n\n");
3302 38 : return;
3303 : }
3304 :
3305 3 : fprintf (f, "Noted function pointers stored in records:\n");
3306 7 : for (auto iter = noted_fnptrs_in_records->begin ();
3307 7 : iter != noted_fnptrs_in_records->end ();
3308 4 : ++iter)
3309 : {
3310 4 : const noted_fnptr_store *elem = *iter;
3311 4 : fprintf (f, " Type:");
3312 4 : print_generic_expr (f, elem->rec_type);
3313 4 : fprintf (f, ", offset %ul, function: ", elem->fld_offset);
3314 4 : print_generic_expr (f, elem->fn);
3315 4 : fprintf (f, "\n");
3316 : }
3317 3 : fprintf (f, "\n");
3318 : }
3319 :
3320 : /* Dump contents of noted_fnptrs_in_records to stderr in humad readable
3321 : form. */
3322 :
3323 : void DEBUG_FUNCTION
3324 0 : ipa_debug_noted_record_fnptrs (void)
3325 : {
3326 0 : ipa_dump_noted_record_fnptrs (stderr);
3327 0 : }
3328 :
3329 :
3330 : /* If we have noticed a single function pointer stored into a record of type
3331 : REC_TYPE at the given FLD_OFFSET (measured in bytes), return its
3332 : declaration. Otherwise return NULL_TREE. */
3333 :
3334 : tree
3335 37248 : ipa_single_noted_fnptr_in_record (tree rec_type, unsigned fld_offset)
3336 : {
3337 37248 : if (!noted_fnptrs_in_records)
3338 : return NULL_TREE;
3339 :
3340 35148 : noted_fnptr_store repr;
3341 35148 : repr.rec_type = rec_type;
3342 35148 : repr.fld_offset = fld_offset;
3343 :
3344 35148 : noted_fnptr_store **slot = noted_fnptrs_in_records->find_slot (&repr,
3345 : NO_INSERT);
3346 35148 : if (!slot)
3347 : return NULL_TREE;
3348 3287 : return (*slot)->fn;
3349 : }
3350 :
3351 : /* Free the hash table storing the information about function pointers stored
3352 : to a particular position in record typed strucutres. */
3353 :
3354 : void
3355 128872 : ipa_free_noted_fnptr_calls ()
3356 : {
3357 128872 : if (noted_fnptrs_in_records)
3358 : {
3359 6389 : noted_fnptrs_in_records->empty ();
3360 6389 : noted_fnptrs_in_records = nullptr;
3361 : }
3362 128872 : }
3363 :
3364 : /* Analyze the call statement STMT with respect to formal parameters (described
3365 : in INFO) of caller given by FBI->NODE. Also note any stores of function
3366 : pointers to record typed memory. */
3367 :
3368 : static void
3369 31379039 : ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3370 : {
3371 31379039 : if (is_gimple_call (stmt))
3372 5757735 : ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
3373 25621304 : else if (gimple_assign_single_p (stmt)
3374 13474312 : && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR
3375 26857459 : && (TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0))
3376 : == FUNCTION_DECL))
3377 : {
3378 30323 : tree rec_type;
3379 30323 : unsigned fld_offset;
3380 30323 : if (is_func_ptr_from_record (gimple_assign_lhs (stmt), &rec_type,
3381 : &fld_offset, gimple_assign_rhs1 (stmt)))
3382 11173 : note_fnptr_in_record (rec_type, fld_offset,
3383 11173 : TREE_OPERAND (gimple_assign_rhs1 (stmt), 0));
3384 : }
3385 31379039 : }
3386 :
3387 : /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
3388 : If OP is a parameter declaration, mark it as used in the info structure
3389 : passed in DATA. */
3390 :
3391 : static bool
3392 19576032 : visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
3393 : {
3394 19576032 : class ipa_node_params *info = (class ipa_node_params *) data;
3395 :
3396 19576032 : op = get_base_address (op);
3397 19576032 : if (op
3398 19576032 : && TREE_CODE (op) == PARM_DECL)
3399 : {
3400 464188 : int index = ipa_get_param_decl_index (info, op);
3401 464188 : gcc_assert (index >= 0);
3402 464188 : ipa_set_param_used (info, index, true);
3403 : }
3404 :
3405 19576032 : return false;
3406 : }
3407 :
3408 : /* Scan the statements in BB and inspect the uses of formal parameters. Store
3409 : the findings in various structures of the associated ipa_node_params
3410 : structure, such as parameter flags, notes etc. FBI holds various data about
3411 : the function being analyzed. */
3412 :
3413 : static void
3414 11011014 : ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3415 : {
3416 11011014 : gimple_stmt_iterator gsi;
3417 76676261 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3418 : {
3419 54654233 : gimple *stmt = gsi_stmt (gsi);
3420 :
3421 54654233 : if (is_gimple_debug (stmt))
3422 23275194 : continue;
3423 :
3424 31379039 : ipa_analyze_stmt_uses (fbi, stmt);
3425 31379039 : walk_stmt_load_store_addr_ops (stmt, fbi->info,
3426 : visit_ref_for_mod_analysis,
3427 : visit_ref_for_mod_analysis,
3428 : visit_ref_for_mod_analysis);
3429 : }
3430 13975681 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3431 2964667 : walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
3432 : visit_ref_for_mod_analysis,
3433 : visit_ref_for_mod_analysis,
3434 : visit_ref_for_mod_analysis);
3435 11011014 : }
3436 :
3437 : /* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
3438 :
3439 : static bool
3440 4158441 : load_from_dereferenced_name (tree expr, tree name)
3441 : {
3442 4158441 : tree base = get_base_address (expr);
3443 4158441 : return (TREE_CODE (base) == MEM_REF
3444 4158441 : && TREE_OPERAND (base, 0) == name);
3445 : }
3446 :
3447 : /* Calculate controlled uses of parameters of NODE. */
3448 :
3449 : static void
3450 1359412 : ipa_analyze_controlled_uses (struct cgraph_node *node)
3451 : {
3452 1359412 : ipa_node_params *info = ipa_node_params_sum->get (node);
3453 :
3454 7417735 : for (int i = 0; i < ipa_get_param_count (info); i++)
3455 : {
3456 2476311 : tree parm = ipa_get_param (info, i);
3457 2476311 : int call_uses = 0;
3458 2476311 : bool load_dereferenced = false;
3459 :
3460 : /* For SSA regs see if parameter is used. For non-SSA we compute
3461 : the flag during modification analysis. */
3462 2476311 : if (is_gimple_reg (parm))
3463 : {
3464 2248299 : tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
3465 : parm);
3466 2248299 : if (ddef && !has_zero_uses (ddef))
3467 : {
3468 1966873 : imm_use_iterator imm_iter;
3469 1966873 : gimple *stmt;
3470 :
3471 1966873 : ipa_set_param_used (info, i, true);
3472 6735481 : FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
3473 : {
3474 3922677 : if (is_gimple_debug (stmt))
3475 745953 : continue;
3476 :
3477 3176724 : int all_stmt_uses = 0;
3478 3176724 : use_operand_p use_p;
3479 6381453 : FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3480 3204729 : all_stmt_uses++;
3481 :
3482 3176724 : if (is_gimple_call (stmt))
3483 : {
3484 1197872 : if (gimple_call_internal_p (stmt))
3485 : {
3486 : call_uses = IPA_UNDESCRIBED_USE;
3487 : break;
3488 : }
3489 1143218 : int recognized_stmt_uses;
3490 1143218 : if (gimple_call_fn (stmt) == ddef)
3491 : recognized_stmt_uses = 1;
3492 : else
3493 1139494 : recognized_stmt_uses = 0;
3494 1143218 : unsigned arg_count = gimple_call_num_args (stmt);
3495 5071731 : for (unsigned i = 0; i < arg_count; i++)
3496 : {
3497 3928513 : tree arg = gimple_call_arg (stmt, i);
3498 3928513 : if (arg == ddef)
3499 1129835 : recognized_stmt_uses++;
3500 2798678 : else if (load_from_dereferenced_name (arg, ddef))
3501 : {
3502 15985 : load_dereferenced = true;
3503 15985 : recognized_stmt_uses++;
3504 : }
3505 : }
3506 :
3507 1143218 : if (recognized_stmt_uses != all_stmt_uses)
3508 : {
3509 : call_uses = IPA_UNDESCRIBED_USE;
3510 : break;
3511 : }
3512 1135286 : if (call_uses >= 0)
3513 1135286 : call_uses += all_stmt_uses;
3514 : }
3515 1978852 : else if (gimple_assign_single_p (stmt))
3516 : {
3517 1360616 : tree rhs = gimple_assign_rhs1 (stmt);
3518 1360616 : if (all_stmt_uses != 1
3519 1360616 : || !load_from_dereferenced_name (rhs, ddef))
3520 : {
3521 : call_uses = IPA_UNDESCRIBED_USE;
3522 : break;
3523 : }
3524 : load_dereferenced = true;
3525 : }
3526 : else
3527 : {
3528 : call_uses = IPA_UNDESCRIBED_USE;
3529 : break;
3530 : }
3531 1966873 : }
3532 : }
3533 : else
3534 : call_uses = 0;
3535 : }
3536 : else
3537 : call_uses = IPA_UNDESCRIBED_USE;
3538 2476311 : ipa_set_controlled_uses (info, i, call_uses);
3539 2476311 : ipa_set_param_load_dereferenced (info, i, load_dereferenced);
3540 : }
3541 1359412 : }
3542 :
3543 : /* Free stuff in BI. */
3544 :
3545 : static void
3546 63333966 : free_ipa_bb_info (struct ipa_bb_info *bi)
3547 : {
3548 0 : bi->cg_edges.release ();
3549 63333966 : bi->param_aa_statuses.release ();
3550 0 : }
3551 :
3552 : /* Dominator walker driving the analysis. */
3553 :
3554 2718824 : class analysis_dom_walker : public dom_walker
3555 : {
3556 : public:
3557 1359412 : analysis_dom_walker (struct ipa_func_body_info *fbi)
3558 2718824 : : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3559 :
3560 : edge before_dom_children (basic_block) final override;
3561 :
3562 : private:
3563 : struct ipa_func_body_info *m_fbi;
3564 : };
3565 :
3566 : edge
3567 11011014 : analysis_dom_walker::before_dom_children (basic_block bb)
3568 : {
3569 11011014 : ipa_analyze_params_uses_in_bb (m_fbi, bb);
3570 11011014 : ipa_compute_jump_functions_for_bb (m_fbi, bb);
3571 11011014 : return NULL;
3572 : }
3573 :
3574 : /* Release body info FBI. */
3575 :
3576 : void
3577 8552141 : ipa_release_body_info (struct ipa_func_body_info *fbi)
3578 : {
3579 8552141 : int i;
3580 8552141 : struct ipa_bb_info *bi;
3581 :
3582 71861782 : FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3583 126619282 : free_ipa_bb_info (bi);
3584 8552141 : fbi->bb_infos.release ();
3585 8552141 : }
3586 :
3587 : /* Initialize the array describing properties of formal parameters
3588 : of NODE, analyze their uses and compute jump functions associated
3589 : with actual arguments of calls from within NODE. */
3590 :
3591 : void
3592 2665874 : ipa_analyze_node (struct cgraph_node *node)
3593 : {
3594 2665874 : struct ipa_func_body_info fbi;
3595 2665874 : class ipa_node_params *info;
3596 :
3597 2665874 : ipa_check_create_node_params ();
3598 2665874 : ipa_check_create_edge_args ();
3599 2665874 : info = ipa_node_params_sum->get_create (node);
3600 :
3601 2665874 : if (info->analysis_done)
3602 1306462 : return;
3603 1360416 : info->analysis_done = 1;
3604 :
3605 1360416 : if (ipa_func_spec_opts_forbid_analysis_p (node)
3606 1360416 : || (count_formal_params (node->decl)
3607 : >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
3608 : {
3609 1306462 : gcc_assert (!ipa_get_param_count (info));
3610 : return;
3611 : }
3612 :
3613 1359412 : struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3614 1359412 : push_cfun (func);
3615 1359412 : calculate_dominance_info (CDI_DOMINATORS);
3616 1359412 : ipa_initialize_node_params (node);
3617 1359412 : ipa_analyze_controlled_uses (node);
3618 :
3619 1359412 : fbi.node = node;
3620 1359412 : fbi.info = info;
3621 1359412 : fbi.bb_infos = vNULL;
3622 1359412 : fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3623 1359412 : fbi.param_count = ipa_get_param_count (info);
3624 1359412 : fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3625 :
3626 6756031 : for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3627 : {
3628 5396619 : ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3629 5396619 : bi->cg_edges.safe_push (cs);
3630 : }
3631 :
3632 1498222 : for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3633 : {
3634 138810 : ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3635 138810 : bi->cg_edges.safe_push (cs);
3636 : }
3637 :
3638 1359412 : enable_ranger (cfun, false);
3639 1359412 : analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3640 1359412 : disable_ranger (cfun);
3641 :
3642 1359412 : ipa_release_body_info (&fbi);
3643 1359412 : free_dominance_info (CDI_DOMINATORS);
3644 1359412 : pop_cfun ();
3645 : }
3646 :
3647 : /* Analyze NODE and note any function pointers in record-typed static
3648 : initializers.
3649 :
3650 : TODO: The current implementation does not traverse the initializers to scan
3651 : records nested inside other types. It should catch the most basic way of
3652 : writing "virtual functions" in C but can be extended, of course.
3653 : */
3654 :
3655 : void
3656 1669125 : ipa_analyze_var_static_initializer (varpool_node *node)
3657 : {
3658 1669125 : tree decl = node->decl;
3659 1669125 : tree rec_type = TREE_TYPE (decl);
3660 1669125 : if (TREE_CODE (rec_type) != RECORD_TYPE
3661 1669125 : || TREE_CODE (DECL_INITIAL (decl)) != CONSTRUCTOR)
3662 : return;
3663 :
3664 : unsigned ix;
3665 : tree index, val;
3666 3833299 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (DECL_INITIAL (decl)), ix, index,
3667 : val)
3668 : {
3669 5348072 : if (TREE_CODE (val) != ADDR_EXPR
3670 1001943 : || TREE_CODE (TREE_OPERAND (val, 0)) != FUNCTION_DECL
3671 : /* ObjC can produce constructor elements with NULL indices. */
3672 2751337 : || !index)
3673 2648269 : continue;
3674 51534 : HOST_WIDE_INT elt_offset = int_bit_position (index);
3675 51534 : if ((elt_offset % BITS_PER_UNIT) != 0)
3676 0 : continue;
3677 51534 : elt_offset = elt_offset / BITS_PER_UNIT;
3678 51534 : if (elt_offset > UINT_MAX)
3679 0 : continue;
3680 51534 : note_fnptr_in_record (rec_type, elt_offset, TREE_OPERAND (val, 0));
3681 : }
3682 : }
3683 :
3684 : /* Update the jump functions associated with call graph edge E when the call
3685 : graph edge CS is being inlined, assuming that E->caller is already (possibly
3686 : indirectly) inlined into CS->callee and that E has not been inlined. */
3687 :
3688 : static void
3689 2688548 : update_jump_functions_after_inlining (struct cgraph_edge *cs,
3690 : struct cgraph_edge *e)
3691 : {
3692 2688548 : ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3693 2688548 : ipa_edge_args *args = ipa_edge_args_sum->get (e);
3694 2688548 : if (!args)
3695 : return;
3696 1506673 : ipa_node_params *old_inline_root_info = ipa_node_params_sum->get (cs->callee);
3697 1506673 : ipa_node_params *new_inline_root_info
3698 1506673 : = ipa_node_params_sum->get (cs->caller->inlined_to
3699 : ? cs->caller->inlined_to : cs->caller);
3700 1506673 : int count = ipa_get_cs_argument_count (args);
3701 1506673 : int i;
3702 :
3703 4462026 : for (i = 0; i < count; i++)
3704 : {
3705 2955353 : struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3706 2955353 : class ipa_polymorphic_call_context *dst_ctx
3707 2955353 : = ipa_get_ith_polymorhic_call_context (args, i);
3708 :
3709 2955353 : if (dst->agg.items)
3710 : {
3711 : struct ipa_agg_jf_item *item;
3712 : int j;
3713 :
3714 254403 : FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3715 : {
3716 173071 : int dst_fid;
3717 173071 : struct ipa_jump_func *src;
3718 :
3719 307051 : if (item->jftype != IPA_JF_PASS_THROUGH
3720 173071 : && item->jftype != IPA_JF_LOAD_AGG)
3721 133980 : continue;
3722 :
3723 39091 : dst_fid = item->value.pass_through.formal_id;
3724 78179 : if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3725 : {
3726 3 : item->jftype = IPA_JF_UNKNOWN;
3727 3 : continue;
3728 : }
3729 :
3730 39088 : item->value.pass_through.formal_id = -1;
3731 39088 : src = ipa_get_ith_jump_func (top, dst_fid);
3732 39088 : if (src->type == IPA_JF_CONST)
3733 : {
3734 2424 : if (item->jftype == IPA_JF_PASS_THROUGH
3735 2149 : && item->value.pass_through.operation == NOP_EXPR)
3736 : {
3737 2086 : item->jftype = IPA_JF_CONST;
3738 2086 : item->value.constant = src->value.constant.value;
3739 2086 : continue;
3740 : }
3741 : }
3742 36664 : else if (src->type == IPA_JF_PASS_THROUGH
3743 5704 : && src->value.pass_through.operation == NOP_EXPR)
3744 : {
3745 5556 : if (item->jftype == IPA_JF_PASS_THROUGH
3746 3829 : || !item->value.load_agg.by_ref
3747 2621 : || src->value.pass_through.agg_preserved)
3748 3719 : item->value.pass_through.formal_id
3749 3719 : = src->value.pass_through.formal_id;
3750 : }
3751 31108 : else if (src->type == IPA_JF_ANCESTOR)
3752 : {
3753 4645 : if (item->jftype == IPA_JF_PASS_THROUGH)
3754 : {
3755 988 : if (!src->value.ancestor.offset)
3756 689 : item->value.pass_through.formal_id
3757 689 : = src->value.ancestor.formal_id;
3758 : }
3759 3657 : else if (src->value.ancestor.agg_preserved)
3760 : {
3761 1439 : gcc_checking_assert (item->value.load_agg.by_ref);
3762 :
3763 1439 : item->value.pass_through.formal_id
3764 1439 : = src->value.ancestor.formal_id;
3765 1439 : item->value.load_agg.offset
3766 1439 : += src->value.ancestor.offset;
3767 : }
3768 : }
3769 :
3770 37002 : if (item->value.pass_through.formal_id < 0)
3771 31155 : item->jftype = IPA_JF_UNKNOWN;
3772 : }
3773 : }
3774 :
3775 2955353 : if (!top)
3776 : {
3777 13241 : ipa_set_jf_unknown (dst);
3778 13241 : continue;
3779 : }
3780 :
3781 2942112 : if (dst->type == IPA_JF_ANCESTOR)
3782 : {
3783 134749 : struct ipa_jump_func *src;
3784 134749 : int dst_fid = dst->value.ancestor.formal_id;
3785 134749 : class ipa_polymorphic_call_context *src_ctx
3786 134749 : = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3787 :
3788 : /* Variable number of arguments can cause havoc if we try to access
3789 : one that does not exist in the inlined edge. So make sure we
3790 : don't. */
3791 269498 : if (dst_fid >= ipa_get_cs_argument_count (top))
3792 : {
3793 0 : ipa_set_jf_unknown (dst);
3794 0 : continue;
3795 : }
3796 :
3797 134749 : src = ipa_get_ith_jump_func (top, dst_fid);
3798 :
3799 134749 : if (src_ctx && !src_ctx->useless_p ())
3800 : {
3801 43120 : class ipa_polymorphic_call_context ctx = *src_ctx;
3802 :
3803 : /* TODO: Make type preserved safe WRT contexts. */
3804 43120 : if (!ipa_get_jf_ancestor_type_preserved (dst))
3805 29622 : ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3806 43120 : ctx.offset_by (dst->value.ancestor.offset);
3807 86240 : if (!ctx.useless_p ())
3808 : {
3809 37575 : if (!dst_ctx)
3810 : {
3811 4473 : vec_safe_grow_cleared (args->polymorphic_call_contexts,
3812 : count, true);
3813 4473 : dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3814 : }
3815 :
3816 37575 : dst_ctx->combine_with (ctx);
3817 : }
3818 : }
3819 :
3820 : /* Parameter and argument in ancestor jump function must be pointer
3821 : type, which means access to aggregate must be by-reference. */
3822 134749 : gcc_assert (!src->agg.items || src->agg.by_ref);
3823 :
3824 134749 : if (src->agg.items && dst->value.ancestor.agg_preserved)
3825 : {
3826 1588 : struct ipa_agg_jf_item *item;
3827 1588 : int j;
3828 :
3829 : /* Currently we do not produce clobber aggregate jump functions,
3830 : replace with merging when we do. */
3831 1588 : gcc_assert (!dst->agg.items);
3832 :
3833 1588 : dst->agg.items = vec_safe_copy (src->agg.items);
3834 1588 : dst->agg.by_ref = src->agg.by_ref;
3835 5451 : FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3836 3863 : item->offset -= dst->value.ancestor.offset;
3837 : }
3838 :
3839 134749 : if (src->type == IPA_JF_PASS_THROUGH
3840 23916 : && src->value.pass_through.operation == NOP_EXPR)
3841 : {
3842 23912 : dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3843 23912 : dst->value.ancestor.agg_preserved &=
3844 23912 : src->value.pass_through.agg_preserved;
3845 : }
3846 110837 : else if (src->type == IPA_JF_ANCESTOR)
3847 : {
3848 8179 : dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3849 8179 : dst->value.ancestor.offset += src->value.ancestor.offset;
3850 8179 : dst->value.ancestor.agg_preserved &=
3851 8179 : src->value.ancestor.agg_preserved;
3852 8179 : dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3853 : }
3854 : else
3855 102658 : ipa_set_jf_unknown (dst);
3856 : }
3857 2807363 : else if (dst->type == IPA_JF_PASS_THROUGH)
3858 : {
3859 793032 : struct ipa_jump_func *src;
3860 : /* We must check range due to calls with variable number of arguments
3861 : and we cannot combine jump functions with operations. */
3862 793032 : if (dst->value.pass_through.operation == NOP_EXPR
3863 793032 : && (top && dst->value.pass_through.formal_id
3864 759642 : < ipa_get_cs_argument_count (top)))
3865 : {
3866 759628 : int dst_fid = dst->value.pass_through.formal_id;
3867 759628 : src = ipa_get_ith_jump_func (top, dst_fid);
3868 759628 : bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3869 759628 : class ipa_polymorphic_call_context *src_ctx
3870 872771 : = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3871 :
3872 113143 : if (src_ctx && !src_ctx->useless_p ())
3873 : {
3874 63582 : class ipa_polymorphic_call_context ctx = *src_ctx;
3875 :
3876 : /* TODO: Make type preserved safe WRT contexts. */
3877 63582 : if (!ipa_get_jf_pass_through_type_preserved (dst))
3878 27431 : ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3879 127164 : if (!ctx.useless_p ())
3880 : {
3881 60761 : if (!dst_ctx)
3882 : {
3883 12180 : vec_safe_grow_cleared (args->polymorphic_call_contexts,
3884 : count, true);
3885 12180 : dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3886 : }
3887 60761 : dst_ctx->combine_with (ctx);
3888 : }
3889 : }
3890 759628 : switch (src->type)
3891 : {
3892 325195 : case IPA_JF_UNKNOWN:
3893 325195 : ipa_set_jf_unknown (dst);
3894 325195 : break;
3895 157788 : case IPA_JF_CONST:
3896 157788 : ipa_convert_prop_cst_jf (dst, src,
3897 : ipa_get_type (old_inline_root_info,
3898 : dst_fid));
3899 157788 : break;
3900 :
3901 253679 : case IPA_JF_PASS_THROUGH:
3902 253679 : {
3903 253679 : int formal_id = ipa_get_jf_pass_through_formal_id (src);
3904 253679 : enum tree_code operation;
3905 253679 : operation = ipa_get_jf_pass_through_operation (src);
3906 :
3907 253679 : tree old_ir_ptype = ipa_get_type (old_inline_root_info,
3908 : dst_fid);
3909 253679 : tree new_ir_ptype = ipa_get_type (new_inline_root_info,
3910 : formal_id);
3911 253679 : if (!useless_type_conversion_p (old_ir_ptype, new_ir_ptype))
3912 : {
3913 : /* Jump-function construction now permits type-casts
3914 : from an integer to another if the latter can hold
3915 : all values or has at least the same precision.
3916 : However, as we're combining multiple pass-through
3917 : functions together, we are losing information about
3918 : signedness and thus if conversions should sign or
3919 : zero extend. Therefore we must prevent combining
3920 : such jump-function if signednesses do not match. */
3921 1745 : if (!INTEGRAL_TYPE_P (old_ir_ptype)
3922 913 : || !INTEGRAL_TYPE_P (new_ir_ptype)
3923 1826 : || (TYPE_UNSIGNED (new_ir_ptype)
3924 913 : != TYPE_UNSIGNED (old_ir_ptype)))
3925 : {
3926 832 : ipa_set_jf_unknown (dst);
3927 832 : continue;
3928 : }
3929 : }
3930 :
3931 252847 : if (operation == NOP_EXPR)
3932 : {
3933 251899 : bool agg_p;
3934 503798 : agg_p = dst_agg_p
3935 251899 : && ipa_get_jf_pass_through_agg_preserved (src);
3936 251899 : ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3937 : }
3938 948 : else if (TREE_CODE_CLASS (operation) == tcc_unary)
3939 : {
3940 8 : tree op_t = ipa_get_jf_pass_through_op_type (src);
3941 8 : ipa_set_jf_unary_pass_through (dst, formal_id, operation,
3942 : op_t);
3943 : }
3944 : else
3945 : {
3946 940 : tree operand = ipa_get_jf_pass_through_operand (src);
3947 940 : tree op_t = ipa_get_jf_pass_through_op_type (src);
3948 940 : ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3949 : operation, op_t);
3950 : }
3951 : break;
3952 : }
3953 22966 : case IPA_JF_ANCESTOR:
3954 22966 : {
3955 22966 : bool agg_p;
3956 45932 : agg_p = dst_agg_p
3957 22966 : && ipa_get_jf_ancestor_agg_preserved (src);
3958 22966 : ipa_set_ancestor_jf (dst,
3959 : ipa_get_jf_ancestor_offset (src),
3960 : ipa_get_jf_ancestor_formal_id (src),
3961 : agg_p,
3962 22966 : ipa_get_jf_ancestor_keep_null (src));
3963 22966 : break;
3964 : }
3965 0 : default:
3966 0 : gcc_unreachable ();
3967 : }
3968 :
3969 758796 : if (src->m_vr && src->m_vr->known_p ())
3970 : {
3971 514916 : value_range svr (src->m_vr->type ());
3972 514916 : if (!dst->m_vr || !dst->m_vr->known_p ())
3973 219793 : ipa_set_jfunc_vr (dst, *src->m_vr);
3974 295123 : else if (ipa_vr_operation_and_type_effects (svr, *src->m_vr,
3975 : NOP_EXPR,
3976 295123 : dst->m_vr->type (),
3977 295123 : src->m_vr->type ()))
3978 : {
3979 295115 : value_range dvr;
3980 295115 : dst->m_vr->get_vrange (dvr);
3981 295115 : dvr.intersect (svr);
3982 295115 : if (!dvr.undefined_p ())
3983 284880 : ipa_set_jfunc_vr (dst, dvr);
3984 295115 : }
3985 514916 : }
3986 :
3987 758796 : if (src->agg.items
3988 31052 : && (dst_agg_p || !src->agg.by_ref))
3989 : {
3990 : /* Currently we do not produce clobber aggregate jump
3991 : functions, replace with merging when we do. */
3992 23383 : gcc_assert (!dst->agg.items);
3993 :
3994 23383 : dst->agg.by_ref = src->agg.by_ref;
3995 23383 : dst->agg.items = vec_safe_copy (src->agg.items);
3996 : }
3997 : }
3998 : else
3999 33404 : ipa_set_jf_unknown (dst);
4000 : }
4001 : }
4002 : }
4003 :
4004 : /* If TARGET is an addr_expr of a function declaration, make it the
4005 : (SPECULATIVE)destination of an indirect edge IE and return the edge.
4006 : Otherwise, return NULL. */
4007 :
4008 : struct cgraph_edge *
4009 3991 : ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
4010 : bool speculative)
4011 : {
4012 3991 : struct cgraph_node *callee;
4013 3991 : bool unreachable = false;
4014 :
4015 3991 : if (TREE_CODE (target) == ADDR_EXPR)
4016 1319 : target = TREE_OPERAND (target, 0);
4017 3991 : if (TREE_CODE (target) != FUNCTION_DECL)
4018 : {
4019 17 : target = canonicalize_constructor_val (target, NULL);
4020 17 : if (!target || TREE_CODE (target) != FUNCTION_DECL)
4021 : {
4022 17 : cgraph_simple_indirect_info *sii
4023 17 : = dyn_cast <cgraph_simple_indirect_info *> (ie->indirect_info);
4024 : /* Member pointer call that goes through a VMT lookup. */
4025 17 : if ((sii && sii->member_ptr)
4026 : /* Or if target is not an invariant expression and we do not
4027 : know if it will evaulate to function at runtime.
4028 : This can happen when folding through &VAR, where &VAR
4029 : is IP invariant, but VAR itself is not.
4030 :
4031 : TODO: It seems that we may try to fold the expression and see
4032 : if VAR is readonly. */
4033 11 : || !is_gimple_ip_invariant (target))
4034 : {
4035 6 : if (dump_enabled_p ())
4036 : {
4037 0 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
4038 : "discovered direct call non-invariant %s\n",
4039 0 : ie->caller->dump_name ());
4040 : }
4041 6 : return NULL;
4042 : }
4043 :
4044 :
4045 11 : if (dump_enabled_p ())
4046 : {
4047 0 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
4048 : "discovered direct call to non-function in %s, "
4049 : "making it __builtin_unreachable\n",
4050 0 : ie->caller->dump_name ());
4051 : }
4052 :
4053 11 : target = builtin_decl_unreachable ();
4054 11 : callee = cgraph_node::get_create (target);
4055 11 : unreachable = true;
4056 11 : }
4057 : else
4058 0 : callee = cgraph_node::get (target);
4059 : }
4060 : else
4061 3974 : callee = cgraph_node::get (target);
4062 :
4063 : /* Because may-edges are not explicitly represented and vtable may be external,
4064 : we may create the first reference to the object in the unit. */
4065 3985 : if (!callee || callee->inlined_to)
4066 : {
4067 :
4068 : /* We are better to ensure we can refer to it.
4069 : In the case of static functions we are out of luck, since we already
4070 : removed its body. In the case of public functions we may or may
4071 : not introduce the reference. */
4072 0 : if (!canonicalize_constructor_val (target, NULL)
4073 0 : || !TREE_PUBLIC (target))
4074 : {
4075 0 : if (dump_file)
4076 0 : fprintf (dump_file, "ipa-prop: Discovered call to a known target "
4077 : "(%s -> %s) but cannot refer to it. Giving up.\n",
4078 0 : ie->caller->dump_name (),
4079 0 : ie->callee->dump_name ());
4080 0 : return NULL;
4081 : }
4082 0 : callee = cgraph_node::get_create (target);
4083 : }
4084 :
4085 : /* If the edge is already speculated. */
4086 3985 : if (speculative && ie->speculative)
4087 : {
4088 0 : if (dump_file)
4089 : {
4090 0 : cgraph_edge *e2 = ie->speculative_call_for_target (callee);
4091 0 : if (!e2)
4092 : {
4093 0 : if (dump_file)
4094 0 : fprintf (dump_file, "ipa-prop: Discovered call to a "
4095 : "speculative target (%s -> %s) but the call is "
4096 : "already speculated to different target. "
4097 : "Giving up.\n",
4098 0 : ie->caller->dump_name (), callee->dump_name ());
4099 : }
4100 : else
4101 : {
4102 0 : if (dump_file)
4103 0 : fprintf (dump_file,
4104 : "ipa-prop: Discovered call to a speculative target "
4105 : "(%s -> %s) this agree with previous speculation.\n",
4106 0 : ie->caller->dump_name (), callee->dump_name ());
4107 : }
4108 : }
4109 0 : return NULL;
4110 : }
4111 :
4112 3985 : if (!dbg_cnt (devirt))
4113 : return NULL;
4114 :
4115 3985 : ipa_check_create_node_params ();
4116 :
4117 : /* We cannot make edges to inline clones. It is bug that someone removed
4118 : the cgraph node too early. */
4119 3985 : gcc_assert (!callee->inlined_to);
4120 :
4121 3985 : if (dump_file && !unreachable)
4122 : {
4123 402 : fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
4124 : "(%s -> %s), for stmt ",
4125 402 : is_a <cgraph_polymorphic_indirect_info *> (ie->indirect_info)
4126 : ? "a virtual" : "an indirect",
4127 : speculative ? "speculative" : "known",
4128 201 : ie->caller->dump_name (),
4129 : callee->dump_name ());
4130 201 : if (ie->call_stmt)
4131 193 : print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
4132 : else
4133 8 : fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
4134 : }
4135 3985 : if (dump_enabled_p ())
4136 : {
4137 402 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
4138 : "converting indirect call in %s to direct call to %s\n",
4139 201 : ie->caller->dump_name (), callee->dump_name ());
4140 : }
4141 3985 : if (!speculative)
4142 : {
4143 3958 : struct cgraph_edge *orig = ie;
4144 3958 : ie = cgraph_edge::make_direct (ie, callee);
4145 : /* If we resolved speculative edge the cost is already up to date
4146 : for direct call (adjusted by inline_edge_duplication_hook). */
4147 3958 : if (ie == orig)
4148 : {
4149 3139 : ipa_call_summary *es = ipa_call_summaries->get (ie);
4150 3139 : es->call_stmt_size -= (eni_size_weights.indirect_call_cost
4151 3139 : - eni_size_weights.call_cost);
4152 3139 : es->call_stmt_time -= (eni_time_weights.indirect_call_cost
4153 3139 : - eni_time_weights.call_cost);
4154 : }
4155 : }
4156 : else
4157 : {
4158 27 : if (!callee->can_be_discarded_p ())
4159 : {
4160 4 : cgraph_node *alias;
4161 4 : alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
4162 : if (alias)
4163 27 : callee = alias;
4164 : }
4165 : /* make_speculative will update ie's cost to direct call cost. */
4166 27 : ie = ie->make_speculative
4167 27 : (callee, ie->count.apply_scale (8, 10));
4168 : }
4169 :
4170 : return ie;
4171 : }
4172 :
4173 : /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
4174 : CONSTRUCTOR and return it. Return NULL if the search fails for some
4175 : reason. */
4176 :
4177 : static tree
4178 11038 : find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
4179 : {
4180 16398 : tree type = TREE_TYPE (constructor);
4181 16398 : if (TREE_CODE (type) != ARRAY_TYPE
4182 16398 : && TREE_CODE (type) != RECORD_TYPE)
4183 : return NULL;
4184 :
4185 16386 : unsigned ix;
4186 16386 : tree index, val;
4187 22495 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
4188 : {
4189 21823 : HOST_WIDE_INT elt_offset;
4190 21823 : if (TREE_CODE (type) == ARRAY_TYPE)
4191 : {
4192 290 : offset_int off;
4193 290 : tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
4194 290 : gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
4195 :
4196 290 : if (index)
4197 : {
4198 290 : if (TREE_CODE (index) == RANGE_EXPR)
4199 60 : off = wi::to_offset (TREE_OPERAND (index, 0));
4200 : else
4201 230 : off = wi::to_offset (index);
4202 290 : if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
4203 : {
4204 290 : tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
4205 290 : gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
4206 290 : off = wi::sext (off - wi::to_offset (low_bound),
4207 290 : TYPE_PRECISION (TREE_TYPE (index)));
4208 : }
4209 290 : off *= wi::to_offset (unit_size);
4210 : /* ??? Handle more than just the first index of a
4211 : RANGE_EXPR. */
4212 : }
4213 : else
4214 0 : off = wi::to_offset (unit_size) * ix;
4215 :
4216 290 : off = wi::lshift (off, LOG2_BITS_PER_UNIT);
4217 290 : if (!wi::fits_shwi_p (off) || wi::neg_p (off))
4218 0 : continue;
4219 290 : elt_offset = off.to_shwi ();
4220 : }
4221 21533 : else if (TREE_CODE (type) == RECORD_TYPE)
4222 : {
4223 21533 : gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
4224 21533 : if (DECL_BIT_FIELD (index))
4225 0 : continue;
4226 21533 : elt_offset = int_bit_position (index);
4227 : }
4228 : else
4229 0 : gcc_unreachable ();
4230 :
4231 21823 : if (elt_offset > req_offset)
4232 : return NULL;
4233 :
4234 21823 : if (TREE_CODE (val) == CONSTRUCTOR)
4235 5360 : return find_constructor_constant_at_offset (val,
4236 5360 : req_offset - elt_offset);
4237 :
4238 16463 : if (elt_offset == req_offset
4239 11026 : && is_gimple_reg_type (TREE_TYPE (val))
4240 27489 : && is_gimple_ip_invariant (val))
4241 : return val;
4242 : }
4243 : return NULL;
4244 : }
4245 :
4246 : /* Check whether SCALAR could be used to look up an aggregate interprocedural
4247 : invariant from a static constructor and if so, return it. Otherwise return
4248 : NULL. */
4249 :
4250 : tree
4251 13658958 : ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
4252 : {
4253 13658958 : if (by_ref)
4254 : {
4255 13643867 : if (TREE_CODE (scalar) != ADDR_EXPR)
4256 : return NULL;
4257 4251656 : scalar = TREE_OPERAND (scalar, 0);
4258 : }
4259 :
4260 4266747 : if (!VAR_P (scalar)
4261 2777647 : || !is_global_var (scalar)
4262 311971 : || !TREE_READONLY (scalar)
4263 15586 : || !DECL_INITIAL (scalar)
4264 4279905 : || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
4265 : return NULL;
4266 :
4267 11038 : return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
4268 : }
4269 :
4270 : /* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
4271 : is none. BY_REF specifies whether the value has to be passed by reference
4272 : or by value. */
4273 :
4274 : static tree
4275 24839 : ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
4276 : ipa_node_params *src_info,
4277 : cgraph_node *src_node,
4278 : HOST_WIDE_INT offset, bool by_ref)
4279 : {
4280 24839 : if (by_ref != agg_jfunc->by_ref)
4281 : return NULL_TREE;
4282 :
4283 4223 : for (const ipa_agg_jf_item &item : agg_jfunc->items)
4284 1496 : if (item.offset == offset)
4285 1081 : return ipa_agg_value_from_jfunc (src_info, src_node, &item);
4286 :
4287 : return NULL_TREE;
4288 : }
4289 :
4290 : /* Remove a reference to SYMBOL from the list of references of a node given by
4291 : reference description RDESC. Return true if the reference has been
4292 : successfully found and removed. */
4293 :
4294 : static bool
4295 7855 : remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4296 : {
4297 7855 : struct ipa_ref *to_del;
4298 7855 : struct cgraph_edge *origin;
4299 :
4300 7855 : origin = rdesc->cs;
4301 7855 : if (!origin)
4302 : return false;
4303 7855 : to_del = origin->caller->find_reference (symbol, origin->call_stmt,
4304 : origin->lto_stmt_uid, IPA_REF_ADDR);
4305 7855 : if (!to_del)
4306 : return false;
4307 :
4308 7855 : to_del->remove_reference ();
4309 7855 : if (dump_file)
4310 26 : fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
4311 13 : origin->caller->dump_name (), symbol->dump_name ());
4312 : return true;
4313 : }
4314 :
4315 : /* If JFUNC has a reference description with refcount different from
4316 : IPA_UNDESCRIBED_USE, return the reference description, otherwise return
4317 : NULL. JFUNC must be a constant jump function. */
4318 :
4319 : static struct ipa_cst_ref_desc *
4320 1319569 : jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
4321 : {
4322 1319569 : struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
4323 1319569 : if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
4324 : return rdesc;
4325 : else
4326 1131006 : return NULL;
4327 : }
4328 :
4329 : /* If the value of constant jump function JFUNC is an address of a function
4330 : declaration, return the associated call graph node. Otherwise return
4331 : NULL. */
4332 :
4333 : static symtab_node *
4334 1782 : symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
4335 : {
4336 1782 : gcc_checking_assert (jfunc->type == IPA_JF_CONST);
4337 1782 : tree cst = ipa_get_jf_constant (jfunc);
4338 1782 : if (TREE_CODE (cst) != ADDR_EXPR
4339 1782 : || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
4340 31 : && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
4341 : return NULL;
4342 :
4343 1772 : return symtab_node::get (TREE_OPERAND (cst, 0));
4344 : }
4345 :
4346 :
4347 : /* If JFUNC is a constant jump function with a usable rdesc, decrement its
4348 : refcount and if it hits zero, remove reference to SYMBOL from the caller of
4349 : the edge specified in the rdesc. Return false if either the symbol or the
4350 : reference could not be found, otherwise return true. */
4351 :
4352 : static bool
4353 969 : try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
4354 : {
4355 969 : struct ipa_cst_ref_desc *rdesc;
4356 969 : if (jfunc->type == IPA_JF_CONST
4357 969 : && (rdesc = jfunc_rdesc_usable (jfunc))
4358 1911 : && --rdesc->refcount == 0)
4359 : {
4360 778 : symtab_node *symbol = symtab_node_for_jfunc (jfunc);
4361 778 : if (!symbol)
4362 : return false;
4363 :
4364 778 : return remove_described_reference (symbol, rdesc);
4365 : }
4366 : return true;
4367 : }
4368 :
4369 : /* Try to find a destination for indirect edge IE that corresponds to a simple
4370 : call or a call of a member function pointer and where the destination is a
4371 : pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
4372 : the type of the parameter to which the result of JFUNC is passed. If it can
4373 : be determined, return the newly direct edge, otherwise return NULL.
4374 : NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
4375 : relative to. */
4376 :
4377 : static struct cgraph_edge *
4378 9624 : try_make_edge_direct_simple_call (struct cgraph_edge *ie,
4379 : struct ipa_jump_func *jfunc, tree target_type,
4380 : struct cgraph_node *new_root,
4381 : class ipa_node_params *new_root_info)
4382 : {
4383 9624 : tree target = NULL_TREE;
4384 9624 : cgraph_simple_indirect_info *sii
4385 9624 : = as_a <cgraph_simple_indirect_info *> (ie->indirect_info);
4386 9624 : bool agg_contents = sii->agg_contents;
4387 9624 : tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
4388 9624 : if (agg_contents)
4389 : {
4390 7517 : if (scalar)
4391 14 : target = ipa_find_agg_cst_from_init (scalar, sii->offset, sii->by_ref);
4392 7517 : if (!target && sii->guaranteed_unmodified)
4393 6358 : target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
4394 : new_root, sii->offset,
4395 : sii->by_ref);
4396 : }
4397 : else
4398 : target = scalar;
4399 9623 : if (!target)
4400 : return NULL;
4401 1336 : cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target);
4402 :
4403 1336 : if (cs && !agg_contents)
4404 : {
4405 969 : bool ok;
4406 969 : gcc_checking_assert (cs->callee
4407 : && (cs != ie
4408 : || jfunc->type != IPA_JF_CONST
4409 : || !symtab_node_for_jfunc (jfunc)
4410 : || cs->callee == symtab_node_for_jfunc (jfunc)));
4411 969 : ok = try_decrement_rdesc_refcount (jfunc);
4412 969 : gcc_checking_assert (ok);
4413 : }
4414 :
4415 : return cs;
4416 : }
4417 :
4418 : /* Return the target to be used in cases of impossible devirtualization. IE
4419 : and target (the latter can be NULL) are dumped when dumping is enabled. */
4420 :
4421 : tree
4422 508 : ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
4423 : {
4424 508 : if (dump_file)
4425 : {
4426 57 : if (target)
4427 18 : fprintf (dump_file,
4428 : "Type inconsistent devirtualization: %s->%s\n",
4429 18 : ie->caller->dump_name (),
4430 18 : IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
4431 : else
4432 39 : fprintf (dump_file,
4433 : "No devirtualization target in %s\n",
4434 39 : ie->caller->dump_name ());
4435 : }
4436 508 : tree new_target = builtin_decl_unreachable ();
4437 508 : cgraph_node::get_create (new_target);
4438 508 : return new_target;
4439 : }
4440 :
4441 : /* Try to find a destination for indirect edge IE that corresponds to a virtual
4442 : call based on a formal parameter which is described by jump function JFUNC
4443 : and if it can be determined, make it direct and return the direct edge.
4444 : Otherwise, return NULL. CTX describes the polymorphic context that the
4445 : parameter the call is based on brings along with it. NEW_ROOT and
4446 : NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
4447 : to. */
4448 :
4449 : static struct cgraph_edge *
4450 18484 : try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
4451 : struct ipa_jump_func *jfunc,
4452 : class ipa_polymorphic_call_context ctx,
4453 : struct cgraph_node *new_root,
4454 : class ipa_node_params *new_root_info)
4455 : {
4456 18484 : tree target = NULL;
4457 18484 : bool speculative = false;
4458 :
4459 18484 : if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
4460 : return NULL;
4461 18484 : cgraph_polymorphic_indirect_info *pii
4462 18484 : = as_a <cgraph_polymorphic_indirect_info *> (ie->indirect_info);
4463 18484 : if (!pii->usable_p ())
4464 : return nullptr;
4465 :
4466 : /* Try to do lookup via known virtual table pointer value. */
4467 18484 : if (!pii->vptr_changed
4468 18484 : || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
4469 : {
4470 18484 : tree vtable;
4471 18484 : unsigned HOST_WIDE_INT offset;
4472 18484 : tree t = NULL_TREE;
4473 18484 : if (jfunc->type == IPA_JF_CONST)
4474 330 : t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
4475 : pii->offset, true);
4476 330 : if (!t)
4477 18481 : t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
4478 : new_root, pii->offset, true);
4479 18484 : if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
4480 : {
4481 697 : bool can_refer;
4482 697 : t = gimple_get_virt_method_for_vtable (pii->otr_token, vtable, offset,
4483 : &can_refer);
4484 697 : if (can_refer)
4485 : {
4486 680 : if (!t
4487 680 : || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
4488 : BUILT_IN_UNREACHABLE_TRAP)
4489 1312 : || !possible_polymorphic_call_target_p
4490 632 : (ie, cgraph_node::get (t)))
4491 : {
4492 : /* Do not speculate builtin_unreachable, it is stupid! */
4493 90 : if (!pii->vptr_changed)
4494 90 : target = ipa_impossible_devirt_target (ie, target);
4495 : else
4496 : target = NULL;
4497 : }
4498 : else
4499 : {
4500 590 : target = t;
4501 590 : speculative = pii->vptr_changed;
4502 : }
4503 : }
4504 : }
4505 : }
4506 :
4507 18484 : ipa_polymorphic_call_context ie_context (ie);
4508 18484 : vec <cgraph_node *>targets;
4509 18484 : bool final;
4510 :
4511 18484 : ctx.offset_by (pii->offset);
4512 18484 : if (pii->vptr_changed)
4513 6222 : ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
4514 : pii->otr_type);
4515 18484 : ctx.combine_with (ie_context, pii->otr_type);
4516 18484 : targets = possible_polymorphic_call_targets (pii->otr_type, pii->otr_token,
4517 : ctx, &final);
4518 20545 : if (final && targets.length () <= 1)
4519 : {
4520 2042 : speculative = false;
4521 2042 : if (targets.length () == 1)
4522 1991 : target = targets[0]->decl;
4523 : else
4524 51 : target = ipa_impossible_devirt_target (ie, NULL_TREE);
4525 : }
4526 16434 : else if (!target && opt_for_fn (ie->caller->decl,
4527 : flag_devirtualize_speculatively)
4528 32794 : && !ie->speculative && ie->maybe_hot_p ())
4529 : {
4530 8849 : cgraph_node *n;
4531 8849 : n = try_speculative_devirtualization (pii->otr_type, pii->otr_token,
4532 : pii->context);
4533 8849 : if (n)
4534 : {
4535 18 : target = n->decl;
4536 18 : speculative = true;
4537 : }
4538 : }
4539 :
4540 18484 : if (target)
4541 : {
4542 2068 : if (!possible_polymorphic_call_target_p
4543 2068 : (ie, cgraph_node::get_create (target)))
4544 : {
4545 51 : if (speculative)
4546 : return NULL;
4547 51 : target = ipa_impossible_devirt_target (ie, target);
4548 : }
4549 2068 : return ipa_make_edge_direct_to_target (ie, target, speculative);
4550 : }
4551 : else
4552 : return NULL;
4553 : }
4554 :
4555 : /* Update the param called notes associated with NODE when CS is being inlined,
4556 : assuming NODE is (potentially indirectly) inlined into CS->callee.
4557 : Moreover, if the callee is discovered to be constant, create a new cgraph
4558 : edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
4559 : unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
4560 :
4561 : static bool
4562 1627811 : update_indirect_edges_after_inlining (struct cgraph_edge *cs,
4563 : struct cgraph_node *node,
4564 : vec<cgraph_edge *> *new_edges)
4565 : {
4566 1627811 : bool res = false;
4567 :
4568 1627811 : ipa_check_create_edge_args ();
4569 1627811 : class ipa_edge_args *top = ipa_edge_args_sum->get (cs);
4570 1128072 : cgraph_node *new_root
4571 1627811 : = cs->caller->inlined_to ? cs->caller->inlined_to : cs->caller;
4572 1627811 : ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4573 1627811 : ipa_node_params *inlined_node_info
4574 1627811 : = ipa_node_params_sum->get (cs->callee->function_symbol ());
4575 :
4576 1627811 : cgraph_edge *next_ie;
4577 1699101 : for (cgraph_edge *ie = node->indirect_calls; ie; ie = next_ie)
4578 : {
4579 71290 : next_ie = ie->next_callee;
4580 :
4581 114291 : if (!top
4582 71165 : || ie->indirect_info->param_index < 0
4583 127872 : || ie->indirect_info->param_index >= ipa_get_cs_argument_count (top))
4584 : {
4585 43001 : ie->indirect_info->param_index = -1;
4586 46375 : continue;
4587 : }
4588 :
4589 28289 : int param_index = ie->indirect_info->param_index;
4590 28289 : cgraph_polymorphic_indirect_info *pii
4591 28289 : = dyn_cast <cgraph_polymorphic_indirect_info *> (ie->indirect_info);
4592 28289 : cgraph_simple_indirect_info *sii
4593 28289 : = dyn_cast <cgraph_simple_indirect_info *> (ie->indirect_info);
4594 28289 : struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (top, param_index);
4595 :
4596 28289 : auto_vec<cgraph_node *, 4> spec_targets;
4597 28289 : if (ie->speculative)
4598 8409 : for (cgraph_edge *direct = ie->first_speculative_call_target ();
4599 21068 : direct;
4600 12659 : direct = direct->next_speculative_call_target ())
4601 12659 : spec_targets.safe_push (direct->callee);
4602 :
4603 28289 : cgraph_edge *new_direct_edge;
4604 28289 : if (!opt_for_fn (node->decl, flag_indirect_inlining))
4605 181 : new_direct_edge = NULL;
4606 28108 : else if (pii)
4607 : {
4608 18484 : ipa_polymorphic_call_context ctx;
4609 18484 : ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
4610 18484 : new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
4611 : new_root,
4612 : new_root_info);
4613 : }
4614 9624 : else if (sii)
4615 : {
4616 9624 : tree target_type = ipa_get_type (inlined_node_info, param_index);
4617 9624 : new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
4618 : target_type,
4619 : new_root,
4620 : new_root_info);
4621 : }
4622 : else
4623 0 : gcc_unreachable ();
4624 :
4625 : /* If speculation was removed, then we need to do nothing. */
4626 3398 : if (new_direct_edge && new_direct_edge != ie
4627 29036 : && spec_targets.contains (new_direct_edge->callee))
4628 : {
4629 723 : new_direct_edge->indirect_inlining_edge = 1;
4630 723 : res = true;
4631 723 : if (!new_direct_edge->speculative)
4632 723 : continue;
4633 : }
4634 27566 : else if (new_direct_edge)
4635 : {
4636 2675 : new_direct_edge->indirect_inlining_edge = 1;
4637 2675 : if (new_edges)
4638 : {
4639 2667 : new_edges->safe_push (new_direct_edge);
4640 2667 : res = true;
4641 : }
4642 : /* If speculative edge was introduced we still need to update
4643 : call info of the indirect edge. */
4644 2675 : if (!new_direct_edge->speculative)
4645 2651 : continue;
4646 : }
4647 24915 : if (jfunc->type == IPA_JF_PASS_THROUGH
4648 24915 : && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4649 : {
4650 5898 : if (!pii
4651 2600 : && sii->agg_contents
4652 7407 : && !ipa_get_jf_pass_through_agg_preserved (jfunc))
4653 981 : ie->indirect_info->param_index = -1;
4654 : else
4655 : {
4656 4917 : param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4657 4917 : ie->indirect_info->param_index = param_index;
4658 4917 : ipa_set_param_used_by_indirect_call (new_root_info, param_index,
4659 : true);
4660 4917 : if (pii)
4661 : {
4662 3298 : if (!ipa_get_jf_pass_through_type_preserved (jfunc))
4663 2122 : pii->vptr_changed = true;
4664 3298 : ipa_set_param_used_by_polymorphic_call (new_root_info,
4665 : param_index, true);
4666 : }
4667 : }
4668 : }
4669 19017 : else if (jfunc->type == IPA_JF_ANCESTOR)
4670 : {
4671 513 : if (!pii
4672 158 : && sii->agg_contents
4673 671 : && !ipa_get_jf_ancestor_agg_preserved (jfunc))
4674 56 : ie->indirect_info->param_index = -1;
4675 : else
4676 : {
4677 457 : param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4678 457 : ie->indirect_info->param_index = param_index;
4679 457 : ipa_set_param_used_by_indirect_call (new_root_info, param_index,
4680 : true);
4681 457 : if (pii)
4682 : {
4683 355 : pii->offset += ipa_get_jf_ancestor_offset (jfunc);
4684 355 : if (!ipa_get_jf_ancestor_type_preserved (jfunc))
4685 315 : pii->vptr_changed = true;
4686 355 : ipa_set_param_used_by_polymorphic_call (new_root_info,
4687 : param_index, true);
4688 : }
4689 : else
4690 102 : sii->offset += ipa_get_jf_ancestor_offset (jfunc);
4691 : }
4692 : }
4693 : else
4694 : /* Either we can find a destination for this edge now or never. */
4695 18504 : ie->indirect_info->param_index = -1;
4696 28289 : }
4697 :
4698 1627811 : return res;
4699 : }
4700 :
4701 : /* Recursively traverse subtree of NODE (including node) made of inlined
4702 : cgraph_edges when CS has been inlined and invoke
4703 : update_indirect_edges_after_inlining on all nodes and
4704 : update_jump_functions_after_inlining on all non-inlined edges that lead out
4705 : of this subtree. Newly discovered indirect edges will be added to
4706 : *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4707 : created. */
4708 :
4709 : static bool
4710 1627811 : propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4711 : struct cgraph_node *node,
4712 : vec<cgraph_edge *> *new_edges)
4713 : {
4714 1627811 : struct cgraph_edge *e;
4715 1627811 : bool res;
4716 :
4717 1627811 : res = update_indirect_edges_after_inlining (cs, node, new_edges);
4718 :
4719 4939768 : for (e = node->callees; e; e = e->next_callee)
4720 3311957 : if (!e->inline_failed)
4721 691325 : res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4722 : else
4723 2620632 : update_jump_functions_after_inlining (cs, e);
4724 1695727 : for (e = node->indirect_calls; e; e = e->next_callee)
4725 67916 : update_jump_functions_after_inlining (cs, e);
4726 :
4727 1627811 : return res;
4728 : }
4729 :
4730 : /* Combine two controlled uses counts as done during inlining. */
4731 :
4732 : static int
4733 387348 : combine_controlled_uses_counters (int c, int d)
4734 : {
4735 0 : if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4736 : return IPA_UNDESCRIBED_USE;
4737 : else
4738 100726 : return c + d - 1;
4739 : }
4740 :
4741 : /* Propagate number of controlled users from CS->caleee to the new root of the
4742 : tree of inlined nodes. */
4743 :
4744 : static void
4745 936486 : propagate_controlled_uses (struct cgraph_edge *cs)
4746 : {
4747 936486 : ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4748 936486 : if (!args)
4749 : return;
4750 660633 : struct cgraph_node *new_root = cs->caller->inlined_to
4751 934732 : ? cs->caller->inlined_to : cs->caller;
4752 934732 : ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4753 934732 : ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4754 934732 : int count, i;
4755 :
4756 934732 : if (!old_root_info)
4757 : return;
4758 :
4759 1815195 : count = MIN (ipa_get_cs_argument_count (args),
4760 : ipa_get_param_count (old_root_info));
4761 2857649 : for (i = 0; i < count; i++)
4762 : {
4763 1922997 : struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4764 1922997 : struct ipa_cst_ref_desc *rdesc;
4765 :
4766 1922997 : if (jf->type == IPA_JF_PASS_THROUGH
4767 1922997 : && !ipa_get_jf_pass_through_refdesc_decremented (jf))
4768 : {
4769 332342 : int src_idx, c, d;
4770 332342 : src_idx = ipa_get_jf_pass_through_formal_id (jf);
4771 332342 : c = ipa_get_controlled_uses (new_root_info, src_idx);
4772 332342 : d = ipa_get_controlled_uses (old_root_info, i);
4773 :
4774 332342 : gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4775 : == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4776 332342 : c = combine_controlled_uses_counters (c, d);
4777 332342 : ipa_set_controlled_uses (new_root_info, src_idx, c);
4778 332342 : bool lderef = true;
4779 332342 : if (c != IPA_UNDESCRIBED_USE)
4780 : {
4781 89887 : lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4782 89887 : || ipa_get_param_load_dereferenced (old_root_info, i));
4783 89887 : ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4784 : }
4785 :
4786 332342 : if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4787 : {
4788 195 : struct cgraph_node *n;
4789 195 : struct ipa_ref *ref;
4790 195 : tree t = new_root_info->known_csts[src_idx];
4791 :
4792 167 : if (t && TREE_CODE (t) == ADDR_EXPR
4793 145 : && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4794 9 : && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4795 204 : && (ref = new_root->find_reference (n, NULL, 0,
4796 : IPA_REF_ADDR)))
4797 : {
4798 9 : if (dump_file)
4799 1 : fprintf (dump_file, "ipa-prop: Removing cloning-created "
4800 : "reference from %s to %s.\n",
4801 : new_root->dump_name (),
4802 : n->dump_name ());
4803 9 : ref->remove_reference ();
4804 : }
4805 : }
4806 : }
4807 1590655 : else if (jf->type == IPA_JF_CONST
4808 1590655 : && (rdesc = jfunc_rdesc_usable (jf)))
4809 : {
4810 55006 : int d = ipa_get_controlled_uses (old_root_info, i);
4811 55006 : int c = rdesc->refcount;
4812 55006 : tree cst = ipa_get_jf_constant (jf);
4813 55006 : rdesc->refcount = combine_controlled_uses_counters (c, d);
4814 55006 : if (rdesc->refcount != IPA_UNDESCRIBED_USE
4815 10839 : && ipa_get_param_load_dereferenced (old_root_info, i)
4816 2260 : && TREE_CODE (cst) == ADDR_EXPR
4817 57266 : && VAR_P (TREE_OPERAND (cst, 0)))
4818 : {
4819 2259 : symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4820 2259 : new_root->create_reference (n, IPA_REF_LOAD, NULL);
4821 2259 : if (dump_file)
4822 3 : fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
4823 : "a load so adding LOAD reference from %s to %s.\n",
4824 : new_root->dump_name (), n->dump_name ());
4825 : }
4826 55006 : if (rdesc->refcount == 0)
4827 : {
4828 7077 : gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4829 : && ((TREE_CODE (TREE_OPERAND (cst, 0))
4830 : == FUNCTION_DECL)
4831 : || VAR_P (TREE_OPERAND (cst, 0))));
4832 :
4833 7077 : symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4834 7077 : if (n)
4835 : {
4836 7077 : remove_described_reference (n, rdesc);
4837 7077 : cgraph_node *clone = cs->caller;
4838 7077 : while (clone->inlined_to
4839 1633 : && clone->ipcp_clone
4840 7309 : && clone != rdesc->cs->caller)
4841 : {
4842 95 : struct ipa_ref *ref;
4843 95 : ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
4844 95 : if (ref)
4845 : {
4846 92 : if (dump_file)
4847 1 : fprintf (dump_file, "ipa-prop: Removing "
4848 : "cloning-created reference "
4849 : "from %s to %s.\n",
4850 : clone->dump_name (),
4851 : n->dump_name ());
4852 92 : ref->remove_reference ();
4853 : }
4854 95 : clone = clone->callers->caller;
4855 : }
4856 : }
4857 : }
4858 : }
4859 : }
4860 :
4861 1815296 : for (i = ipa_get_param_count (old_root_info);
4862 1815863 : i < ipa_get_cs_argument_count (args);
4863 : i++)
4864 : {
4865 334 : struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4866 :
4867 334 : if (jf->type == IPA_JF_CONST)
4868 : {
4869 298 : struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4870 298 : if (rdesc)
4871 235 : rdesc->refcount = IPA_UNDESCRIBED_USE;
4872 : }
4873 36 : else if (jf->type == IPA_JF_PASS_THROUGH)
4874 5 : ipa_set_controlled_uses (new_root_info,
4875 : jf->value.pass_through.formal_id,
4876 : IPA_UNDESCRIBED_USE);
4877 : }
4878 : }
4879 :
4880 : /* Update jump functions and call note functions on inlining the call site CS.
4881 : CS is expected to lead to a node already cloned by
4882 : cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4883 : *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
4884 : created. */
4885 :
4886 : bool
4887 3925550 : ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4888 : vec<cgraph_edge *> *new_edges)
4889 : {
4890 3925550 : bool changed;
4891 : /* Do nothing if the preparation phase has not been carried out yet
4892 : (i.e. during early inlining). */
4893 3925550 : if (!ipa_node_params_sum)
4894 : return false;
4895 936486 : gcc_assert (ipa_edge_args_sum);
4896 :
4897 936486 : propagate_controlled_uses (cs);
4898 936486 : changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4899 936486 : ipa_node_params_sum->remove (cs->callee);
4900 :
4901 936486 : ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4902 936486 : if (args)
4903 : {
4904 934732 : bool ok = true;
4905 934732 : if (args->jump_functions)
4906 : {
4907 : struct ipa_jump_func *jf;
4908 : int i;
4909 2612867 : FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4910 1790270 : if (jf->type == IPA_JF_CONST
4911 1790270 : && ipa_get_jf_constant_rdesc (jf))
4912 : {
4913 : ok = false;
4914 : break;
4915 : }
4916 : }
4917 880623 : if (ok)
4918 876706 : ipa_edge_args_sum->remove (cs);
4919 : }
4920 936486 : if (ipcp_transformation_sum)
4921 662363 : ipcp_transformation_sum->remove (cs->callee);
4922 :
4923 : return changed;
4924 : }
4925 :
4926 : /* Ensure that array of edge arguments infos is big enough to accommodate a
4927 : structure for all edges and reallocates it if not. Also, allocate
4928 : associated hash tables is they do not already exist. */
4929 :
4930 : void
4931 4817706 : ipa_check_create_edge_args (void)
4932 : {
4933 4817706 : if (!ipa_edge_args_sum)
4934 245020 : ipa_edge_args_sum
4935 245020 : = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4936 245020 : ipa_edge_args_sum_t (symtab, true));
4937 4817706 : if (!ipa_vr_hash_table)
4938 171910 : ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4939 4817706 : }
4940 :
4941 : /* Free all ipa_edge structures. */
4942 :
4943 : void
4944 464965 : ipa_free_all_edge_args (void)
4945 : {
4946 464965 : if (!ipa_edge_args_sum)
4947 : return;
4948 :
4949 232521 : ggc_delete (ipa_edge_args_sum);
4950 232521 : ipa_edge_args_sum = NULL;
4951 : }
4952 :
4953 : /* Free all ipa_node_params structures. */
4954 :
4955 : void
4956 5385953 : ipa_free_all_node_params (void)
4957 : {
4958 5385953 : if (ipa_node_params_sum)
4959 5153509 : ggc_delete (ipa_node_params_sum);
4960 5385953 : ipa_node_params_sum = NULL;
4961 5385953 : }
4962 :
4963 : /* Initialize IPA CP transformation summary and also allocate any necessary hash
4964 : tables if they do not already exist. */
4965 :
4966 : void
4967 96062 : ipcp_transformation_initialize (void)
4968 : {
4969 96062 : if (!ipa_vr_hash_table)
4970 1889 : ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4971 96062 : if (ipcp_transformation_sum == NULL)
4972 : {
4973 20005 : ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4974 20005 : ipcp_transformation_sum->disable_insertion_hook ();
4975 : }
4976 96062 : }
4977 :
4978 : /* Release the IPA CP transformation summary. */
4979 :
4980 : void
4981 259496 : ipcp_free_transformation_sum (void)
4982 : {
4983 259496 : if (!ipcp_transformation_sum)
4984 : return;
4985 :
4986 19996 : ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4987 19996 : ggc_free (ipcp_transformation_sum);
4988 19996 : ipcp_transformation_sum = NULL;
4989 : }
4990 :
4991 : /* Set the aggregate replacements of NODE to be AGGVALS. */
4992 :
4993 : void
4994 22585 : ipa_set_node_agg_value_chain (struct cgraph_node *node,
4995 : vec<ipa_argagg_value, va_gc> *aggs)
4996 : {
4997 22585 : ipcp_transformation_initialize ();
4998 22585 : ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4999 22585 : s->m_agg_values = aggs;
5000 22585 : }
5001 :
5002 : /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
5003 : count data structures accordingly. */
5004 :
5005 : void
5006 0 : ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
5007 : {
5008 0 : if (args->jump_functions)
5009 : {
5010 : struct ipa_jump_func *jf;
5011 : int i;
5012 0 : FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
5013 : {
5014 0 : struct ipa_cst_ref_desc *rdesc;
5015 0 : try_decrement_rdesc_refcount (jf);
5016 0 : if (jf->type == IPA_JF_CONST
5017 0 : && (rdesc = ipa_get_jf_constant_rdesc (jf))
5018 0 : && rdesc->cs == cs)
5019 0 : rdesc->cs = NULL;
5020 : }
5021 : }
5022 0 : }
5023 :
5024 : /* Copy information from SRC_JF to DST_JF which correstpond to call graph edges
5025 : SRC and DST. */
5026 :
5027 : static void
5028 2797586 : ipa_duplicate_jump_function (cgraph_edge *src, cgraph_edge *dst,
5029 : ipa_jump_func *src_jf, ipa_jump_func *dst_jf)
5030 : {
5031 2797586 : dst_jf->agg.items = vec_safe_copy (src_jf->agg.items);
5032 2797586 : dst_jf->agg.by_ref = src_jf->agg.by_ref;
5033 :
5034 : /* We can avoid calling ipa_set_jfunc_vr since it would only look up the
5035 : place in the hash_table where the source m_vr resides. */
5036 2797586 : dst_jf->m_vr = src_jf->m_vr;
5037 :
5038 2797586 : if (src_jf->type == IPA_JF_CONST)
5039 : {
5040 881139 : ipa_set_jf_cst_copy (dst_jf, src_jf);
5041 881139 : struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
5042 :
5043 881139 : if (!src_rdesc)
5044 749691 : dst_jf->value.constant.rdesc = NULL;
5045 131448 : else if (src->caller == dst->caller)
5046 : {
5047 : /* Creation of a speculative edge. If the source edge is the one
5048 : grabbing a reference, we must create a new (duplicate)
5049 : reference description. Otherwise they refer to the same
5050 : description corresponding to a reference taken in a function
5051 : src->caller is inlined to. In that case we just must
5052 : increment the refcount. */
5053 37 : if (src_rdesc->cs == src)
5054 : {
5055 37 : symtab_node *n = symtab_node_for_jfunc (src_jf);
5056 37 : gcc_checking_assert (n);
5057 37 : ipa_ref *ref
5058 37 : = src->caller->find_reference (n, src->call_stmt,
5059 : src->lto_stmt_uid,
5060 : IPA_REF_ADDR);
5061 37 : gcc_checking_assert (ref);
5062 37 : dst->caller->clone_reference (ref, ref->stmt);
5063 :
5064 37 : ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
5065 37 : dst_rdesc->cs = dst;
5066 37 : dst_rdesc->refcount = src_rdesc->refcount;
5067 37 : dst_rdesc->next_duplicate = NULL;
5068 37 : dst_jf->value.constant.rdesc = dst_rdesc;
5069 : }
5070 : else
5071 : {
5072 0 : src_rdesc->refcount++;
5073 0 : dst_jf->value.constant.rdesc = src_rdesc;
5074 : }
5075 : }
5076 131411 : else if (src_rdesc->cs == src)
5077 : {
5078 131144 : struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
5079 131144 : dst_rdesc->cs = dst;
5080 131144 : dst_rdesc->refcount = src_rdesc->refcount;
5081 131144 : dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
5082 131144 : src_rdesc->next_duplicate = dst_rdesc;
5083 131144 : dst_jf->value.constant.rdesc = dst_rdesc;
5084 : }
5085 : else
5086 : {
5087 267 : struct ipa_cst_ref_desc *dst_rdesc;
5088 : /* This can happen during inlining, when a JFUNC can refer to a
5089 : reference taken in a function up in the tree of inline clones.
5090 : We need to find the duplicate that refers to our tree of
5091 : inline clones. */
5092 :
5093 267 : gcc_assert (dst->caller->inlined_to);
5094 267 : for (dst_rdesc = src_rdesc->next_duplicate;
5095 267 : dst_rdesc;
5096 0 : dst_rdesc = dst_rdesc->next_duplicate)
5097 : {
5098 267 : struct cgraph_node *top;
5099 0 : top = dst_rdesc->cs->caller->inlined_to
5100 267 : ? dst_rdesc->cs->caller->inlined_to
5101 : : dst_rdesc->cs->caller;
5102 267 : if (dst->caller->inlined_to == top)
5103 : break;
5104 : }
5105 267 : gcc_assert (dst_rdesc);
5106 267 : dst_jf->value.constant.rdesc = dst_rdesc;
5107 : }
5108 : }
5109 1916447 : else if (src_jf->type == IPA_JF_PASS_THROUGH)
5110 : {
5111 701404 : dst_jf->type = IPA_JF_PASS_THROUGH;
5112 701404 : dst_jf->value.pass_through = src_jf->value.pass_through;
5113 701404 : if (src->caller == dst->caller)
5114 : {
5115 10511 : struct cgraph_node *inline_root = dst->caller->inlined_to
5116 10534 : ? dst->caller->inlined_to : dst->caller;
5117 10534 : ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
5118 10534 : int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
5119 :
5120 10534 : int c = ipa_get_controlled_uses (root_info, idx);
5121 10534 : if (c != IPA_UNDESCRIBED_USE)
5122 : {
5123 2669 : c++;
5124 2669 : ipa_set_controlled_uses (root_info, idx, c);
5125 : }
5126 : }
5127 : }
5128 1215043 : else if (src_jf->type == IPA_JF_ANCESTOR)
5129 : {
5130 91426 : dst_jf->type = IPA_JF_ANCESTOR;
5131 91426 : dst_jf->value.ancestor = src_jf->value.ancestor;
5132 : }
5133 : else
5134 1123617 : gcc_assert (src_jf->type == IPA_JF_UNKNOWN);
5135 2797586 : }
5136 :
5137 : /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
5138 : reference count data strucutres accordingly. */
5139 :
5140 : void
5141 1268192 : ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
5142 : ipa_edge_args *old_args, ipa_edge_args *new_args)
5143 : {
5144 1268192 : unsigned int i;
5145 :
5146 1268192 : if (old_args->polymorphic_call_contexts)
5147 142804 : new_args->polymorphic_call_contexts
5148 142804 : = vec_safe_copy (old_args->polymorphic_call_contexts);
5149 :
5150 1268192 : if (!vec_safe_length (old_args->jump_functions))
5151 : {
5152 52905 : new_args->jump_functions = NULL;
5153 52905 : return;
5154 : }
5155 1215287 : vec_safe_grow_cleared (new_args->jump_functions,
5156 1215287 : old_args->jump_functions->length (), true);
5157 :
5158 3997790 : for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
5159 : {
5160 2782503 : struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
5161 2782503 : struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
5162 :
5163 2782503 : ipa_duplicate_jump_function (src, dst, src_jf, dst_jf);
5164 : }
5165 : }
5166 :
5167 : /* Analyze newly added function into callgraph. */
5168 :
5169 : static void
5170 40525 : ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
5171 : {
5172 40525 : if (node->has_gimple_body_p ())
5173 40525 : ipa_analyze_node (node);
5174 40525 : }
5175 :
5176 : /* Hook that is called by summary when a node is duplicated. */
5177 :
5178 : void
5179 769908 : ipa_node_params_t::duplicate(cgraph_node *, cgraph_node *,
5180 : ipa_node_params *old_info,
5181 : ipa_node_params *new_info)
5182 : {
5183 769908 : new_info->descriptors = vec_safe_copy (old_info->descriptors);
5184 769908 : gcc_assert (new_info->lattices.is_empty ());
5185 769908 : new_info->ipcp_orig_node = old_info->ipcp_orig_node;
5186 769908 : new_info->known_csts = old_info->known_csts.copy ();
5187 769908 : new_info->known_contexts = old_info->known_contexts.copy ();
5188 :
5189 769908 : new_info->analysis_done = old_info->analysis_done;
5190 769908 : new_info->node_enqueued = old_info->node_enqueued;
5191 769908 : new_info->versionable = old_info->versionable;
5192 769908 : }
5193 :
5194 : /* Duplication of ipcp transformation summaries. */
5195 :
5196 : void
5197 60599 : ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
5198 : ipcp_transformation *src_trans,
5199 : ipcp_transformation *dst_trans)
5200 : {
5201 : /* Avoid redundant work of duplicating vectors we will never use. */
5202 60599 : if (dst->inlined_to)
5203 : return;
5204 8551 : dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
5205 15810 : dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
5206 : }
5207 :
5208 : /* Register our cgraph hooks if they are not already there. */
5209 :
5210 : void
5211 382704 : ipa_register_cgraph_hooks (void)
5212 : {
5213 382704 : ipa_check_create_node_params ();
5214 382704 : ipa_check_create_edge_args ();
5215 :
5216 765408 : function_insertion_hook_holder =
5217 382704 : symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
5218 382704 : }
5219 :
5220 : /* Unregister our cgraph hooks if they are not already there. */
5221 :
5222 : static void
5223 464965 : ipa_unregister_cgraph_hooks (void)
5224 : {
5225 464965 : if (function_insertion_hook_holder)
5226 232521 : symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
5227 464965 : function_insertion_hook_holder = NULL;
5228 464965 : }
5229 :
5230 : /* Free all ipa_node_params and all ipa_edge_args structures if they are no
5231 : longer needed after ipa-cp. */
5232 :
5233 : void
5234 128918 : ipa_free_all_structures_after_ipa_cp (void)
5235 : {
5236 128918 : if (!optimize && !in_lto_p)
5237 : {
5238 0 : ipa_free_all_edge_args ();
5239 0 : ipa_free_all_node_params ();
5240 0 : ipcp_sources_pool.release ();
5241 0 : ipcp_cst_values_pool.release ();
5242 0 : ipcp_poly_ctx_values_pool.release ();
5243 0 : ipcp_agg_lattice_pool.release ();
5244 0 : ipa_unregister_cgraph_hooks ();
5245 0 : ipa_refdesc_pool.release ();
5246 : }
5247 128918 : }
5248 :
5249 : /* Free all ipa_node_params and all ipa_edge_args structures if they are no
5250 : longer needed after indirect inlining. */
5251 :
5252 : void
5253 464965 : ipa_free_all_structures_after_iinln (void)
5254 : {
5255 464965 : ipa_free_all_edge_args ();
5256 464965 : ipa_free_all_node_params ();
5257 464965 : ipa_unregister_cgraph_hooks ();
5258 464965 : ipcp_sources_pool.release ();
5259 464965 : ipcp_cst_values_pool.release ();
5260 464965 : ipcp_poly_ctx_values_pool.release ();
5261 464965 : ipcp_agg_lattice_pool.release ();
5262 464965 : ipa_refdesc_pool.release ();
5263 464965 : }
5264 :
5265 : /* Print ipa_tree_map data structures of all functions in the
5266 : callgraph to F. */
5267 :
5268 : void
5269 240 : ipa_print_node_params (FILE *f, struct cgraph_node *node)
5270 : {
5271 240 : int i, count;
5272 240 : class ipa_node_params *info;
5273 :
5274 240 : if (!node->definition)
5275 : return;
5276 181 : info = ipa_node_params_sum->get (node);
5277 181 : fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
5278 181 : if (!info)
5279 : {
5280 0 : fprintf (f, " no params return\n");
5281 0 : return;
5282 : }
5283 181 : count = ipa_get_param_count (info);
5284 367 : for (i = 0; i < count; i++)
5285 : {
5286 186 : int c;
5287 :
5288 186 : fprintf (f, " ");
5289 186 : ipa_dump_param (f, info, i);
5290 186 : if (ipa_is_param_used (info, i))
5291 178 : fprintf (f, " used");
5292 186 : if (ipa_is_param_used_by_ipa_predicates (info, i))
5293 108 : fprintf (f, " used_by_ipa_predicates");
5294 186 : if (ipa_is_param_used_by_indirect_call (info, i))
5295 10 : fprintf (f, " used_by_indirect_call");
5296 186 : if (ipa_is_param_used_by_polymorphic_call (info, i))
5297 0 : fprintf (f, " used_by_polymorphic_call");
5298 186 : c = ipa_get_controlled_uses (info, i);
5299 186 : if (c == IPA_UNDESCRIBED_USE)
5300 108 : fprintf (f, " undescribed_use");
5301 : else
5302 134 : fprintf (f, " controlled_uses=%i %s", c,
5303 78 : ipa_get_param_load_dereferenced (info, i)
5304 : ? "(load_dereferenced)" : "");
5305 186 : fprintf (f, "\n");
5306 : }
5307 : }
5308 :
5309 : /* Print ipa_tree_map data structures of all functions in the
5310 : callgraph to F. */
5311 :
5312 : void
5313 48 : ipa_print_all_params (FILE * f)
5314 : {
5315 48 : struct cgraph_node *node;
5316 :
5317 48 : fprintf (f, "\nFunction parameters:\n");
5318 274 : FOR_EACH_FUNCTION (node)
5319 226 : ipa_print_node_params (f, node);
5320 48 : }
5321 :
5322 : /* Stream out jump function JUMP_FUNC to OB. */
5323 :
5324 : static void
5325 608790 : ipa_write_jump_function (struct output_block *ob,
5326 : struct ipa_jump_func *jump_func)
5327 : {
5328 608790 : struct ipa_agg_jf_item *item;
5329 608790 : struct bitpack_d bp;
5330 608790 : int i, count;
5331 608790 : int flag = 0;
5332 :
5333 : /* ADDR_EXPRs are very comon IP invariants; save some streamer data
5334 : as well as WPA memory by handling them specially. */
5335 608790 : if (jump_func->type == IPA_JF_CONST
5336 464780 : && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
5337 608790 : flag = 1;
5338 :
5339 608790 : streamer_write_uhwi (ob, jump_func->type * 2 + flag);
5340 608790 : switch (jump_func->type)
5341 : {
5342 : case IPA_JF_UNKNOWN:
5343 : break;
5344 464780 : case IPA_JF_CONST:
5345 464780 : gcc_assert (
5346 : EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
5347 464780 : stream_write_tree (ob,
5348 : flag
5349 : ? TREE_OPERAND (jump_func->value.constant.value, 0)
5350 : : jump_func->value.constant.value, true);
5351 464780 : break;
5352 76695 : case IPA_JF_PASS_THROUGH:
5353 76695 : streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
5354 76695 : if (jump_func->value.pass_through.operation == NOP_EXPR)
5355 : {
5356 75875 : streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
5357 75875 : bp = bitpack_create (ob->main_stream);
5358 75875 : bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
5359 75875 : gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
5360 75875 : streamer_write_bitpack (&bp);
5361 : }
5362 820 : else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
5363 : == tcc_unary)
5364 : {
5365 39 : stream_write_tree (ob, jump_func->value.pass_through.op_type, true);
5366 39 : streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
5367 : }
5368 : else
5369 : {
5370 781 : stream_write_tree (ob, jump_func->value.pass_through.op_type, true);
5371 781 : stream_write_tree (ob, jump_func->value.pass_through.operand, true);
5372 781 : streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
5373 : }
5374 : break;
5375 1313 : case IPA_JF_ANCESTOR:
5376 1313 : streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
5377 1313 : streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
5378 1313 : bp = bitpack_create (ob->main_stream);
5379 1313 : bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
5380 1313 : bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
5381 1313 : streamer_write_bitpack (&bp);
5382 1313 : break;
5383 0 : default:
5384 0 : fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
5385 : }
5386 :
5387 608790 : count = vec_safe_length (jump_func->agg.items);
5388 608790 : streamer_write_uhwi (ob, count);
5389 608790 : if (count)
5390 : {
5391 3283 : bp = bitpack_create (ob->main_stream);
5392 3283 : bp_pack_value (&bp, jump_func->agg.by_ref, 1);
5393 3283 : streamer_write_bitpack (&bp);
5394 : }
5395 :
5396 615364 : FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
5397 : {
5398 6574 : stream_write_tree (ob, item->type, true);
5399 6574 : streamer_write_uhwi (ob, item->offset);
5400 6574 : streamer_write_uhwi (ob, item->jftype);
5401 6574 : switch (item->jftype)
5402 : {
5403 : case IPA_JF_UNKNOWN:
5404 : break;
5405 6049 : case IPA_JF_CONST:
5406 6049 : stream_write_tree (ob, item->value.constant, true);
5407 6049 : break;
5408 525 : case IPA_JF_PASS_THROUGH:
5409 525 : case IPA_JF_LOAD_AGG:
5410 525 : streamer_write_uhwi (ob, item->value.pass_through.operation);
5411 525 : streamer_write_uhwi (ob, item->value.pass_through.formal_id);
5412 525 : if (item->value.pass_through.operation != NOP_EXPR)
5413 4 : stream_write_tree (ob, item->value.pass_through.op_type, true);
5414 525 : if (TREE_CODE_CLASS (item->value.pass_through.operation)
5415 : != tcc_unary)
5416 4 : stream_write_tree (ob, item->value.pass_through.operand, true);
5417 525 : if (item->jftype == IPA_JF_LOAD_AGG)
5418 : {
5419 85 : stream_write_tree (ob, item->value.load_agg.type, true);
5420 85 : streamer_write_uhwi (ob, item->value.load_agg.offset);
5421 85 : bp = bitpack_create (ob->main_stream);
5422 85 : bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
5423 85 : streamer_write_bitpack (&bp);
5424 : }
5425 : break;
5426 0 : default:
5427 0 : fatal_error (UNKNOWN_LOCATION,
5428 : "invalid jump function in LTO stream");
5429 : }
5430 : }
5431 :
5432 608790 : bp = bitpack_create (ob->main_stream);
5433 608790 : if (jump_func->m_vr)
5434 423168 : jump_func->m_vr->streamer_write (ob);
5435 : else
5436 : {
5437 185622 : bp_pack_value (&bp, false, 1);
5438 185622 : streamer_write_bitpack (&bp);
5439 : }
5440 608790 : }
5441 :
5442 : /* Read in jump function JUMP_FUNC from IB. */
5443 :
5444 : static void
5445 569665 : ipa_read_jump_function (class lto_input_block *ib,
5446 : struct ipa_jump_func *jump_func,
5447 : struct cgraph_edge *cs,
5448 : class data_in *data_in,
5449 : bool prevails)
5450 : {
5451 569665 : enum jump_func_type jftype;
5452 569665 : enum tree_code operation;
5453 569665 : int i, count;
5454 569665 : int val = streamer_read_uhwi (ib);
5455 569665 : bool flag = val & 1;
5456 :
5457 569665 : jftype = (enum jump_func_type) (val / 2);
5458 569665 : switch (jftype)
5459 : {
5460 50630 : case IPA_JF_UNKNOWN:
5461 50630 : ipa_set_jf_unknown (jump_func);
5462 50630 : break;
5463 448450 : case IPA_JF_CONST:
5464 448450 : {
5465 448450 : tree t = stream_read_tree (ib, data_in);
5466 448450 : if (flag && prevails)
5467 161596 : t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
5468 448450 : ipa_set_jf_constant (jump_func, t, cs);
5469 : }
5470 448450 : break;
5471 69931 : case IPA_JF_PASS_THROUGH:
5472 69931 : operation = (enum tree_code) streamer_read_uhwi (ib);
5473 69931 : if (operation == NOP_EXPR)
5474 : {
5475 69384 : int formal_id = streamer_read_uhwi (ib);
5476 69384 : struct bitpack_d bp = streamer_read_bitpack (ib);
5477 69384 : bool agg_preserved = bp_unpack_value (&bp, 1);
5478 69384 : ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
5479 : }
5480 547 : else if (TREE_CODE_CLASS (operation) == tcc_unary)
5481 : {
5482 19 : tree op_type = stream_read_tree (ib, data_in);
5483 19 : int formal_id = streamer_read_uhwi (ib);
5484 19 : ipa_set_jf_unary_pass_through (jump_func, formal_id, operation,
5485 : op_type);
5486 : }
5487 : else
5488 : {
5489 528 : tree op_type = stream_read_tree (ib, data_in);
5490 528 : tree operand = stream_read_tree (ib, data_in);
5491 528 : int formal_id = streamer_read_uhwi (ib);
5492 528 : ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
5493 : operation, op_type);
5494 : }
5495 : break;
5496 654 : case IPA_JF_ANCESTOR:
5497 654 : {
5498 654 : HOST_WIDE_INT offset = streamer_read_uhwi (ib);
5499 654 : int formal_id = streamer_read_uhwi (ib);
5500 654 : struct bitpack_d bp = streamer_read_bitpack (ib);
5501 654 : bool agg_preserved = bp_unpack_value (&bp, 1);
5502 654 : bool keep_null = bp_unpack_value (&bp, 1);
5503 654 : ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
5504 : keep_null);
5505 654 : break;
5506 : }
5507 0 : default:
5508 0 : fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
5509 : }
5510 :
5511 569665 : count = streamer_read_uhwi (ib);
5512 569665 : if (prevails)
5513 : {
5514 569659 : jump_func->agg.items = NULL;
5515 569659 : vec_safe_reserve (jump_func->agg.items, count, true);
5516 : }
5517 569665 : if (count)
5518 : {
5519 2929 : struct bitpack_d bp = streamer_read_bitpack (ib);
5520 2929 : jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
5521 : }
5522 575657 : for (i = 0; i < count; i++)
5523 : {
5524 5992 : struct ipa_agg_jf_item item;
5525 5992 : item.type = stream_read_tree (ib, data_in);
5526 5992 : item.offset = streamer_read_uhwi (ib);
5527 5992 : item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
5528 :
5529 5992 : switch (item.jftype)
5530 : {
5531 : case IPA_JF_UNKNOWN:
5532 : break;
5533 5553 : case IPA_JF_CONST:
5534 5553 : item.value.constant = stream_read_tree (ib, data_in);
5535 5553 : break;
5536 439 : case IPA_JF_PASS_THROUGH:
5537 439 : case IPA_JF_LOAD_AGG:
5538 439 : operation = (enum tree_code) streamer_read_uhwi (ib);
5539 439 : item.value.pass_through.operation = operation;
5540 439 : item.value.pass_through.formal_id = streamer_read_uhwi (ib);
5541 439 : if (operation != NOP_EXPR)
5542 0 : item.value.pass_through.op_type = stream_read_tree (ib, data_in);
5543 : else
5544 439 : item.value.pass_through.op_type = NULL_TREE;
5545 439 : if (TREE_CODE_CLASS (operation) == tcc_unary)
5546 439 : item.value.pass_through.operand = NULL_TREE;
5547 : else
5548 0 : item.value.pass_through.operand = stream_read_tree (ib, data_in);
5549 439 : if (item.jftype == IPA_JF_LOAD_AGG)
5550 : {
5551 47 : struct bitpack_d bp;
5552 47 : item.value.load_agg.type = stream_read_tree (ib, data_in);
5553 47 : item.value.load_agg.offset = streamer_read_uhwi (ib);
5554 47 : bp = streamer_read_bitpack (ib);
5555 47 : item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
5556 : }
5557 : break;
5558 0 : default:
5559 0 : fatal_error (UNKNOWN_LOCATION,
5560 : "invalid jump function in LTO stream");
5561 : }
5562 5992 : if (prevails)
5563 5992 : jump_func->agg.items->quick_push (item);
5564 : }
5565 :
5566 569665 : ipa_vr vr;
5567 569665 : vr.streamer_read (ib, data_in);
5568 569665 : if (vr.known_p ())
5569 : {
5570 398372 : if (prevails)
5571 398366 : ipa_set_jfunc_vr (jump_func, vr);
5572 : }
5573 : else
5574 171293 : jump_func->m_vr = NULL;
5575 569665 : }
5576 :
5577 : /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
5578 : relevant to indirect inlining to OB. */
5579 :
5580 : static void
5581 2610 : ipa_write_indirect_edge_info (struct output_block *ob,
5582 : struct cgraph_edge *cs)
5583 : {
5584 2610 : struct bitpack_d bp;
5585 :
5586 2610 : bp = bitpack_create (ob->main_stream);
5587 2610 : bp_pack_enum (&bp, cgraph_indirect_info_kind, CIIK_N_KINDS,
5588 : cs->indirect_info->kind);
5589 2610 : streamer_write_bitpack (&bp);
5590 :
5591 2610 : if (cgraph_polymorphic_indirect_info *pii
5592 2610 : = dyn_cast <cgraph_polymorphic_indirect_info *> (cs->indirect_info))
5593 : {
5594 1023 : bp = bitpack_create (ob->main_stream);
5595 1023 : bp_pack_value (&bp, pii->vptr_changed, 1);
5596 1023 : streamer_write_bitpack (&bp);
5597 :
5598 1023 : streamer_write_hwi (ob, pii->param_index);
5599 1023 : pii->context.stream_out (ob);
5600 1023 : streamer_write_hwi (ob, pii->otr_token);
5601 1023 : stream_write_tree (ob, pii->otr_type, true);
5602 1023 : streamer_write_hwi (ob, pii->offset);
5603 : }
5604 1587 : else if (cgraph_simple_indirect_info *sii
5605 1587 : = dyn_cast <cgraph_simple_indirect_info *> (cs->indirect_info))
5606 : {
5607 1568 : bp = bitpack_create (ob->main_stream);
5608 1568 : bp_pack_value (&bp, sii->agg_contents, 1);
5609 1568 : bp_pack_value (&bp, sii->member_ptr, 1);
5610 1568 : bp_pack_value (&bp, sii->fnptr_loaded_from_record, 1);
5611 1568 : bp_pack_value (&bp, sii->by_ref, 1);
5612 1568 : bp_pack_value (&bp, sii->guaranteed_unmodified, 1);
5613 1568 : streamer_write_bitpack (&bp);
5614 :
5615 1568 : streamer_write_hwi (ob, sii->param_index);
5616 1568 : if (sii->agg_contents)
5617 55 : streamer_write_hwi (ob, sii->offset);
5618 : else
5619 1513 : gcc_assert (sii->offset == 0);
5620 1568 : if (sii->fnptr_loaded_from_record)
5621 : {
5622 128 : stream_write_tree (ob, sii->rec_type, true);
5623 128 : streamer_write_uhwi (ob, sii->fld_offset);
5624 : }
5625 : }
5626 : else
5627 19 : gcc_assert (cs->indirect_info->param_index == -1);
5628 2610 : }
5629 :
5630 : /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5631 : relevant to indirect inlining from IB. */
5632 :
5633 : static void
5634 1417 : ipa_read_indirect_edge_info (class lto_input_block *ib,
5635 : class data_in *data_in,
5636 : struct cgraph_edge *cs,
5637 : class ipa_node_params *info)
5638 : {
5639 1417 : struct bitpack_d bp;
5640 :
5641 1417 : bp = streamer_read_bitpack (ib);
5642 1417 : enum cgraph_indirect_info_kind ii_kind
5643 1417 : = bp_unpack_enum (&bp, cgraph_indirect_info_kind, CIIK_N_KINDS);
5644 1417 : gcc_assert (ii_kind == cs->indirect_info->kind);
5645 :
5646 1417 : if (cgraph_polymorphic_indirect_info *pii
5647 1417 : = dyn_cast <cgraph_polymorphic_indirect_info *> (cs->indirect_info))
5648 : {
5649 93 : bp = streamer_read_bitpack (ib);
5650 93 : pii->vptr_changed = bp_unpack_value (&bp, 1);
5651 :
5652 93 : pii->param_index = (int) streamer_read_hwi (ib);
5653 93 : pii->context.stream_in (ib, data_in);
5654 93 : pii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5655 93 : pii->otr_type = stream_read_tree (ib, data_in);
5656 93 : pii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5657 :
5658 93 : if (info && pii->param_index >= 0)
5659 : {
5660 70 : ipa_set_param_used_by_polymorphic_call (info, pii->param_index, true);
5661 70 : ipa_set_param_used_by_indirect_call (info, pii->param_index, true);
5662 : }
5663 : }
5664 1324 : else if (cgraph_simple_indirect_info *sii
5665 1324 : = dyn_cast <cgraph_simple_indirect_info *> (cs->indirect_info))
5666 : {
5667 1319 : bp = streamer_read_bitpack (ib);
5668 1319 : sii->agg_contents = bp_unpack_value (&bp, 1);
5669 1319 : sii->member_ptr = bp_unpack_value (&bp, 1);
5670 1319 : sii->fnptr_loaded_from_record = bp_unpack_value (&bp, 1);
5671 1319 : sii->by_ref = bp_unpack_value (&bp, 1);
5672 1319 : sii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5673 :
5674 1319 : sii->param_index = (int) streamer_read_hwi (ib);
5675 1319 : if (sii->agg_contents)
5676 31 : sii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5677 : else
5678 1288 : sii->offset = 0;
5679 1319 : if (sii->fnptr_loaded_from_record)
5680 : {
5681 66 : sii->rec_type = stream_read_tree (ib, data_in);
5682 66 : sii->fld_offset = (unsigned) streamer_read_uhwi (ib);
5683 : }
5684 1319 : if (info && sii->param_index >= 0)
5685 263 : ipa_set_param_used_by_indirect_call (info, sii->param_index, true);
5686 : }
5687 : else
5688 5 : cs->indirect_info->param_index = -1;
5689 1417 : }
5690 :
5691 : /* Stream out NODE info to OB. */
5692 :
5693 : static void
5694 92899 : ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5695 : {
5696 92899 : int node_ref;
5697 92899 : lto_symtab_encoder_t encoder;
5698 92899 : ipa_node_params *info = ipa_node_params_sum->get (node);
5699 92899 : int j;
5700 92899 : struct cgraph_edge *e;
5701 92899 : struct bitpack_d bp;
5702 :
5703 92899 : encoder = ob->decl_state->symtab_node_encoder;
5704 92899 : node_ref = lto_symtab_encoder_encode (encoder, node);
5705 92899 : streamer_write_uhwi (ob, node_ref);
5706 :
5707 92899 : streamer_write_uhwi (ob, ipa_get_param_count (info));
5708 449976 : for (j = 0; j < ipa_get_param_count (info); j++)
5709 100807 : streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5710 92899 : bp = bitpack_create (ob->main_stream);
5711 92899 : gcc_assert (info->analysis_done
5712 : || ipa_get_param_count (info) == 0);
5713 92899 : gcc_assert (!info->node_enqueued);
5714 92899 : gcc_assert (!info->ipcp_orig_node);
5715 357077 : for (j = 0; j < ipa_get_param_count (info); j++)
5716 : {
5717 : /* TODO: We could just not stream the bit in the undescribed case. */
5718 100807 : bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
5719 100807 : ? ipa_get_param_load_dereferenced (info, j) : true;
5720 100807 : bp_pack_value (&bp, d, 1);
5721 100807 : bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5722 : }
5723 92899 : streamer_write_bitpack (&bp);
5724 449976 : for (j = 0; j < ipa_get_param_count (info); j++)
5725 : {
5726 100807 : streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5727 100807 : stream_write_tree (ob, ipa_get_type (info, j), true);
5728 : }
5729 455941 : for (e = node->callees; e; e = e->next_callee)
5730 : {
5731 363042 : ipa_edge_args *args = ipa_edge_args_sum->get (e);
5732 :
5733 363042 : if (!args)
5734 : {
5735 806 : streamer_write_uhwi (ob, 0);
5736 806 : continue;
5737 : }
5738 :
5739 362236 : streamer_write_uhwi (ob,
5740 362236 : ipa_get_cs_argument_count (args) * 2
5741 362236 : + (args->polymorphic_call_contexts != NULL));
5742 2188925 : for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5743 : {
5744 605664 : ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5745 605664 : if (args->polymorphic_call_contexts != NULL)
5746 2419 : ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5747 : }
5748 : }
5749 95509 : for (e = node->indirect_calls; e; e = e->next_callee)
5750 : {
5751 2610 : ipa_edge_args *args = ipa_edge_args_sum->get (e);
5752 2610 : if (!args)
5753 6 : streamer_write_uhwi (ob, 0);
5754 : else
5755 : {
5756 2604 : streamer_write_uhwi (ob,
5757 2604 : ipa_get_cs_argument_count (args) * 2
5758 2604 : + (args->polymorphic_call_contexts != NULL));
5759 13922 : for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5760 : {
5761 3126 : ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5762 3126 : if (args->polymorphic_call_contexts != NULL)
5763 1321 : ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5764 : }
5765 : }
5766 2610 : ipa_write_indirect_edge_info (ob, e);
5767 : }
5768 92899 : }
5769 :
5770 : /* Stream in edge E from IB. */
5771 :
5772 : static void
5773 334916 : ipa_read_edge_info (class lto_input_block *ib,
5774 : class data_in *data_in,
5775 : struct cgraph_edge *e, bool prevails)
5776 : {
5777 334916 : int count = streamer_read_uhwi (ib);
5778 334916 : bool contexts_computed = count & 1;
5779 :
5780 334916 : count /= 2;
5781 334916 : if (!count)
5782 : return;
5783 234291 : if (prevails
5784 234291 : && (e->possibly_call_in_translation_unit_p ()
5785 : /* Also stream in jump functions to builtins in hope that they
5786 : will get fnspecs. */
5787 115796 : || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5788 : {
5789 223065 : ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5790 223065 : vec_safe_grow_cleared (args->jump_functions, count, true);
5791 223065 : if (contexts_computed)
5792 634 : vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5793 774749 : for (int k = 0; k < count; k++)
5794 : {
5795 551684 : ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5796 : data_in, prevails);
5797 551684 : if (contexts_computed)
5798 984 : ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5799 984 : (ib, data_in);
5800 : }
5801 : }
5802 : else
5803 : {
5804 29207 : for (int k = 0; k < count; k++)
5805 : {
5806 17981 : struct ipa_jump_func dummy;
5807 17981 : ipa_read_jump_function (ib, &dummy, e,
5808 : data_in, prevails);
5809 17981 : if (contexts_computed)
5810 : {
5811 447 : class ipa_polymorphic_call_context ctx;
5812 447 : ctx.stream_in (ib, data_in);
5813 : }
5814 : }
5815 : }
5816 : }
5817 :
5818 : /* Stream in NODE info from IB. */
5819 :
5820 : static void
5821 77587 : ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5822 : class data_in *data_in)
5823 : {
5824 77587 : int k;
5825 77587 : struct cgraph_edge *e;
5826 77587 : struct bitpack_d bp;
5827 77587 : bool prevails = node->prevailing_p ();
5828 77587 : ipa_node_params *info
5829 77587 : = prevails ? ipa_node_params_sum->get_create (node) : NULL;
5830 :
5831 77587 : int param_count = streamer_read_uhwi (ib);
5832 77587 : if (prevails)
5833 : {
5834 77569 : ipa_alloc_node_params (node, param_count);
5835 233304 : for (k = 0; k < param_count; k++)
5836 78166 : (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5837 77569 : if (ipa_get_param_count (info) != 0)
5838 53115 : info->analysis_done = true;
5839 77569 : info->node_enqueued = false;
5840 : }
5841 : else
5842 27 : for (k = 0; k < param_count; k++)
5843 9 : streamer_read_uhwi (ib);
5844 :
5845 77587 : bp = streamer_read_bitpack (ib);
5846 155762 : for (k = 0; k < param_count; k++)
5847 : {
5848 78175 : bool load_dereferenced = bp_unpack_value (&bp, 1);
5849 78175 : bool used = bp_unpack_value (&bp, 1);
5850 :
5851 78175 : if (prevails)
5852 : {
5853 78166 : ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5854 78166 : ipa_set_param_used (info, k, used);
5855 : }
5856 : }
5857 155762 : for (k = 0; k < param_count; k++)
5858 : {
5859 78175 : int nuses = streamer_read_hwi (ib);
5860 78175 : tree type = stream_read_tree (ib, data_in);
5861 :
5862 78175 : if (prevails)
5863 : {
5864 78166 : ipa_set_controlled_uses (info, k, nuses);
5865 78166 : (*info->descriptors)[k].decl_or_type = type;
5866 : }
5867 : }
5868 411086 : for (e = node->callees; e; e = e->next_callee)
5869 333499 : ipa_read_edge_info (ib, data_in, e, prevails);
5870 79004 : for (e = node->indirect_calls; e; e = e->next_callee)
5871 : {
5872 1417 : ipa_read_edge_info (ib, data_in, e, prevails);
5873 1417 : ipa_read_indirect_edge_info (ib, data_in, e, info);
5874 : }
5875 77587 : }
5876 :
5877 : /* Stream out ipa_return_summary. */
5878 : static void
5879 31669 : ipa_write_return_summaries (output_block *ob)
5880 : {
5881 31669 : if (!ipa_return_value_sum)
5882 : {
5883 15193 : streamer_write_uhwi (ob, 0);
5884 15193 : return;
5885 : }
5886 :
5887 16476 : lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
5888 16476 : unsigned int count = 0;
5889 437846 : for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5890 : {
5891 202447 : toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
5892 404894 : cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
5893 166853 : ipa_return_value_summary *v;
5894 :
5895 166853 : if (cnode && cnode->definition && !cnode->alias
5896 122831 : && (v = ipa_return_value_sum->get (cnode))
5897 25220 : && v->vr)
5898 25220 : count++;
5899 : }
5900 16476 : streamer_write_uhwi (ob, count);
5901 :
5902 437846 : for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5903 : {
5904 202447 : toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
5905 404894 : cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
5906 166853 : ipa_return_value_summary *v;
5907 :
5908 166853 : if (cnode && cnode->definition && !cnode->alias
5909 122831 : && (v = ipa_return_value_sum->get (cnode))
5910 25220 : && v->vr)
5911 : {
5912 25220 : streamer_write_uhwi
5913 25220 : (ob,
5914 25220 : lto_symtab_encoder_encode (encoder, cnode));
5915 25220 : v->vr->streamer_write (ob);
5916 : }
5917 : }
5918 : }
5919 :
5920 : /* Write jump functions for nodes in SET. */
5921 :
5922 : void
5923 23316 : ipa_prop_write_jump_functions (void)
5924 : {
5925 23316 : struct output_block *ob;
5926 23316 : unsigned int count = 0;
5927 23316 : lto_symtab_encoder_iterator lsei;
5928 23316 : lto_symtab_encoder_t encoder;
5929 :
5930 23316 : if (!ipa_node_params_sum || !ipa_edge_args_sum)
5931 0 : return;
5932 :
5933 23316 : ob = create_output_block (LTO_section_jump_functions);
5934 23316 : encoder = ob->decl_state->symtab_node_encoder;
5935 23316 : ob->symbol = NULL;
5936 128405 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5937 105089 : lsei_next_function_in_partition (&lsei))
5938 : {
5939 105089 : cgraph_node *node = lsei_cgraph_node (lsei);
5940 105089 : if (node->has_gimple_body_p ()
5941 105089 : && ipa_node_params_sum->get (node) != NULL)
5942 92899 : count++;
5943 : }
5944 :
5945 23316 : streamer_write_uhwi (ob, count);
5946 :
5947 : /* Process all of the functions. */
5948 128405 : for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5949 105089 : lsei_next_function_in_partition (&lsei))
5950 : {
5951 105089 : cgraph_node *node = lsei_cgraph_node (lsei);
5952 105089 : if (node->has_gimple_body_p ()
5953 105089 : && ipa_node_params_sum->get (node) != NULL)
5954 92899 : ipa_write_node_info (ob, node);
5955 : }
5956 23316 : ipa_write_return_summaries (ob);
5957 :
5958 23316 : if (noted_fnptrs_in_records)
5959 : {
5960 340 : count = 0;
5961 1069 : for (auto iter = noted_fnptrs_in_records->begin ();
5962 1069 : iter != noted_fnptrs_in_records->end();
5963 729 : ++iter)
5964 729 : if ((*iter)->fn)
5965 721 : count++;
5966 340 : streamer_write_uhwi (ob, count);
5967 :
5968 1069 : for (auto iter = noted_fnptrs_in_records->begin ();
5969 1409 : iter != noted_fnptrs_in_records->end();
5970 729 : ++iter)
5971 729 : if ((*iter)->fn)
5972 : {
5973 721 : stream_write_tree (ob, (*iter)->rec_type, true);
5974 721 : stream_write_tree (ob, (*iter)->fn, true);
5975 721 : streamer_write_uhwi (ob, (*iter)->fld_offset);
5976 : }
5977 : }
5978 : else
5979 22976 : streamer_write_uhwi (ob, 0);
5980 :
5981 23316 : produce_asm (ob);
5982 23316 : destroy_output_block (ob);
5983 : }
5984 :
5985 : /* Record that return value range of N is VAL. */
5986 :
5987 : static void
5988 766683 : ipa_record_return_value_range_1 (cgraph_node *n, value_range val)
5989 : {
5990 766683 : if (!ipa_return_value_sum)
5991 : {
5992 86805 : if (!ipa_vr_hash_table)
5993 76049 : ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
5994 86805 : ipa_return_value_sum = new (ggc_alloc_no_dtor <ipa_return_value_sum_t> ())
5995 86805 : ipa_return_value_sum_t (symtab, true);
5996 86805 : ipa_return_value_sum->disable_insertion_hook ();
5997 : }
5998 766683 : ipa_return_value_sum->get_create (n)->vr = ipa_get_value_range (val);
5999 766683 : if (dump_file && (dump_flags & TDF_DETAILS))
6000 : {
6001 21 : fprintf (dump_file, "Recording return range of %s:", n->dump_name ());
6002 21 : val.dump (dump_file);
6003 21 : fprintf (dump_file, "\n");
6004 : }
6005 766683 : }
6006 :
6007 : /* Stream out ipa_return_summary. */
6008 : static void
6009 21807 : ipa_read_return_summaries (lto_input_block *ib,
6010 : struct lto_file_decl_data *file_data,
6011 : class data_in *data_in)
6012 : {
6013 21807 : unsigned int f_count = streamer_read_uhwi (ib);
6014 43501 : for (unsigned int i = 0; i < f_count; i++)
6015 : {
6016 21694 : unsigned int index = streamer_read_uhwi (ib);
6017 21694 : lto_symtab_encoder_t encoder = file_data->symtab_node_encoder;
6018 21694 : struct cgraph_node *node
6019 : = dyn_cast <cgraph_node *>
6020 21694 : (lto_symtab_encoder_deref (encoder, index));
6021 21694 : ipa_vr rvr;
6022 21694 : rvr.streamer_read (ib, data_in);
6023 21694 : if (node->prevailing_p ())
6024 : {
6025 21692 : value_range tmp;
6026 21692 : rvr.get_vrange (tmp);
6027 21692 : ipa_record_return_value_range_1 (node, tmp);
6028 21692 : }
6029 : }
6030 21807 : }
6031 :
6032 : /* Read section in file FILE_DATA of length LEN with data DATA. */
6033 :
6034 : static void
6035 13454 : ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
6036 : size_t len)
6037 : {
6038 13454 : const struct lto_function_header *header =
6039 : (const struct lto_function_header *) data;
6040 13454 : const int cfg_offset = sizeof (struct lto_function_header);
6041 13454 : const int main_offset = cfg_offset + header->cfg_size;
6042 13454 : const int string_offset = main_offset + header->main_size;
6043 13454 : class data_in *data_in;
6044 13454 : unsigned int i;
6045 13454 : unsigned int count;
6046 :
6047 13454 : lto_input_block ib_main ((const char *) data + main_offset,
6048 13454 : header->main_size, file_data);
6049 :
6050 13454 : data_in =
6051 26908 : lto_data_in_create (file_data, (const char *) data + string_offset,
6052 13454 : header->string_size, vNULL);
6053 13454 : count = streamer_read_uhwi (&ib_main);
6054 :
6055 91041 : for (i = 0; i < count; i++)
6056 : {
6057 77587 : unsigned int index;
6058 77587 : struct cgraph_node *node;
6059 77587 : lto_symtab_encoder_t encoder;
6060 :
6061 77587 : index = streamer_read_uhwi (&ib_main);
6062 77587 : encoder = file_data->symtab_node_encoder;
6063 77587 : node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
6064 : index));
6065 77587 : gcc_assert (node->definition);
6066 77587 : ipa_read_node_info (&ib_main, node, data_in);
6067 : }
6068 13454 : ipa_read_return_summaries (&ib_main, file_data, data_in);
6069 :
6070 13454 : count = streamer_read_uhwi (&ib_main);
6071 14122 : for (i = 0; i < count; i++)
6072 : {
6073 668 : tree rec_type = stream_read_tree (&ib_main, data_in);
6074 668 : tree fn = stream_read_tree (&ib_main, data_in);
6075 668 : unsigned fld_offset = (unsigned) streamer_read_uhwi (&ib_main);
6076 668 : note_fnptr_in_record (rec_type, fld_offset, fn);
6077 : }
6078 :
6079 13454 : lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
6080 : len);
6081 13454 : lto_data_in_delete (data_in);
6082 13454 : }
6083 :
6084 : /* Read ipcp jump functions. */
6085 :
6086 : void
6087 12399 : ipa_prop_read_jump_functions (void)
6088 : {
6089 12399 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
6090 12399 : struct lto_file_decl_data *file_data;
6091 12399 : unsigned int j = 0;
6092 :
6093 12399 : ipa_check_create_node_params ();
6094 12399 : ipa_check_create_edge_args ();
6095 12399 : ipa_register_cgraph_hooks ();
6096 :
6097 38252 : while ((file_data = file_data_vec[j++]))
6098 : {
6099 13454 : size_t len;
6100 13454 : const char *data
6101 13454 : = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
6102 : &len);
6103 13454 : if (data)
6104 13454 : ipa_prop_read_section (file_data, data, len);
6105 : }
6106 12399 : }
6107 :
6108 : /* Return true if the IPA-CP transformation summary TS is non-NULL and contains
6109 : useful info. */
6110 : static bool
6111 166466 : useful_ipcp_transformation_info_p (ipcp_transformation *ts)
6112 : {
6113 166466 : if (!ts)
6114 : return false;
6115 24264 : if (!vec_safe_is_empty (ts->m_agg_values)
6116 23860 : || !vec_safe_is_empty (ts->m_vr))
6117 23978 : return true;
6118 : return false;
6119 : }
6120 :
6121 : /* Write into OB IPA-CP transfromation summary TS describing NODE. */
6122 :
6123 : void
6124 11971 : write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
6125 : ipcp_transformation *ts)
6126 : {
6127 11971 : lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
6128 11971 : int node_ref = lto_symtab_encoder_encode (encoder, node);
6129 11971 : streamer_write_uhwi (ob, node_ref);
6130 :
6131 12170 : streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
6132 13602 : for (const ipa_argagg_value &av : ts->m_agg_values)
6133 : {
6134 1233 : struct bitpack_d bp;
6135 :
6136 1233 : stream_write_tree (ob, av.value, true);
6137 1233 : streamer_write_uhwi (ob, av.unit_offset);
6138 1233 : streamer_write_uhwi (ob, av.index);
6139 :
6140 1233 : bp = bitpack_create (ob->main_stream);
6141 1233 : bp_pack_value (&bp, av.by_ref, 1);
6142 1233 : bp_pack_value (&bp, av.killed, 1);
6143 1233 : streamer_write_bitpack (&bp);
6144 : }
6145 :
6146 : /* If all instances of this node are inlined, ipcp info is not useful. */
6147 11971 : if (!lto_symtab_encoder_only_for_inlining_p (encoder, node))
6148 : {
6149 21744 : streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
6150 53057 : for (const ipa_vr &parm_vr : ts->m_vr)
6151 20443 : parm_vr.streamer_write (ob);
6152 : }
6153 : else
6154 1097 : streamer_write_uhwi (ob, 0);
6155 11971 : }
6156 :
6157 : /* Stream in the aggregate value replacement chain for NODE from IB. */
6158 :
6159 : static void
6160 11971 : read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
6161 : data_in *data_in)
6162 : {
6163 11971 : unsigned int count, i;
6164 11971 : ipcp_transformation_initialize ();
6165 11971 : ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
6166 :
6167 11971 : count = streamer_read_uhwi (ib);
6168 11971 : if (count > 0)
6169 : {
6170 199 : vec_safe_grow_cleared (ts->m_agg_values, count, true);
6171 1432 : for (i = 0; i <count; i++)
6172 : {
6173 1233 : ipa_argagg_value *av = &(*ts->m_agg_values)[i];;
6174 :
6175 1233 : av->value = stream_read_tree (ib, data_in);
6176 1233 : av->unit_offset = streamer_read_uhwi (ib);
6177 1233 : av->index = streamer_read_uhwi (ib);
6178 :
6179 1233 : bitpack_d bp = streamer_read_bitpack (ib);
6180 1233 : av->by_ref = bp_unpack_value (&bp, 1);
6181 1233 : av->killed = bp_unpack_value (&bp, 1);
6182 : }
6183 : }
6184 :
6185 11971 : count = streamer_read_uhwi (ib);
6186 11971 : if (count > 0)
6187 : {
6188 10870 : vec_safe_grow_cleared (ts->m_vr, count, true);
6189 31313 : for (i = 0; i < count; i++)
6190 : {
6191 20443 : ipa_vr *parm_vr;
6192 20443 : parm_vr = &(*ts->m_vr)[i];
6193 20443 : parm_vr->streamer_read (ib, data_in);
6194 : }
6195 : }
6196 11971 : }
6197 :
6198 :
6199 : /* Write all aggregate replacement for nodes in set. */
6200 :
6201 : void
6202 8353 : ipcp_write_transformation_summaries (void)
6203 : {
6204 8353 : struct output_block *ob;
6205 8353 : unsigned int count = 0;
6206 8353 : lto_symtab_encoder_t encoder;
6207 :
6208 8353 : ob = create_output_block (LTO_section_ipcp_transform);
6209 8353 : encoder = ob->decl_state->symtab_node_encoder;
6210 8353 : ob->symbol = NULL;
6211 :
6212 232878 : for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
6213 : {
6214 108090 : toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
6215 108090 : cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
6216 108090 : if (!cnode)
6217 24857 : continue;
6218 83233 : ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
6219 83233 : if (useful_ipcp_transformation_info_p (ts)
6220 83233 : && lto_symtab_encoder_encode_body_p (encoder, cnode))
6221 11971 : count++;
6222 : }
6223 :
6224 8353 : streamer_write_uhwi (ob, count);
6225 :
6226 232878 : for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
6227 : {
6228 108090 : toplevel_node *tnode = lto_symtab_encoder_deref (encoder, i);
6229 108090 : cgraph_node *cnode = dyn_cast <cgraph_node *> (tnode);
6230 108090 : if (!cnode)
6231 24857 : continue;
6232 83233 : ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
6233 83233 : if (useful_ipcp_transformation_info_p (ts)
6234 83233 : && lto_symtab_encoder_encode_body_p (encoder, cnode))
6235 11971 : write_ipcp_transformation_info (ob, cnode, ts);
6236 : }
6237 8353 : ipa_write_return_summaries (ob);
6238 8353 : produce_asm (ob);
6239 8353 : destroy_output_block (ob);
6240 8353 : }
6241 :
6242 : /* Read replacements section in file FILE_DATA of length LEN with data
6243 : DATA. */
6244 :
6245 : static void
6246 8353 : read_replacements_section (struct lto_file_decl_data *file_data,
6247 : const char *data,
6248 : size_t len)
6249 : {
6250 8353 : const struct lto_function_header *header =
6251 : (const struct lto_function_header *) data;
6252 8353 : const int cfg_offset = sizeof (struct lto_function_header);
6253 8353 : const int main_offset = cfg_offset + header->cfg_size;
6254 8353 : const int string_offset = main_offset + header->main_size;
6255 8353 : class data_in *data_in;
6256 8353 : unsigned int i;
6257 8353 : unsigned int count;
6258 :
6259 8353 : lto_input_block ib_main ((const char *) data + main_offset,
6260 8353 : header->main_size, file_data);
6261 :
6262 8353 : data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6263 8353 : header->string_size, vNULL);
6264 8353 : count = streamer_read_uhwi (&ib_main);
6265 :
6266 20324 : for (i = 0; i < count; i++)
6267 : {
6268 11971 : unsigned int index;
6269 11971 : struct cgraph_node *node;
6270 11971 : lto_symtab_encoder_t encoder;
6271 :
6272 11971 : index = streamer_read_uhwi (&ib_main);
6273 11971 : encoder = file_data->symtab_node_encoder;
6274 11971 : node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
6275 : index));
6276 11971 : read_ipcp_transformation_info (&ib_main, node, data_in);
6277 : }
6278 8353 : ipa_read_return_summaries (&ib_main, file_data, data_in);
6279 8353 : lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
6280 : len);
6281 8353 : lto_data_in_delete (data_in);
6282 8353 : }
6283 :
6284 : /* Read IPA-CP aggregate replacements. */
6285 :
6286 : void
6287 8353 : ipcp_read_transformation_summaries (void)
6288 : {
6289 8353 : struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
6290 8353 : struct lto_file_decl_data *file_data;
6291 8353 : unsigned int j = 0;
6292 :
6293 25059 : while ((file_data = file_data_vec[j++]))
6294 : {
6295 8353 : size_t len;
6296 8353 : const char *data
6297 8353 : = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
6298 : &len);
6299 8353 : if (data)
6300 8353 : read_replacements_section (file_data, data, len);
6301 : }
6302 8353 : }
6303 :
6304 : /* Adjust the aggregate replacements in TS to reflect any parameter removals
6305 : which might have already taken place. If after adjustments there are no
6306 : aggregate replacements left, the m_agg_values will be set to NULL. In other
6307 : cases, it may be shrunk. */
6308 :
6309 : static void
6310 1905 : adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
6311 : {
6312 1905 : clone_info *cinfo = clone_info::get (node);
6313 1905 : if (!cinfo || !cinfo->param_adjustments)
6314 : return;
6315 :
6316 927 : auto_vec<int, 16> new_indices;
6317 927 : cinfo->param_adjustments->get_updated_indices (&new_indices);
6318 927 : bool removed_item = false;
6319 927 : unsigned dst_index = 0;
6320 927 : unsigned count = ts->m_agg_values->length ();
6321 5092 : for (unsigned i = 0; i < count; i++)
6322 : {
6323 4165 : ipa_argagg_value *v = &(*ts->m_agg_values)[i];
6324 4165 : gcc_checking_assert (v->index >= 0);
6325 :
6326 4165 : int new_idx = -1;
6327 4165 : if ((unsigned) v->index < new_indices.length ())
6328 2513 : new_idx = new_indices[v->index];
6329 :
6330 2513 : if (new_idx >= 0)
6331 : {
6332 1674 : v->index = new_idx;
6333 1674 : if (removed_item)
6334 23 : (*ts->m_agg_values)[dst_index] = *v;
6335 1674 : dst_index++;
6336 : }
6337 : else
6338 : removed_item = true;
6339 : }
6340 :
6341 927 : if (dst_index == 0)
6342 : {
6343 556 : ggc_free (ts->m_agg_values);
6344 556 : ts->m_agg_values = NULL;
6345 : }
6346 371 : else if (removed_item)
6347 35 : ts->m_agg_values->truncate (dst_index);
6348 :
6349 927 : return;
6350 927 : }
6351 :
6352 : /* Dominator walker driving the ipcp modification phase. */
6353 :
6354 : class ipcp_modif_dom_walker : public dom_walker
6355 : {
6356 : public:
6357 1349 : ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
6358 : vec<ipa_param_descriptor, va_gc> *descs,
6359 : ipcp_transformation *ts, bool *sc)
6360 2698 : : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
6361 1349 : m_ts (ts), m_something_changed (sc) {}
6362 :
6363 : edge before_dom_children (basic_block) final override;
6364 1349 : bool cleanup_eh ()
6365 1349 : { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
6366 :
6367 : private:
6368 : struct ipa_func_body_info *m_fbi;
6369 : vec<ipa_param_descriptor, va_gc> *m_descriptors;
6370 : ipcp_transformation *m_ts;
6371 : bool *m_something_changed;
6372 : auto_bitmap m_need_eh_cleanup;
6373 : };
6374 :
6375 : edge
6376 22976 : ipcp_modif_dom_walker::before_dom_children (basic_block bb)
6377 : {
6378 22976 : gimple_stmt_iterator gsi;
6379 127865 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6380 : {
6381 81913 : gimple *stmt = gsi_stmt (gsi);
6382 81913 : tree rhs, val, t;
6383 81913 : HOST_WIDE_INT bit_offset;
6384 81913 : poly_int64 size;
6385 81913 : int index;
6386 81913 : bool by_ref, vce;
6387 :
6388 81913 : if (!gimple_assign_load_p (stmt))
6389 80341 : continue;
6390 12100 : rhs = gimple_assign_rhs1 (stmt);
6391 12100 : if (!is_gimple_reg_type (TREE_TYPE (rhs)))
6392 647 : continue;
6393 :
6394 26845 : vce = false;
6395 : t = rhs;
6396 26845 : while (handled_component_p (t))
6397 : {
6398 : /* V_C_E can do things like convert an array of integers to one
6399 : bigger integer and similar things we do not handle below. */
6400 15392 : if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
6401 : {
6402 : vce = true;
6403 : break;
6404 : }
6405 15392 : t = TREE_OPERAND (t, 0);
6406 : }
6407 11453 : if (vce)
6408 0 : continue;
6409 :
6410 11453 : if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
6411 : &bit_offset, &size, &by_ref))
6412 8375 : continue;
6413 3078 : unsigned unit_offset = bit_offset / BITS_PER_UNIT;
6414 3078 : ipa_argagg_value_list avl (m_ts);
6415 3078 : tree v = avl.get_value (index, unit_offset, by_ref);
6416 :
6417 4584 : if (!v
6418 3078 : || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
6419 1506 : continue;
6420 :
6421 1572 : gcc_checking_assert (is_gimple_ip_invariant (v));
6422 1572 : if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
6423 : {
6424 0 : if (fold_convertible_p (TREE_TYPE (rhs), v))
6425 0 : val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
6426 0 : else if (TYPE_SIZE (TREE_TYPE (rhs))
6427 0 : == TYPE_SIZE (TREE_TYPE (v)))
6428 0 : val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
6429 : else
6430 : {
6431 0 : if (dump_file)
6432 : {
6433 0 : fprintf (dump_file, " const ");
6434 0 : print_generic_expr (dump_file, v);
6435 0 : fprintf (dump_file, " can't be converted to type of ");
6436 0 : print_generic_expr (dump_file, rhs);
6437 0 : fprintf (dump_file, "\n");
6438 : }
6439 0 : continue;
6440 : }
6441 : }
6442 : else
6443 : val = v;
6444 :
6445 1572 : if (dump_file && (dump_flags & TDF_DETAILS))
6446 : {
6447 41 : fprintf (dump_file, "Modifying stmt:\n ");
6448 41 : print_gimple_stmt (dump_file, stmt, 0);
6449 : }
6450 1572 : gimple_assign_set_rhs_from_tree (&gsi, val);
6451 1572 : update_stmt (stmt);
6452 :
6453 1572 : if (dump_file && (dump_flags & TDF_DETAILS))
6454 : {
6455 41 : fprintf (dump_file, "into:\n ");
6456 41 : print_gimple_stmt (dump_file, stmt, 0);
6457 41 : fprintf (dump_file, "\n");
6458 : }
6459 :
6460 1572 : *m_something_changed = true;
6461 1572 : if (maybe_clean_eh_stmt (stmt))
6462 9 : bitmap_set_bit (m_need_eh_cleanup, bb->index);
6463 : }
6464 22976 : return NULL;
6465 : }
6466 :
6467 : /* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
6468 : - whether passed by reference or not is given by BY_REF - return that
6469 : constant. Otherwise return NULL_TREE. The is supposed to be used only
6470 : after clone materialization and transformation is done (because it asserts
6471 : that killed constants have been pruned). */
6472 :
6473 : tree
6474 4222351 : ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
6475 : HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
6476 : {
6477 4222351 : cgraph_node *node = cgraph_node::get (func->decl);
6478 4222351 : ipcp_transformation *ts = ipcp_get_transformation_summary (node);
6479 :
6480 4222351 : if (!ts || !ts->m_agg_values)
6481 : return NULL_TREE;
6482 :
6483 10413 : int index = ts->get_param_index (func->decl, parm);
6484 10413 : if (index < 0)
6485 : return NULL_TREE;
6486 :
6487 10360 : ipa_argagg_value_list avl (ts);
6488 10360 : unsigned unit_offset = bit_offset / BITS_PER_UNIT;
6489 10360 : const ipa_argagg_value *av = avl.get_elt (index, unit_offset);
6490 10360 : if (!av || av->by_ref != by_ref)
6491 : return NULL_TREE;
6492 1924 : gcc_assert (!av->killed);
6493 1924 : tree v = av->value;
6494 1924 : if (!v
6495 1924 : || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
6496 688 : return NULL_TREE;
6497 :
6498 : return v;
6499 : }
6500 :
6501 : /* Return true if we have recorded VALUE and MASK about PARM.
6502 : Set VALUE and MASk accordingly. */
6503 :
6504 : bool
6505 7588480 : ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
6506 : {
6507 7588480 : cgraph_node *cnode = cgraph_node::get (current_function_decl);
6508 7588480 : ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
6509 7588480 : if (!ts
6510 123061 : || vec_safe_length (ts->m_vr) == 0
6511 7745498 : || !ipa_vr_supported_type_p (TREE_TYPE (parm)))
6512 : return false;
6513 :
6514 115682 : int i = ts->get_param_index (current_function_decl, parm);
6515 115682 : if (i < 0)
6516 : return false;
6517 114287 : clone_info *cinfo = clone_info::get (cnode);
6518 114287 : if (cinfo && cinfo->param_adjustments)
6519 : {
6520 33504 : i = cinfo->param_adjustments->get_original_index (i);
6521 33504 : if (i < 0)
6522 : return false;
6523 : }
6524 :
6525 105368 : vec<ipa_vr, va_gc> &vr = *ts->m_vr;
6526 105368 : if (!vr[i].known_p ())
6527 : return false;
6528 84007 : value_range tmp;
6529 84007 : vr[i].get_vrange (tmp);
6530 84007 : if (tmp.undefined_p () || tmp.varying_p ())
6531 : return false;
6532 84007 : irange_bitmask bm;
6533 84007 : bm = tmp.get_bitmask ();
6534 84007 : *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
6535 84007 : *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
6536 84007 : return true;
6537 84007 : }
6538 :
6539 : /* Update value range of formal parameters of NODE as described in TS. */
6540 :
6541 : static void
6542 23207 : ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
6543 : {
6544 23207 : if (vec_safe_is_empty (ts->m_vr))
6545 549 : return;
6546 22658 : const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
6547 22658 : unsigned count = vr.length ();
6548 22658 : if (!count)
6549 : return;
6550 :
6551 22658 : auto_vec<int, 16> new_indices;
6552 22658 : bool need_remapping = false;
6553 22658 : clone_info *cinfo = clone_info::get (node);
6554 22658 : if (cinfo && cinfo->param_adjustments)
6555 : {
6556 7942 : cinfo->param_adjustments->get_updated_indices (&new_indices);
6557 7942 : need_remapping = true;
6558 : }
6559 22658 : auto_vec <tree, 16> parm_decls;
6560 22658 : push_function_arg_decls (&parm_decls, node->decl);
6561 :
6562 79451 : for (unsigned i = 0; i < count; ++i)
6563 : {
6564 56793 : tree parm;
6565 56793 : int remapped_idx;
6566 56793 : if (need_remapping)
6567 : {
6568 23615 : if (i >= new_indices.length ())
6569 11573 : continue;
6570 12042 : remapped_idx = new_indices[i];
6571 12042 : if (remapped_idx < 0)
6572 2553 : continue;
6573 : }
6574 : else
6575 33178 : remapped_idx = i;
6576 :
6577 42667 : parm = parm_decls[remapped_idx];
6578 :
6579 42667 : gcc_checking_assert (parm);
6580 42667 : tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
6581 :
6582 42667 : if (!ddef || !is_gimple_reg (parm))
6583 6554 : continue;
6584 :
6585 36113 : if (vr[i].known_p ())
6586 : {
6587 28012 : value_range tmp;
6588 28012 : vr[i].get_vrange (tmp);
6589 :
6590 28012 : if (!tmp.undefined_p () && !tmp.varying_p ())
6591 : {
6592 28012 : if (dump_file)
6593 : {
6594 112 : fprintf (dump_file, "Setting value range of param %u "
6595 : "(now %i) ", i, remapped_idx);
6596 112 : tmp.dump (dump_file);
6597 112 : fprintf (dump_file, "]\n");
6598 : }
6599 28012 : set_range_info (ddef, tmp);
6600 :
6601 43224 : if (POINTER_TYPE_P (TREE_TYPE (parm))
6602 31254 : && opt_for_fn (node->decl, flag_ipa_bit_cp))
6603 : {
6604 16041 : irange_bitmask bm = tmp.get_bitmask ();
6605 16041 : unsigned tem = bm.mask ().to_uhwi ();
6606 16041 : unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
6607 16041 : unsigned align = tem & -tem;
6608 16041 : unsigned misalign = bitpos & (align - 1);
6609 :
6610 16041 : if (align > 1)
6611 : {
6612 13235 : if (dump_file)
6613 : {
6614 85 : fprintf (dump_file,
6615 : "Adjusting mask for param %u to ", i);
6616 85 : print_hex (bm.mask (), dump_file);
6617 85 : fprintf (dump_file, "\n");
6618 : }
6619 :
6620 13235 : if (dump_file)
6621 85 : fprintf (dump_file,
6622 : "Adjusting align: %u, misalign: %u\n",
6623 : align, misalign);
6624 :
6625 13235 : unsigned old_align, old_misalign;
6626 13235 : struct ptr_info_def *pi = get_ptr_info (ddef);
6627 13235 : bool old_known = get_ptr_info_alignment (pi, &old_align,
6628 : &old_misalign);
6629 :
6630 13235 : if (old_known && old_align > align)
6631 : {
6632 0 : if (dump_file)
6633 : {
6634 0 : fprintf (dump_file,
6635 : "But alignment was already %u.\n",
6636 : old_align);
6637 0 : if ((old_misalign & (align - 1)) != misalign)
6638 0 : fprintf (dump_file,
6639 : "old_misalign (%u) and misalign "
6640 : "(%u) mismatch\n",
6641 : old_misalign, misalign);
6642 : }
6643 0 : continue;
6644 : }
6645 :
6646 13235 : if (dump_file
6647 85 : && old_known
6648 0 : && ((misalign & (old_align - 1)) != old_misalign))
6649 0 : fprintf (dump_file,
6650 : "old_misalign (%u) and misalign (%u) "
6651 : "mismatch\n",
6652 : old_misalign, misalign);
6653 :
6654 13235 : set_ptr_info_alignment (pi, align, misalign);
6655 : }
6656 16041 : }
6657 11971 : else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
6658 : {
6659 23 : irange &r = as_a<irange> (tmp);
6660 23 : irange_bitmask bm = r.get_bitmask ();
6661 23 : unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
6662 23 : if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
6663 : {
6664 16 : fprintf (dump_file,
6665 : "Adjusting mask for param %u to ", i);
6666 16 : print_hex (bm.mask (), dump_file);
6667 16 : fprintf (dump_file, "\n");
6668 : }
6669 23 : }
6670 : }
6671 28012 : }
6672 : }
6673 22658 : }
6674 :
6675 : /* IPCP transformation phase doing propagation of aggregate values. */
6676 :
6677 : unsigned int
6678 968676 : ipcp_transform_function (struct cgraph_node *node)
6679 : {
6680 968676 : struct ipa_func_body_info fbi;
6681 968676 : int param_count;
6682 :
6683 968676 : gcc_checking_assert (cfun);
6684 968676 : gcc_checking_assert (current_function_decl);
6685 :
6686 968676 : if (dump_file)
6687 693 : fprintf (dump_file, "Modification phase of node %s\n",
6688 : node->dump_name ());
6689 :
6690 968676 : ipcp_transformation *ts = ipcp_get_transformation_summary (node);
6691 968676 : if (!ts
6692 968676 : || (vec_safe_is_empty (ts->m_agg_values)
6693 22222 : && vec_safe_is_empty (ts->m_vr)))
6694 : return 0;
6695 :
6696 23207 : ts->maybe_create_parm_idx_map (cfun->decl);
6697 23207 : ipcp_update_vr (node, ts);
6698 969510 : if (vec_safe_is_empty (ts->m_agg_values))
6699 : return 0;
6700 2183 : param_count = count_formal_params (node->decl);
6701 2183 : if (param_count == 0)
6702 : return 0;
6703 :
6704 1905 : adjust_agg_replacement_values (node, ts);
6705 1905 : if (vec_safe_is_empty (ts->m_agg_values))
6706 : {
6707 556 : if (dump_file)
6708 4 : fprintf (dump_file, " All affected aggregate parameters were either "
6709 : "removed or converted into scalars, phase done.\n");
6710 556 : return 0;
6711 : }
6712 1349 : if (dump_file)
6713 : {
6714 49 : fprintf (dump_file, " Aggregate replacements:");
6715 49 : ipa_argagg_value_list avs (ts);
6716 49 : avs.dump (dump_file);
6717 : }
6718 :
6719 1349 : fbi.node = node;
6720 1349 : fbi.info = NULL;
6721 1349 : fbi.bb_infos = vNULL;
6722 1349 : fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
6723 1349 : fbi.param_count = param_count;
6724 1349 : fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
6725 :
6726 1349 : vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
6727 1349 : vec_safe_grow_cleared (descriptors, param_count, true);
6728 1349 : ipa_populate_param_decls (node, *descriptors);
6729 1349 : bool modified_mem_access = false;
6730 1349 : calculate_dominance_info (CDI_DOMINATORS);
6731 1349 : ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
6732 1349 : walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6733 1349 : free_dominance_info (CDI_DOMINATORS);
6734 1349 : bool cfg_changed = walker.cleanup_eh ();
6735 :
6736 1349 : int i;
6737 1349 : struct ipa_bb_info *bi;
6738 27023 : FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
6739 48650 : free_ipa_bb_info (bi);
6740 1349 : fbi.bb_infos.release ();
6741 :
6742 1349 : ts->remove_argaggs_if ([](const ipa_argagg_value &v)
6743 : {
6744 5079 : return v.killed;
6745 : });
6746 :
6747 1349 : vec_free (descriptors);
6748 1349 : if (cfg_changed)
6749 1 : delete_unreachable_blocks_update_callgraph (node, false);
6750 :
6751 1349 : return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
6752 1349 : }
6753 :
6754 : /* Record that current function return value range is VAL. */
6755 :
6756 : void
6757 744991 : ipa_record_return_value_range (value_range val)
6758 : {
6759 744991 : ipa_record_return_value_range_1
6760 744991 : (cgraph_node::get (current_function_decl), val);
6761 744991 : }
6762 :
6763 : /* Return true if value range of DECL is known and if so initialize RANGE. */
6764 :
6765 : bool
6766 12036529 : ipa_return_value_range (value_range &range, tree decl)
6767 : {
6768 12036529 : cgraph_node *n = cgraph_node::get (decl);
6769 12036529 : if (!n || !ipa_return_value_sum)
6770 : return false;
6771 9801997 : enum availability avail;
6772 9801997 : n = n->ultimate_alias_target (&avail);
6773 9801997 : if (avail < AVAIL_AVAILABLE)
6774 : return false;
6775 2113902 : if (n->decl != decl && !useless_type_conversion_p (TREE_TYPE (decl), TREE_TYPE (n->decl)))
6776 : return false;
6777 2113902 : ipa_return_value_summary *v = ipa_return_value_sum->get (n);
6778 2113902 : if (!v)
6779 : return false;
6780 634618 : v->vr->get_vrange (range);
6781 634618 : return true;
6782 : }
6783 :
6784 : /* Reset all state within ipa-prop.cc so that we can rerun the compiler
6785 : within the same process. For use by toplev::finalize. */
6786 :
6787 : void
6788 259496 : ipa_prop_cc_finalize (void)
6789 : {
6790 259496 : if (function_insertion_hook_holder)
6791 12168 : symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
6792 259496 : function_insertion_hook_holder = NULL;
6793 :
6794 259496 : if (ipa_edge_args_sum)
6795 12494 : ggc_delete (ipa_edge_args_sum);
6796 259496 : ipa_edge_args_sum = NULL;
6797 :
6798 259496 : if (ipa_node_params_sum)
6799 12494 : ggc_delete (ipa_node_params_sum);
6800 259496 : ipa_node_params_sum = NULL;
6801 259496 : }
6802 :
6803 : /* Return true if the two pass_through components of two jump functions are
6804 : known to be equivalent. AGG_JF denotes whether they are part of aggregate
6805 : functions or not. The function can be used before the IPA phase of IPA-CP
6806 : or inlining because it cannot cope with refdesc changes these passes can
6807 : carry out. */
6808 :
6809 : static bool
6810 43977 : ipa_agg_pass_through_jf_equivalent_p (ipa_pass_through_data *ipt1,
6811 : ipa_pass_through_data *ipt2,
6812 : bool agg_jf)
6813 :
6814 : {
6815 43977 : gcc_assert (agg_jf ||
6816 : (!ipt1->refdesc_decremented && !ipt2->refdesc_decremented));
6817 43977 : if (ipt1->operation != ipt2->operation
6818 43977 : || ipt1->formal_id != ipt2->formal_id
6819 43977 : || (!agg_jf && (ipt1->agg_preserved != ipt2->agg_preserved)))
6820 : return false;
6821 43977 : if (ipt1->operation != NOP_EXPR
6822 43977 : && (TYPE_MAIN_VARIANT (ipt1->op_type)
6823 6387 : != TYPE_MAIN_VARIANT (ipt2->op_type)))
6824 : return false;
6825 43969 : if (((ipt1->operand != NULL_TREE) != (ipt2->operand != NULL_TREE))
6826 43969 : || (ipt1->operand
6827 6379 : && !values_equal_for_ipcp_p (ipt1->operand, ipt2->operand)))
6828 0 : return false;
6829 : return true;
6830 : }
6831 :
6832 : /* Return true if the two aggregate jump functions are known to be equivalent.
6833 : The function can be used before the IPA phase of IPA-CP or inlining because
6834 : it cannot cope with refdesc changes these passes can carry out. */
6835 :
6836 : static bool
6837 3615 : ipa_agg_jump_functions_equivalent_p (ipa_agg_jf_item *ajf1,
6838 : ipa_agg_jf_item *ajf2)
6839 : {
6840 3615 : if (ajf1->offset != ajf2->offset
6841 3615 : || ajf1->jftype != ajf2->jftype
6842 7230 : || !types_compatible_p (ajf1->type, ajf2->type))
6843 0 : return false;
6844 :
6845 3615 : switch (ajf1->jftype)
6846 : {
6847 2082 : case IPA_JF_CONST:
6848 2082 : if (!values_equal_for_ipcp_p (ajf1->value.constant,
6849 : ajf2->value.constant))
6850 : return false;
6851 : break;
6852 766 : case IPA_JF_PASS_THROUGH:
6853 766 : {
6854 766 : ipa_pass_through_data *ipt1 = &ajf1->value.pass_through;
6855 766 : ipa_pass_through_data *ipt2 = &ajf2->value.pass_through;
6856 766 : if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, true))
6857 : return false;
6858 : }
6859 : break;
6860 767 : case IPA_JF_LOAD_AGG:
6861 767 : {
6862 767 : ipa_load_agg_data *ila1 = &ajf1->value.load_agg;
6863 767 : ipa_load_agg_data *ila2 = &ajf2->value.load_agg;
6864 767 : if (!ipa_agg_pass_through_jf_equivalent_p (&ila1->pass_through,
6865 : &ila2->pass_through, true))
6866 : return false;
6867 767 : if (ila1->offset != ila2->offset
6868 767 : || ila1->by_ref != ila2->by_ref
6869 1534 : || !types_compatible_p (ila1->type, ila2->type))
6870 0 : return false;
6871 : }
6872 : break;
6873 0 : default:
6874 0 : gcc_unreachable ();
6875 : }
6876 : return true;
6877 : }
6878 :
6879 : /* Return true if the two jump functions are known to be equivalent. The
6880 : function can be used before the IPA phase of IPA-CP or inlining because it
6881 : cannot cope with refdesc changes these passes can carry out. */
6882 :
6883 : bool
6884 121674 : ipa_jump_functions_equivalent_p (ipa_jump_func *jf1, ipa_jump_func *jf2)
6885 : {
6886 121674 : if (jf1->type != jf2->type)
6887 : return false;
6888 :
6889 121674 : switch (jf1->type)
6890 : {
6891 : case IPA_JF_UNKNOWN:
6892 : break;
6893 24422 : case IPA_JF_CONST:
6894 24422 : {
6895 24422 : tree cst1 = ipa_get_jf_constant (jf1);
6896 24422 : tree cst2 = ipa_get_jf_constant (jf2);
6897 24422 : if (!values_equal_for_ipcp_p (cst1, cst2))
6898 : return false;
6899 :
6900 24421 : ipa_cst_ref_desc *rd1 = jfunc_rdesc_usable (jf1);
6901 24421 : ipa_cst_ref_desc *rd2 = jfunc_rdesc_usable (jf2);
6902 24421 : if (rd1 && rd2)
6903 : {
6904 466 : gcc_assert (rd1->refcount == 1
6905 : && rd2->refcount == 1);
6906 466 : gcc_assert (!rd1->next_duplicate && !rd2->next_duplicate);
6907 : }
6908 23955 : else if (rd1)
6909 : return false;
6910 23955 : else if (rd2)
6911 : return false;
6912 : }
6913 : break;
6914 42444 : case IPA_JF_PASS_THROUGH:
6915 42444 : {
6916 42444 : ipa_pass_through_data *ipt1 = &jf1->value.pass_through;
6917 42444 : ipa_pass_through_data *ipt2 = &jf2->value.pass_through;
6918 42444 : if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, false))
6919 : return false;
6920 : }
6921 : break;
6922 15020 : case IPA_JF_ANCESTOR:
6923 15020 : {
6924 15020 : ipa_ancestor_jf_data *ia1 = &jf1->value.ancestor;
6925 15020 : ipa_ancestor_jf_data *ia2 = &jf2->value.ancestor;
6926 :
6927 15020 : if (ia1->formal_id != ia2->formal_id
6928 15020 : || ia1->agg_preserved != ia2->agg_preserved
6929 15020 : || ia1->keep_null != ia2->keep_null
6930 15020 : || ia1->offset != ia2->offset)
6931 : return false;
6932 : }
6933 : break;
6934 0 : default:
6935 0 : gcc_unreachable ();
6936 : }
6937 :
6938 121665 : if (((jf1->m_vr != nullptr) != (jf2->m_vr != nullptr))
6939 121665 : || (jf1->m_vr && !jf1->m_vr->equal_p (*jf2->m_vr)))
6940 6 : return false;
6941 :
6942 121659 : unsigned alen = vec_safe_length (jf1->agg.items);
6943 124046 : if (vec_safe_length (jf2->agg.items) != alen)
6944 : return false;
6945 :
6946 121658 : if (!alen)
6947 : return true;
6948 :
6949 2387 : if (jf1->agg.by_ref != jf2->agg.by_ref)
6950 : return false;
6951 :
6952 6002 : for (unsigned i = 0 ; i < alen; i++)
6953 3615 : if (!ipa_agg_jump_functions_equivalent_p (&(*jf1->agg.items)[i],
6954 3615 : &(*jf2->agg.items)[i]))
6955 : return false;
6956 :
6957 : return true;
6958 : }
6959 :
6960 : #include "gt-ipa-prop.h"
|