LCOV - code coverage report
Current view: top level - gcc - tree-inline.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 88.4 % 3296 2914
Test Date: 2026-02-28 14:20:25 Functions: 94.4 % 90 85
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Tree inlining.
       2              :    Copyright (C) 2001-2026 Free Software Foundation, Inc.
       3              :    Contributed by Alexandre Oliva <aoliva@redhat.com>
       4              : 
       5              : This file is part of GCC.
       6              : 
       7              : GCC is free software; you can redistribute it and/or modify
       8              : it under the terms of the GNU General Public License as published by
       9              : the Free Software Foundation; either version 3, or (at your option)
      10              : any later version.
      11              : 
      12              : GCC is distributed in the hope that it will be useful,
      13              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      14              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      15              : GNU General Public License for more details.
      16              : 
      17              : You should have received a copy of the GNU General Public License
      18              : along with GCC; see the file COPYING3.  If not see
      19              : <http://www.gnu.org/licenses/>.  */
      20              : 
      21              : #include "config.h"
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "backend.h"
      25              : #include "target.h"
      26              : #include "rtl.h"
      27              : #include "tree.h"
      28              : #include "gimple.h"
      29              : #include "cfghooks.h"
      30              : #include "tree-pass.h"
      31              : #include "ssa.h"
      32              : #include "cgraph.h"
      33              : #include "tree-pretty-print.h"
      34              : #include "diagnostic-core.h"
      35              : #include "gimple-predict.h"
      36              : #include "fold-const.h"
      37              : #include "stor-layout.h"
      38              : #include "calls.h"
      39              : #include "tree-inline.h"
      40              : #include "langhooks.h"
      41              : #include "cfganal.h"
      42              : #include "tree-iterator.h"
      43              : #include "intl.h"
      44              : #include "gimple-iterator.h"
      45              : #include "gimple-fold.h"
      46              : #include "tree-eh.h"
      47              : #include "gimplify.h"
      48              : #include "gimplify-me.h"
      49              : #include "gimple-walk.h"
      50              : #include "tree-cfg.h"
      51              : #include "tree-into-ssa.h"
      52              : #include "tree-dfa.h"
      53              : #include "tree-ssa.h"
      54              : #include "except.h"
      55              : #include "debug.h"
      56              : #include "value-prof.h"
      57              : #include "cfgloop.h"
      58              : #include "builtins.h"
      59              : #include "stringpool.h"
      60              : #include "attribs.h"
      61              : #include "sreal.h"
      62              : #include "tree-cfgcleanup.h"
      63              : #include "tree-ssa-live.h"
      64              : #include "alloc-pool.h"
      65              : #include "symbol-summary.h"
      66              : #include "symtab-thunks.h"
      67              : #include "symtab-clones.h"
      68              : #include "asan.h"
      69              : 
      70              : /* I'm not real happy about this, but we need to handle gimple and
      71              :    non-gimple trees.  */
      72              : 
      73              : /* Inlining, Cloning, Versioning, Parallelization
      74              : 
      75              :    Inlining: a function body is duplicated, but the PARM_DECLs are
      76              :    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
      77              :    MODIFY_EXPRs that store to a dedicated returned-value variable.
      78              :    The duplicated eh_region info of the copy will later be appended
      79              :    to the info for the caller; the eh_region info in copied throwing
      80              :    statements and RESX statements are adjusted accordingly.
      81              : 
      82              :    Cloning: (only in C++) We have one body for a con/de/structor, and
      83              :    multiple function decls, each with a unique parameter list.
      84              :    Duplicate the body, using the given splay tree; some parameters
      85              :    will become constants (like 0 or 1).
      86              : 
      87              :    Versioning: a function body is duplicated and the result is a new
      88              :    function rather than into blocks of an existing function as with
      89              :    inlining.  Some parameters will become constants.
      90              : 
      91              :    Parallelization: a region of a function is duplicated resulting in
      92              :    a new function.  Variables may be replaced with complex expressions
      93              :    to enable shared variable semantics.
      94              : 
      95              :    All of these will simultaneously lookup any callgraph edges.  If
      96              :    we're going to inline the duplicated function body, and the given
      97              :    function has some cloned callgraph nodes (one for each place this
      98              :    function will be inlined) those callgraph edges will be duplicated.
      99              :    If we're cloning the body, those callgraph edges will be
     100              :    updated to point into the new body.  (Note that the original
     101              :    callgraph node and edge list will not be altered.)
     102              : 
     103              :    See the CALL_EXPR handling case in copy_tree_body_r ().  */
     104              : 
     105              : /* To Do:
     106              : 
     107              :    o In order to make inlining-on-trees work, we pessimized
     108              :      function-local static constants.  In particular, they are now
     109              :      always output, even when not addressed.  Fix this by treating
     110              :      function-local static constants just like global static
     111              :      constants; the back-end already knows not to output them if they
     112              :      are not needed.
     113              : 
     114              :    o Provide heuristics to clamp inlining of recursive template
     115              :      calls?  */
     116              : 
     117              : 
     118              : /* Weights that estimate_num_insns uses to estimate the size of the
     119              :    produced code.  */
     120              : 
     121              : eni_weights eni_size_weights;
     122              : 
     123              : /* Weights that estimate_num_insns uses to estimate the time necessary
     124              :    to execute the produced code.  */
     125              : 
     126              : eni_weights eni_time_weights;
     127              : 
     128              : /* Prototypes.  */
     129              : 
     130              : static tree declare_return_variable (copy_body_data *, tree, tree,
     131              :                                      basic_block);
     132              : static void remap_block (tree *, copy_body_data *);
     133              : static void copy_bind_expr (tree *, int *, copy_body_data *);
     134              : static void declare_inline_vars (tree, tree);
     135              : static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
     136              : static void prepend_lexical_block (tree current_block, tree new_block);
     137              : static tree copy_result_decl_to_var (tree, copy_body_data *);
     138              : static tree copy_decl_maybe_to_var (tree, copy_body_data *);
     139              : static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
     140              : static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
     141              : 
     142              : /* Insert a tree->tree mapping for ID.  Despite the name suggests
     143              :    that the trees should be variables, it is used for more than that.  */
     144              : 
     145              : void
     146    723791158 : insert_decl_map (copy_body_data *id, tree key, tree value)
     147              : {
     148    723791158 :   id->decl_map->put (key, value);
     149              : 
     150              :   /* Always insert an identity map as well.  If we see this same new
     151              :      node again, we won't want to duplicate it a second time.  */
     152    723791158 :   if (key != value && value)
     153    211102431 :     id->decl_map->put (value, value);
     154    723791158 : }
     155              : 
     156              : /* If nonzero, we're remapping the contents of inlined debug
     157              :    statements.  If negative, an error has occurred, such as a
     158              :    reference to a variable that isn't available in the inlined
     159              :    context.  */
     160              : static int processing_debug_stmt = 0;
     161              : 
     162              : /* Construct new SSA name for old NAME. ID is the inline context.  */
     163              : 
     164              : static tree
     165     66369125 : remap_ssa_name (tree name, copy_body_data *id)
     166              : {
     167     66369125 :   tree new_tree, var;
     168     66369125 :   tree *n;
     169              : 
     170     66369125 :   gcc_assert (TREE_CODE (name) == SSA_NAME);
     171              : 
     172     66369125 :   n = id->decl_map->get (name);
     173     66369125 :   if (n)
     174              :     {
     175              :       /* When we perform edge redirection as part of CFG copy, IPA-SRA can
     176              :          remove an unused LHS from a call statement.  Such LHS can however
     177              :          still appear in debug statements, but their value is lost in this
     178              :          function and we do not want to map them.  */
     179     48724117 :       if (id->killed_new_ssa_names
     180     48724117 :           && id->killed_new_ssa_names->contains (*n))
     181              :         {
     182          939 :           gcc_assert (processing_debug_stmt);
     183          939 :           processing_debug_stmt = -1;
     184          939 :           return name;
     185              :         }
     186              : 
     187     48723178 :       return unshare_expr (*n);
     188              :     }
     189              : 
     190     17645008 :   if (processing_debug_stmt)
     191              :     {
     192        93601 :       if (SSA_NAME_IS_DEFAULT_DEF (name)
     193        93558 :           && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
     194        93511 :           && id->entry_bb == NULL
     195       187111 :           && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
     196              :         {
     197        93510 :           gimple *def_temp;
     198        93510 :           gimple_stmt_iterator gsi;
     199        93510 :           tree val = SSA_NAME_VAR (name);
     200              : 
     201        93510 :           n = id->decl_map->get (val);
     202        93510 :           if (n != NULL)
     203        93510 :             val = *n;
     204        93510 :           if (TREE_CODE (val) != PARM_DECL
     205        93510 :               && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
     206              :             {
     207            0 :               processing_debug_stmt = -1;
     208            0 :               return name;
     209              :             }
     210        93510 :           n = id->decl_map->get (val);
     211        93510 :           if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
     212              :             return *n;
     213        32216 :           tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
     214              :           /* FIXME: Is setting the mode really necessary? */
     215        32216 :           SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
     216        32216 :           def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
     217        32216 :           gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
     218        32216 :           gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
     219        32216 :           insert_decl_map (id, val, vexpr);
     220        32216 :           return vexpr;
     221              :         }
     222              : 
     223           91 :       processing_debug_stmt = -1;
     224           91 :       return name;
     225              :     }
     226              : 
     227              :   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
     228     17551407 :   var = SSA_NAME_VAR (name);
     229      3510312 :   if (!var
     230      3510312 :       || (!SSA_NAME_IS_DEFAULT_DEF (name)
     231      3132236 :           && VAR_P (var)
     232      2947955 :           && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
     233      2947955 :           && DECL_ARTIFICIAL (var)
     234       467886 :           && DECL_IGNORED_P (var)
     235       139772 :           && !DECL_NAME (var)))
     236              :     {
     237     14046404 :       struct ptr_info_def *pi;
     238     14046404 :       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
     239     14046404 :       if (!var && SSA_NAME_IDENTIFIER (name))
     240      1921104 :         SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
     241     14046404 :       insert_decl_map (id, name, new_tree);
     242     28092808 :       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
     243     14046404 :         = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
     244              :       /* At least IPA points-to info can be directly transferred.  */
     245     14046404 :       if (id->src_cfun->gimple_df
     246     14046404 :           && id->src_cfun->gimple_df->ipa_pta
     247        14739 :           && POINTER_TYPE_P (TREE_TYPE (name))
     248         2595 :           && (pi = SSA_NAME_PTR_INFO (name))
     249     14048994 :           && !pi->pt.anything)
     250              :         {
     251         2569 :           struct ptr_info_def *new_pi = get_ptr_info (new_tree);
     252         2569 :           new_pi->pt = pi->pt;
     253              :         }
     254              :       /* So can range-info.  */
     255     23170998 :       if (!POINTER_TYPE_P (TREE_TYPE (name))
     256     22652986 :           && SSA_NAME_RANGE_INFO (name))
     257      2937738 :         duplicate_ssa_name_range_info (new_tree, name);
     258     14046404 :       return new_tree;
     259              :     }
     260              : 
     261              :   /* Do not set DEF_STMT yet as statement is not copied yet. We do that
     262              :      in copy_bb.  */
     263      3505003 :   new_tree = remap_decl (var, id);
     264              : 
     265              :   /* We might've substituted constant or another SSA_NAME for
     266              :      the variable.
     267              : 
     268              :      Replace the SSA name representing RESULT_DECL by variable during
     269              :      inlining:  this saves us from need to introduce PHI node in a case
     270              :      return value is just partly initialized.  */
     271       360287 :   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
     272      3865290 :       && (!SSA_NAME_VAR (name)
     273      3505003 :           || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
     274            0 :           || !id->transform_return_to_modify))
     275              :     {
     276      3505003 :       struct ptr_info_def *pi;
     277      3505003 :       new_tree = make_ssa_name (new_tree);
     278      3505003 :       insert_decl_map (id, name, new_tree);
     279      7010006 :       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
     280      3505003 :         = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
     281              :       /* At least IPA points-to info can be directly transferred.  */
     282      3505003 :       if (id->src_cfun->gimple_df
     283      3505003 :           && id->src_cfun->gimple_df->ipa_pta
     284         2523 :           && POINTER_TYPE_P (TREE_TYPE (name))
     285          288 :           && (pi = SSA_NAME_PTR_INFO (name))
     286      3505285 :           && !pi->pt.anything)
     287              :         {
     288          282 :           struct ptr_info_def *new_pi = get_ptr_info (new_tree);
     289          282 :           new_pi->pt = pi->pt;
     290              :         }
     291              :       /* So can range-info.  */
     292      6182463 :       if (!POINTER_TYPE_P (TREE_TYPE (name))
     293      6097484 :           && SSA_NAME_RANGE_INFO (name))
     294       928306 :         duplicate_ssa_name_range_info (new_tree, name);
     295      3505003 :       if (SSA_NAME_IS_DEFAULT_DEF (name))
     296              :         {
     297              :           /* By inlining function having uninitialized variable, we might
     298              :              extend the lifetime (variable might get reused).  This cause
     299              :              ICE in the case we end up extending lifetime of SSA name across
     300              :              abnormal edge, but also increase register pressure.
     301              : 
     302              :              We simply initialize all uninitialized vars by 0 except
     303              :              for case we are inlining to very first BB.  We can avoid
     304              :              this for all BBs that are not inside strongly connected
     305              :              regions of the CFG, but this is expensive to test.  */
     306       378076 :           if (id->entry_bb
     307        41735 :               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
     308            0 :               && (!SSA_NAME_VAR (name)
     309            0 :                   || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
     310       378076 :               && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
     311            0 :                                              0)->dest
     312            0 :                   || EDGE_COUNT (id->entry_bb->preds) != 1))
     313              :             {
     314            0 :               gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
     315            0 :               gimple *init_stmt;
     316            0 :               tree zero = build_zero_cst (TREE_TYPE (new_tree));
     317              : 
     318            0 :               init_stmt = gimple_build_assign (new_tree, zero);
     319            0 :               gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
     320            0 :               SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
     321              :             }
     322              :           else
     323              :             {
     324       378076 :               SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
     325       378076 :               set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
     326              :             }
     327              :         }
     328              :     }
     329              :   else
     330            0 :     insert_decl_map (id, name, new_tree);
     331              :   return new_tree;
     332              : }
     333              : 
     334              : /* Remap DECL during the copying of the BLOCK tree for the function.  */
     335              : 
     336              : tree
     337    368089920 : remap_decl (tree decl, copy_body_data *id)
     338              : {
     339    368089920 :   tree *n;
     340              : 
     341              :   /* We only remap local variables in the current function.  */
     342              : 
     343              :   /* See if we have remapped this declaration.  */
     344              : 
     345    368089920 :   n = id->decl_map->get (decl);
     346              : 
     347    368089920 :   if (!n && processing_debug_stmt)
     348              :     {
     349       573743 :       processing_debug_stmt = -1;
     350       573743 :       return decl;
     351              :     }
     352              : 
     353              :   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     354              :      necessary DECLs have already been remapped and we do not want to duplicate
     355              :      a decl coming from outside of the sequence we are copying.  */
     356    133288423 :   if (!n
     357    133288423 :       && id->prevent_decl_creation_for_types
     358            0 :       && id->remapping_type_depth > 0
     359            0 :       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
     360              :     return decl;
     361              : 
     362              :   /* If we didn't already have an equivalent for this declaration, create one
     363              :      now.  */
     364    367516177 :   if (!n)
     365              :     {
     366              :       /* Make a copy of the variable or label.  */
     367    133288423 :       tree t = id->copy_decl (decl, id);
     368              : 
     369              :       /* Remember it, so that if we encounter this local entity again
     370              :          we can reuse this copy.  Do this early because remap_type may
     371              :          need this decl for TYPE_STUB_DECL.  */
     372    133288423 :       insert_decl_map (id, decl, t);
     373              : 
     374    133288423 :       if (!DECL_P (t) || t == decl)
     375              :         return t;
     376              : 
     377              :       /* Remap types, if necessary.  */
     378    130707013 :       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
     379    130707013 :       if (TREE_CODE (t) == TYPE_DECL)
     380              :         {
     381      1154910 :           DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
     382              : 
     383              :           /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
     384              :              which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
     385              :              is not set on the TYPE_DECL, for example in LTO mode.  */
     386      1154910 :           if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
     387              :             {
     388           10 :               tree x = build_variant_type_copy (TREE_TYPE (t));
     389           10 :               TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
     390           10 :               TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
     391           10 :               DECL_ORIGINAL_TYPE (t) = x;
     392              :             }
     393              :         }
     394              : 
     395              :       /* Remap sizes as necessary.  */
     396    130707013 :       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
     397    130707013 :       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
     398              : 
     399              :       /* If fields, do likewise for offset and qualifier.  */
     400    130707013 :       if (TREE_CODE (t) == FIELD_DECL)
     401              :         {
     402          781 :           walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
     403          781 :           if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
     404            0 :             walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
     405              :         }
     406              : 
     407    130707013 :       return t;
     408              :     }
     409              : 
     410    234227754 :   if (id->do_not_unshare)
     411    158473064 :     return *n;
     412              :   else
     413     75754690 :     return unshare_expr (*n);
     414              : }
     415              : 
     416              : static tree
     417       101439 : remap_type_1 (tree type, copy_body_data *id)
     418              : {
     419       101439 :   tree new_tree, t;
     420              : 
     421              :   /* We do need a copy.  build and register it now.  If this is a pointer or
     422              :      reference type, remap the designated type and make a new pointer or
     423              :      reference type.  */
     424       101439 :   if (TREE_CODE (type) == POINTER_TYPE)
     425              :     {
     426        27938 :       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
     427        27938 :                                          TYPE_MODE (type),
     428        27938 :                                          TYPE_REF_CAN_ALIAS_ALL (type));
     429        27938 :       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
     430        14820 :         new_tree = build_type_attribute_qual_variant (new_tree,
     431        14820 :                                                       TYPE_ATTRIBUTES (type),
     432        14820 :                                                       TYPE_QUALS (type));
     433        27938 :       insert_decl_map (id, type, new_tree);
     434        27938 :       return new_tree;
     435              :     }
     436        73501 :   else if (TREE_CODE (type) == REFERENCE_TYPE)
     437              :     {
     438         7838 :       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
     439         7838 :                                             TYPE_MODE (type),
     440         7838 :                                             TYPE_REF_CAN_ALIAS_ALL (type));
     441         7838 :       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
     442         5438 :         new_tree = build_type_attribute_qual_variant (new_tree,
     443         5438 :                                                       TYPE_ATTRIBUTES (type),
     444         5438 :                                                       TYPE_QUALS (type));
     445         7838 :       insert_decl_map (id, type, new_tree);
     446         7838 :       return new_tree;
     447              :     }
     448              :   else
     449        65663 :     new_tree = copy_node (type);
     450              : 
     451        65663 :   insert_decl_map (id, type, new_tree);
     452              : 
     453              :   /* This is a new type, not a copy of an old type.  Need to reassociate
     454              :      variants.  We can handle everything except the main variant lazily.  */
     455        65663 :   t = TYPE_MAIN_VARIANT (type);
     456        65663 :   if (type != t)
     457              :     {
     458           87 :       t = remap_type (t, id);
     459           87 :       TYPE_MAIN_VARIANT (new_tree) = t;
     460           87 :       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
     461           87 :       TYPE_NEXT_VARIANT (t) = new_tree;
     462              :     }
     463              :   else
     464              :     {
     465        65576 :       TYPE_MAIN_VARIANT (new_tree) = new_tree;
     466        65576 :       TYPE_NEXT_VARIANT (new_tree) = NULL;
     467              :     }
     468              : 
     469        65663 :   if (TYPE_STUB_DECL (type))
     470          186 :     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
     471              : 
     472              :   /* Lazily create pointer and reference types.  */
     473        65663 :   TYPE_POINTER_TO (new_tree) = NULL;
     474        65663 :   TYPE_REFERENCE_TO (new_tree) = NULL;
     475              : 
     476              :   /* Copy all types that may contain references to local variables; be sure to
     477              :      preserve sharing in between type and its main variant when possible.  */
     478        65663 :   switch (TREE_CODE (new_tree))
     479              :     {
     480        31452 :     case INTEGER_TYPE:
     481        31452 :     case REAL_TYPE:
     482        31452 :     case FIXED_POINT_TYPE:
     483        31452 :     case ENUMERAL_TYPE:
     484        31452 :     case BOOLEAN_TYPE:
     485        31452 :       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
     486              :         {
     487            0 :           gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
     488            0 :           gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
     489              : 
     490            0 :           TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
     491            0 :           TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
     492              :         }
     493              :       else
     494              :         {
     495        31452 :           t = TYPE_MIN_VALUE (new_tree);
     496        31452 :           if (t && TREE_CODE (t) != INTEGER_CST)
     497            0 :             walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
     498              : 
     499        31452 :           t = TYPE_MAX_VALUE (new_tree);
     500        31452 :           if (t && TREE_CODE (t) != INTEGER_CST)
     501        31452 :             walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
     502              :         }
     503              :       return new_tree;
     504              : 
     505           16 :     case FUNCTION_TYPE:
     506           16 :       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
     507           16 :           && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
     508            0 :         TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
     509              :       else
     510           16 :         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
     511           16 :       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
     512           16 :           && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
     513            0 :         TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
     514              :       else
     515           16 :         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
     516              :       return new_tree;
     517              : 
     518        33960 :     case ARRAY_TYPE:
     519        33960 :       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
     520        33960 :           && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
     521           29 :         TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
     522              :       else
     523        33931 :         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
     524              : 
     525        33960 :       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
     526              :         {
     527           70 :           gcc_checking_assert (TYPE_DOMAIN (type)
     528              :                                == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
     529           70 :           TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
     530              :         }
     531              :       else
     532              :         {
     533        33890 :           TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
     534              :           /* For array bounds where we have decided not to copy over the bounds
     535              :              variable which isn't used in OpenMP/OpenACC region, change them to
     536              :              an uninitialized VAR_DECL temporary.  */
     537        33890 :           if (id->adjust_array_error_bounds
     538         3712 :               && TYPE_DOMAIN (new_tree)
     539         3712 :               && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
     540        36774 :               && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
     541              :             {
     542         2884 :               tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
     543         2884 :               DECL_ATTRIBUTES (v)
     544         2884 :                 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
     545         2884 :                              DECL_ATTRIBUTES (v));
     546         2884 :               TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
     547              :             }
     548              :         }
     549              :       break;
     550              : 
     551          235 :     case RECORD_TYPE:
     552          235 :     case UNION_TYPE:
     553          235 :     case QUAL_UNION_TYPE:
     554          235 :       if (TYPE_MAIN_VARIANT (type) != type
     555          235 :           && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
     556           17 :         TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
     557              :       else
     558              :         {
     559          218 :           tree f, nf = NULL;
     560              : 
     561          999 :           for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
     562              :             {
     563          781 :               t = remap_decl (f, id);
     564          781 :               DECL_CONTEXT (t) = new_tree;
     565          781 :               DECL_CHAIN (t) = nf;
     566          781 :               nf = t;
     567              :             }
     568          218 :           TYPE_FIELDS (new_tree) = nreverse (nf);
     569              :         }
     570              :       break;
     571              : 
     572            0 :     case OFFSET_TYPE:
     573            0 :     default:
     574              :       /* Shouldn't have been thought variable sized.  */
     575            0 :       gcc_unreachable ();
     576              :     }
     577              : 
     578              :   /* All variants of type share the same size, so use the already remaped data.  */
     579        34195 :   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
     580              :     {
     581           87 :       tree s = TYPE_SIZE (type);
     582           87 :       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
     583           87 :       tree su = TYPE_SIZE_UNIT (type);
     584           87 :       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
     585           87 :       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
     586              :                             && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
     587              :                            || s == mvs);
     588           87 :       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
     589              :                             && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
     590              :                            || su == mvsu);
     591           87 :       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
     592           87 :       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
     593              :     }
     594              :   else
     595              :     {
     596        34108 :       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
     597        34108 :       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
     598              :     }
     599              : 
     600              :   return new_tree;
     601              : }
     602              : 
     603              : /* Helper function for remap_type_2, called through walk_tree.  */
     604              : 
     605              : static tree
     606        30409 : remap_type_3 (tree *tp, int *walk_subtrees, void *data)
     607              : {
     608        30409 :   copy_body_data *id = (copy_body_data *) data;
     609              : 
     610        30409 :   if (TYPE_P (*tp))
     611            0 :     *walk_subtrees = 0;
     612              : 
     613        30409 :   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
     614              :     return *tp;
     615              : 
     616              :   return NULL_TREE;
     617              : }
     618              : 
     619              : /* Return true if TYPE needs to be remapped because remap_decl on any
     620              :    needed embedded decl returns something other than that decl.  */
     621              : 
     622              : static bool
     623        51315 : remap_type_2 (tree type, copy_body_data *id)
     624              : {
     625        61841 :   tree t;
     626              : 
     627              : #define RETURN_TRUE_IF_VAR(T) \
     628              :   do                                                            \
     629              :     {                                                           \
     630              :       tree _t = (T);                                            \
     631              :       if (_t)                                                   \
     632              :         {                                                       \
     633              :           if (DECL_P (_t) && remap_decl (_t, id) != _t)         \
     634              :             return true;                                        \
     635              :           if (!TYPE_SIZES_GIMPLIFIED (type)                     \
     636              :               && walk_tree (&_t, remap_type_3, id, NULL))   \
     637              :             return true;                                        \
     638              :         }                                                       \
     639              :     }                                                           \
     640              :   while (0)
     641              : 
     642        61841 :   switch (TREE_CODE (type))
     643              :     {
     644        10526 :     case POINTER_TYPE:
     645        10526 :     case REFERENCE_TYPE:
     646        10526 :     case FUNCTION_TYPE:
     647        10526 :     case METHOD_TYPE:
     648        10526 :       return remap_type_2 (TREE_TYPE (type), id);
     649              : 
     650        34490 :     case INTEGER_TYPE:
     651        34490 :     case REAL_TYPE:
     652        34490 :     case FIXED_POINT_TYPE:
     653        34490 :     case ENUMERAL_TYPE:
     654        34490 :     case BOOLEAN_TYPE:
     655        34490 :       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
     656        34490 :       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
     657        22660 :       return false;
     658              : 
     659        16354 :     case ARRAY_TYPE:
     660        16354 :       if (remap_type_2 (TREE_TYPE (type), id)
     661        32054 :           || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
     662         9065 :         return true;
     663              :       break;
     664              : 
     665          357 :     case RECORD_TYPE:
     666          357 :     case UNION_TYPE:
     667          357 :     case QUAL_UNION_TYPE:
     668         3561 :       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
     669         3204 :         if (TREE_CODE (t) == FIELD_DECL)
     670              :           {
     671         1242 :             RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
     672         1242 :             RETURN_TRUE_IF_VAR (DECL_SIZE (t));
     673         1242 :             RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
     674         1242 :             if (TREE_CODE (type) == QUAL_UNION_TYPE)
     675            0 :               RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
     676              :           }
     677              :       break;
     678              : 
     679              :     default:
     680              :       return false;
     681              :     }
     682              : 
     683         7646 :   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
     684         7637 :   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
     685         6962 :   return false;
     686              : #undef RETURN_TRUE_IF_VAR
     687              : }
     688              : 
     689              : tree
     690   1501341349 : remap_type (tree type, copy_body_data *id)
     691              : {
     692   1501341349 :   tree *node;
     693   1501341349 :   tree tmp;
     694              : 
     695   1501341349 :   if (type == NULL)
     696              :     return type;
     697              : 
     698              :   /* See if we have remapped this type.  */
     699   1501188665 :   node = id->decl_map->get (type);
     700   1501188665 :   if (node)
     701    991497758 :     return *node;
     702              : 
     703              :   /* The type only needs remapping if it's variably modified.  */
     704    509690907 :   if (! variably_modified_type_p (type, id->src_fn)
     705              :       /* Don't remap if copy_decl method doesn't always return a new
     706              :          decl and for all embedded decls returns the passed in decl.  */
     707    509690907 :       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
     708              :     {
     709    509589531 :       insert_decl_map (id, type, type);
     710    509589531 :       return type;
     711              :     }
     712              : 
     713       101376 :   id->remapping_type_depth++;
     714       101376 :   tmp = remap_type_1 (type, id);
     715       101376 :   id->remapping_type_depth--;
     716              : 
     717       101376 :   return tmp;
     718              : }
     719              : 
     720              : /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
     721              : 
     722              : static bool
     723     48275134 : can_be_nonlocal (tree decl, copy_body_data *id)
     724              : {
     725              :   /* We cannot duplicate function decls.  */
     726     48275134 :   if (TREE_CODE (decl) == FUNCTION_DECL)
     727              :     return true;
     728              : 
     729              :   /* Local static vars must be non-local or we get multiple declaration
     730              :      problems.  */
     731     48257130 :   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
     732              :     return true;
     733              : 
     734              :   return false;
     735              : }
     736              : 
     737              : static tree
     738     51773245 : remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
     739              :              copy_body_data *id)
     740              : {
     741     51773245 :   tree old_var;
     742     51773245 :   tree new_decls = NULL_TREE;
     743              : 
     744              :   /* Remap its variables.  */
     745     96139116 :   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
     746              :     {
     747     44365871 :       tree new_var;
     748              : 
     749     44365871 :       if (can_be_nonlocal (old_var, id))
     750              :         {
     751              :           /* We need to add this variable to the local decls as otherwise
     752              :              nothing else will do so.  */
     753       126065 :           if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
     754       107923 :             add_local_decl (cfun, old_var);
     755        88485 :           if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
     756        99759 :               && !DECL_IGNORED_P (old_var)
     757       225761 :               && nonlocalized_list)
     758        67053 :             vec_safe_push (*nonlocalized_list, old_var);
     759       126065 :           continue;
     760              :         }
     761              : 
     762              :       /* Remap the variable.  */
     763     44239806 :       new_var = remap_decl (old_var, id);
     764              : 
     765              :       /* If we didn't remap this variable, we can't mess with its
     766              :          TREE_CHAIN.  If we remapped this variable to the return slot, it's
     767              :          already declared somewhere else, so don't declare it here.  */
     768              : 
     769     44239806 :       if (new_var == old_var || new_var == id->retvar)
     770              :         ;
     771     39513682 :       else if (!new_var)
     772              :         {
     773            0 :           if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
     774            0 :               && !DECL_IGNORED_P (old_var)
     775            0 :               && nonlocalized_list)
     776            0 :             vec_safe_push (*nonlocalized_list, old_var);
     777              :         }
     778              :       else
     779              :         {
     780     39513682 :           gcc_assert (DECL_P (new_var));
     781     39513682 :           DECL_CHAIN (new_var) = new_decls;
     782     39513682 :           new_decls = new_var;
     783              : 
     784              :           /* Also copy value-expressions.  */
     785     39513682 :           if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
     786              :             {
     787      2293620 :               tree tem = DECL_VALUE_EXPR (new_var);
     788      2293620 :               bool old_regimplify = id->regimplify;
     789      2293620 :               id->remapping_type_depth++;
     790      2293620 :               walk_tree (&tem, copy_tree_body_r, id, NULL);
     791      2293620 :               id->remapping_type_depth--;
     792      2293620 :               id->regimplify = old_regimplify;
     793      2293620 :               SET_DECL_VALUE_EXPR (new_var, tem);
     794              :             }
     795              :         }
     796              :     }
     797              : 
     798     51773245 :   return nreverse (new_decls);
     799              : }
     800              : 
     801              : /* Copy the BLOCK to contain remapped versions of the variables
     802              :    therein.  And hook the new block into the block-tree.  */
     803              : 
     804              : static void
     805     45678679 : remap_block (tree *block, copy_body_data *id)
     806              : {
     807     45678679 :   tree old_block;
     808     45678679 :   tree new_block;
     809              : 
     810              :   /* Make the new block.  */
     811     45678679 :   old_block = *block;
     812     45678679 :   new_block = make_node (BLOCK);
     813     45678679 :   TREE_USED (new_block) = TREE_USED (old_block);
     814     45678679 :   BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
     815     45678679 :   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
     816     45678679 :   BLOCK_NONLOCALIZED_VARS (new_block)
     817     45741432 :     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
     818     45678679 :   *block = new_block;
     819              : 
     820              :   /* Remap its variables.  */
     821     91357358 :   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
     822     45678679 :                                         &BLOCK_NONLOCALIZED_VARS (new_block),
     823              :                                         id);
     824              : 
     825              :   /* Remember the remapped block.  */
     826     45678679 :   insert_decl_map (id, old_block, new_block);
     827     45678679 : }
     828              : 
     829              : /* Copy the whole block tree and root it in id->block.  */
     830              : 
     831              : static tree
     832     22654693 : remap_blocks (tree block, copy_body_data *id)
     833              : {
     834     22654693 :   tree t;
     835     22654693 :   tree new_tree = block;
     836              : 
     837     22654693 :   if (!block)
     838              :     return NULL;
     839              : 
     840     22654693 :   remap_block (&new_tree, id);
     841     22654693 :   gcc_assert (new_tree != block);
     842     40787261 :   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
     843     18132568 :     prepend_lexical_block (new_tree, remap_blocks (t, id));
     844              :   /* Blocks are in arbitrary order, but make things slightly prettier and do
     845              :      not swap order when producing a copy.  */
     846     22654693 :   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
     847     22654693 :   return new_tree;
     848              : }
     849              : 
     850              : /* Remap the block tree rooted at BLOCK to nothing.  */
     851              : 
     852              : static void
     853        68684 : remap_blocks_to_null (tree block, copy_body_data *id)
     854              : {
     855        68684 :   tree t;
     856        68684 :   insert_decl_map (id, block, NULL_TREE);
     857       110666 :   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
     858        41982 :     remap_blocks_to_null (t, id);
     859        68684 : }
     860              : 
     861              : /* Remap the location info pointed to by LOCUS.  */
     862              : 
     863              : static location_t
     864     24832739 : remap_location (location_t locus, copy_body_data *id)
     865              : {
     866     24832739 :   if (LOCATION_BLOCK (locus))
     867              :     {
     868     10046740 :       tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
     869     10046740 :       gcc_assert (n);
     870     10046740 :       if (*n)
     871     10013266 :         return set_block (locus, *n);
     872              :     }
     873              : 
     874     14819473 :   locus = LOCATION_LOCUS (locus);
     875              : 
     876     14819473 :   if (locus != UNKNOWN_LOCATION && id->block)
     877           15 :     return set_block (locus, id->block);
     878              : 
     879              :   return locus;
     880              : }
     881              : 
     882              : static void
     883     39828100 : copy_statement_list (tree *tp)
     884              : {
     885     39828100 :   tree_stmt_iterator oi, ni;
     886     39828100 :   tree new_tree;
     887              : 
     888     39828100 :   new_tree = alloc_stmt_list ();
     889     39828100 :   ni = tsi_start (new_tree);
     890     39828100 :   oi = tsi_start (*tp);
     891     39828100 :   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
     892     39828100 :   *tp = new_tree;
     893              : 
     894    133426027 :   for (; !tsi_end_p (oi); tsi_next (&oi))
     895              :     {
     896     93597927 :       tree stmt = tsi_stmt (oi);
     897     93597927 :       if (TREE_CODE (stmt) == STATEMENT_LIST)
     898              :         /* This copy is not redundant; tsi_link_after will smash this
     899              :            STATEMENT_LIST into the end of the one we're building, and we
     900              :            don't want to do that with the original.  */
     901        72817 :         copy_statement_list (&stmt);
     902     93597927 :       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
     903              :     }
     904     39828100 : }
     905              : 
     906              : static void
     907     23024234 : copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
     908              : {
     909     23024234 :   tree block = BIND_EXPR_BLOCK (*tp);
     910              :   /* Copy (and replace) the statement.  */
     911     23024234 :   copy_tree_r (tp, walk_subtrees, NULL);
     912     23024234 :   if (block)
     913              :     {
     914     23023699 :       remap_block (&block, id);
     915     23023699 :       BIND_EXPR_BLOCK (*tp) = block;
     916              :     }
     917              : 
     918     23024234 :   if (BIND_EXPR_VARS (*tp))
     919              :     /* This will remap a lot of the same decls again, but this should be
     920              :        harmless.  */
     921      6094398 :     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
     922     23024234 : }
     923              : 
     924              : 
     925              : /* Create a new gimple_seq by remapping all the statements in BODY
     926              :    using the inlining information in ID.  */
     927              : 
     928              : static gimple_seq
     929           67 : remap_gimple_seq (gimple_seq body, copy_body_data *id)
     930              : {
     931           67 :   gimple_stmt_iterator si;
     932           67 :   gimple_seq new_body = NULL;
     933              : 
     934           67 :   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
     935              :     {
     936            0 :       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
     937            0 :       gimple_seq_add_seq (&new_body, new_stmts);
     938              :     }
     939              : 
     940           67 :   return new_body;
     941              : }
     942              : 
     943              : 
     944              : /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
     945              :    block using the mapping information in ID.  */
     946              : 
     947              : static gimple *
     948            0 : copy_gimple_bind (gbind *stmt, copy_body_data *id)
     949              : {
     950            0 :   gimple *new_bind;
     951            0 :   tree new_block, new_vars;
     952            0 :   gimple_seq body, new_body;
     953              : 
     954              :   /* Copy the statement.  Note that we purposely don't use copy_stmt
     955              :      here because we need to remap statements as we copy.  */
     956            0 :   body = gimple_bind_body (stmt);
     957            0 :   new_body = remap_gimple_seq (body, id);
     958              : 
     959            0 :   new_block = gimple_bind_block (stmt);
     960            0 :   if (new_block)
     961            0 :     remap_block (&new_block, id);
     962              : 
     963              :   /* This will remap a lot of the same decls again, but this should be
     964              :      harmless.  */
     965            0 :   new_vars = gimple_bind_vars (stmt);
     966            0 :   if (new_vars)
     967            0 :     new_vars = remap_decls (new_vars, NULL, id);
     968              : 
     969            0 :   new_bind = gimple_build_bind (new_vars, new_body, new_block);
     970              : 
     971            0 :   return new_bind;
     972              : }
     973              : 
     974              : /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
     975              : 
     976              : static bool
     977        31531 : is_parm (tree decl)
     978              : {
     979        31531 :   if (TREE_CODE (decl) == SSA_NAME)
     980              :     {
     981        29640 :       decl = SSA_NAME_VAR (decl);
     982              :       if (!decl)
     983              :         return false;
     984              :     }
     985              : 
     986        18992 :   return (TREE_CODE (decl) == PARM_DECL);
     987              : }
     988              : 
     989              : /* Copy the TREE_THIS_NOTRAP flag from OLD to T if it is appropriate to do so.
     990              :    T and OLD must be both either INDIRECT_REF or MEM_REF.  */
     991              : 
     992              : static void
     993     22145616 : maybe_copy_this_notrap (copy_body_data *id, tree t, tree old)
     994              : {
     995     22145616 :   gcc_assert (TREE_CODE (t) == TREE_CODE (old));
     996              : 
     997              :   /* We cannot blindly propagate the TREE_THIS_NOTRAP flag if we have remapped
     998              :      a parameter as the property might be valid only for the parameter itself,
     999              :      typically when it is passed by reference.  But we propagate the flag when
    1000              :      this is the dereference of an entire object done in a type that has self-
    1001              :      referential size, to avoid the static size check in tree_could_trap_p.  */
    1002     22145616 :   if (TREE_THIS_NOTRAP (old)
    1003     22145616 :       && (!is_parm (TREE_OPERAND (old, 0))
    1004        13192 :           || (!id->transform_parameter && is_parm (TREE_OPERAND (t, 0)))
    1005         9537 :           || ((TREE_CODE (t) == INDIRECT_REF
    1006         9537 :                || integer_zerop (TREE_OPERAND (t, 1)))
    1007         9537 :               && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
    1008         8867 :               && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
    1009         8867 :               && type_contains_placeholder_p (TREE_TYPE (t)))))
    1010        18336 :     TREE_THIS_NOTRAP (t) = 1;
    1011     22145616 : }
    1012              : 
    1013              : /* Remap the dependence CLIQUE from the source to the destination function
    1014              :    as specified in ID.  */
    1015              : 
    1016              : static unsigned short
    1017      2625528 : remap_dependence_clique (copy_body_data *id, unsigned short clique)
    1018              : {
    1019      2625528 :   if (clique == 0 || processing_debug_stmt)
    1020              :     return 0;
    1021      2590738 :   if (!id->dependence_map)
    1022       676528 :     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
    1023      2590738 :   bool existed;
    1024      2590738 :   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
    1025      2590738 :   if (!existed)
    1026              :     {
    1027              :       /* Clique 1 is reserved for local ones set by PTA.  */
    1028      1007217 :       if (cfun->last_clique == 0)
    1029       314865 :         cfun->last_clique = 1;
    1030      2014434 :       newc = get_new_clique (cfun);
    1031              :     }
    1032      2590738 :   return newc;
    1033              : }
    1034              : 
    1035              : /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
    1036              :    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
    1037              :    WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
    1038              :    recursing into the children nodes of *TP.  */
    1039              : 
    1040              : static tree
    1041    187719403 : remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
    1042              : {
    1043    187719403 :   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
    1044    187719403 :   copy_body_data *id = (copy_body_data *) wi_p->info;
    1045    187719403 :   tree fn = id->src_fn;
    1046              : 
    1047              :   /* For recursive invocations this is no longer the LHS itself.  */
    1048    187719403 :   bool is_lhs = wi_p->is_lhs;
    1049    187719403 :   wi_p->is_lhs = false;
    1050              : 
    1051    187719403 :   if (TREE_CODE (*tp) == SSA_NAME)
    1052              :     {
    1053     62529925 :       *tp = remap_ssa_name (*tp, id);
    1054     62529925 :       *walk_subtrees = 0;
    1055     62529925 :       if (is_lhs)
    1056     15546210 :         SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
    1057     62529925 :       return NULL;
    1058              :     }
    1059    125189478 :   else if (auto_var_in_fn_p (*tp, fn))
    1060              :     {
    1061              :       /* Local variables and labels need to be replaced by equivalent
    1062              :          variables.  We don't want to copy static variables; there's
    1063              :          only one of those, no matter how many times we inline the
    1064              :          containing function.  Similarly for globals from an outer
    1065              :          function.  */
    1066     40274607 :       tree new_decl;
    1067              : 
    1068              :       /* Remap the declaration.  */
    1069     40274607 :       new_decl = remap_decl (*tp, id);
    1070     40274607 :       gcc_assert (new_decl);
    1071              :       /* Replace this variable with the copy.  */
    1072     40274607 :       STRIP_TYPE_NOPS (new_decl);
    1073              :       /* ???  The C++ frontend uses void * pointer zero to initialize
    1074              :          any other type.  This confuses the middle-end type verification.
    1075              :          As cloned bodies do not go through gimplification again the fixup
    1076              :          there doesn't trigger.  */
    1077     40274607 :       if (TREE_CODE (new_decl) == INTEGER_CST
    1078     40274607 :           && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
    1079            0 :         new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
    1080     40274607 :       *tp = new_decl;
    1081     40274607 :       *walk_subtrees = 0;
    1082              :     }
    1083     84914871 :   else if (TREE_CODE (*tp) == STATEMENT_LIST)
    1084            0 :     gcc_unreachable ();
    1085     84914871 :   else if (TREE_CODE (*tp) == SAVE_EXPR)
    1086            0 :     gcc_unreachable ();
    1087     84914871 :   else if (TREE_CODE (*tp) == LABEL_DECL
    1088     84914871 :            && (!DECL_CONTEXT (*tp)
    1089          441 :                || decl_function_context (*tp) == id->src_fn))
    1090              :     /* These may need to be remapped for EH handling.  */
    1091            0 :     *tp = remap_decl (*tp, id);
    1092     84914871 :   else if (TREE_CODE (*tp) == FIELD_DECL)
    1093              :     {
    1094              :       /* If the enclosing record type is variably_modified_type_p, the field
    1095              :          has already been remapped.  Otherwise, it need not be.  */
    1096     15068127 :       tree *n = id->decl_map->get (*tp);
    1097     15068127 :       if (n)
    1098           95 :         *tp = *n;
    1099     15068127 :       *walk_subtrees = 0;
    1100              :     }
    1101     69846744 :   else if (TYPE_P (*tp))
    1102              :     /* Types may need remapping as well.  */
    1103            0 :     *tp = remap_type (*tp, id);
    1104     69846744 :   else if (CONSTANT_CLASS_P (*tp))
    1105              :     {
    1106              :       /* If this is a constant, we have to copy the node iff the type
    1107              :          will be remapped.  copy_tree_r will not copy a constant.  */
    1108     11464452 :       tree new_type = remap_type (TREE_TYPE (*tp), id);
    1109              : 
    1110     11464452 :       if (new_type == TREE_TYPE (*tp))
    1111     11460515 :         *walk_subtrees = 0;
    1112              : 
    1113         3937 :       else if (TREE_CODE (*tp) == INTEGER_CST)
    1114         3937 :         *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
    1115              :       else
    1116              :         {
    1117            0 :           *tp = copy_node (*tp);
    1118            0 :           TREE_TYPE (*tp) = new_type;
    1119              :         }
    1120              :     }
    1121              :   else
    1122              :     {
    1123              :       /* Otherwise, just copy the node.  Note that copy_tree_r already
    1124              :          knows not to copy VAR_DECLs, etc., so this is safe.  */
    1125              : 
    1126     58382292 :       if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
    1127              :         {
    1128              :           /* We need to re-canonicalize MEM_REFs from inline substitutions
    1129              :              that can happen when a pointer argument is an ADDR_EXPR.
    1130              :              Recurse here manually to allow that.  */
    1131     13219946 :           tree ptr = TREE_OPERAND (*tp, 0);
    1132     13219946 :           tree type = remap_type (TREE_TYPE (*tp), id);
    1133     13219946 :           tree old = *tp;
    1134     13219946 :           walk_tree (&ptr, remap_gimple_op_r, data, NULL);
    1135     13219946 :           *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
    1136     13219946 :           TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
    1137     13219946 :           TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
    1138     13219946 :           copy_warning (*tp, old);
    1139     13219946 :           if (MR_DEPENDENCE_CLIQUE (old) != 0)
    1140              :             {
    1141      2546062 :               MR_DEPENDENCE_CLIQUE (*tp)
    1142      2546062 :                 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
    1143      2546062 :               MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
    1144              :             }
    1145     13219946 :           maybe_copy_this_notrap (id, *tp, old);
    1146     13219946 :           REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
    1147     13219946 :           *walk_subtrees = 0;
    1148     13219946 :           return NULL;
    1149              :         }
    1150              : 
    1151              :       /* Here is the "usual case".  Copy this tree node, and then
    1152              :          tweak some special cases.  */
    1153     45162346 :       copy_tree_r (tp, walk_subtrees, NULL);
    1154              : 
    1155     45162346 :       if (TREE_CODE (*tp) != OMP_CLAUSE)
    1156     45162346 :         TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
    1157              : 
    1158     45162346 :       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
    1159              :         {
    1160              :           /* The copied TARGET_EXPR has never been expanded, even if the
    1161              :              original node was expanded already.  */
    1162            0 :           TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
    1163            0 :           TREE_OPERAND (*tp, 3) = NULL_TREE;
    1164              :         }
    1165     45162346 :       else if (TREE_CODE (*tp) == ADDR_EXPR)
    1166              :         {
    1167              :           /* Variable substitution need not be simple.  In particular,
    1168              :              the MEM_REF substitution above.  Make sure that
    1169              :              TREE_CONSTANT and friends are up-to-date.  */
    1170     14005928 :           int invariant = is_gimple_min_invariant (*tp);
    1171     14005928 :           walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
    1172     14005928 :           recompute_tree_invariant_for_addr_expr (*tp);
    1173              : 
    1174              :           /* If this used to be invariant, but is not any longer,
    1175              :              then regimplification is probably needed.  */
    1176     14005928 :           if (invariant && !is_gimple_min_invariant (*tp))
    1177         4332 :             id->regimplify = true;
    1178              : 
    1179     14005928 :           *walk_subtrees = 0;
    1180              :         }
    1181     31156418 :       else if (TREE_CODE (*tp) == OMP_NEXT_VARIANT)
    1182              :         {
    1183              :           /* Neither operand is interesting, and walking the selector
    1184              :              causes problems because it's not an expression.  */
    1185          288 :           gcc_assert (TREE_CODE (TREE_OPERAND (*tp, 0)) == INTEGER_CST);
    1186          288 :           *walk_subtrees = 0;
    1187              :         }
    1188              :     }
    1189              : 
    1190              :   /* Update the TREE_BLOCK for the cloned expr.  */
    1191    111969532 :   if (EXPR_P (*tp))
    1192              :     {
    1193     30159543 :       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
    1194     30159543 :       tree old_block = TREE_BLOCK (*tp);
    1195     30159543 :       if (old_block)
    1196              :         {
    1197     16663754 :           tree *n;
    1198     16663754 :           n = id->decl_map->get (TREE_BLOCK (*tp));
    1199     16663754 :           if (n)
    1200     16663279 :             new_block = *n;
    1201              :         }
    1202     30159543 :       TREE_SET_BLOCK (*tp, new_block);
    1203              :     }
    1204              : 
    1205              :   /* Keep iterating.  */
    1206              :   return NULL_TREE;
    1207              : }
    1208              : 
    1209              : 
    1210              : /* Called from copy_body_id via walk_tree.  DATA is really a
    1211              :    `copy_body_data *'.  */
    1212              : 
    1213              : tree
    1214   1552455277 : copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
    1215              : {
    1216   1552455277 :   copy_body_data *id = (copy_body_data *) data;
    1217   1552455277 :   tree fn = id->src_fn;
    1218   1552455277 :   tree new_block;
    1219              : 
    1220              :   /* Begin by recognizing trees that we'll completely rewrite for the
    1221              :      inlining context.  Our output for these trees is completely
    1222              :      different from out input (e.g. RETURN_EXPR is deleted, and morphs
    1223              :      into an edge).  Further down, we'll handle trees that get
    1224              :      duplicated and/or tweaked.  */
    1225              : 
    1226              :   /* When requested, RETURN_EXPRs should be transformed to just the
    1227              :      contained MODIFY_EXPR.  The branch semantics of the return will
    1228              :      be handled elsewhere by manipulating the CFG rather than a statement.  */
    1229   1552455277 :   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    1230              :     {
    1231            0 :       tree assignment = TREE_OPERAND (*tp, 0);
    1232              : 
    1233              :       /* If we're returning something, just turn that into an
    1234              :          assignment into the equivalent of the original RESULT_DECL.
    1235              :          If the "assignment" is just the result decl, the result
    1236              :          decl has already been set (e.g. a recent "foo (&result_decl,
    1237              :          ...)"); just toss the entire RETURN_EXPR.  */
    1238            0 :       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
    1239              :         {
    1240              :           /* Replace the RETURN_EXPR with (a copy of) the
    1241              :              MODIFY_EXPR hanging underneath.  */
    1242            0 :           *tp = copy_node (assignment);
    1243              :         }
    1244              :       else /* Else the RETURN_EXPR returns no value.  */
    1245              :         {
    1246            0 :           *tp = NULL;
    1247            0 :           return (tree) (void *)1;
    1248              :         }
    1249            0 :     }
    1250   1552455277 :   else if (TREE_CODE (*tp) == SSA_NAME)
    1251              :     {
    1252      3794552 :       *tp = remap_ssa_name (*tp, id);
    1253      3794552 :       *walk_subtrees = 0;
    1254      3794552 :       return NULL;
    1255              :     }
    1256              : 
    1257              :   /* Local variables and labels need to be replaced by equivalent
    1258              :      variables.  We don't want to copy static variables; there's only
    1259              :      one of those, no matter how many times we inline the containing
    1260              :      function.  Similarly for globals from an outer function.  */
    1261   1548660725 :   else if (auto_var_in_fn_p (*tp, fn))
    1262              :     {
    1263    193005892 :       tree new_decl;
    1264              : 
    1265              :       /* Remap the declaration.  */
    1266    193005892 :       new_decl = remap_decl (*tp, id);
    1267    193005892 :       gcc_assert (new_decl);
    1268              :       /* Replace this variable with the copy.  */
    1269    193005892 :       STRIP_TYPE_NOPS (new_decl);
    1270    193005892 :       *tp = new_decl;
    1271    193005892 :       *walk_subtrees = 0;
    1272              :     }
    1273   1355654833 :   else if (TREE_CODE (*tp) == STATEMENT_LIST)
    1274     39755085 :     copy_statement_list (tp);
    1275   1315899748 :   else if (TREE_CODE (*tp) == SAVE_EXPR
    1276   1315209672 :            || TREE_CODE (*tp) == TARGET_EXPR)
    1277     16273940 :     remap_save_expr (tp, id->decl_map, walk_subtrees);
    1278   1299625808 :   else if (TREE_CODE (*tp) == LABEL_DECL
    1279   1299625808 :            && (! DECL_CONTEXT (*tp)
    1280           14 :                || decl_function_context (*tp) == id->src_fn))
    1281              :     /* These may need to be remapped for EH handling.  */
    1282            0 :     *tp = remap_decl (*tp, id);
    1283   1299625808 :   else if (TREE_CODE (*tp) == BIND_EXPR)
    1284     23024234 :     copy_bind_expr (tp, walk_subtrees, id);
    1285              :   /* Types may need remapping as well.  */
    1286   1276601574 :   else if (TYPE_P (*tp))
    1287       277372 :     *tp = remap_type (*tp, id);
    1288              : 
    1289              :   /* If this is a constant, we have to copy the node iff the type will be
    1290              :      remapped.  copy_tree_r will not copy a constant.  */
    1291   1276324202 :   else if (CONSTANT_CLASS_P (*tp))
    1292              :     {
    1293    376233477 :       tree new_type = remap_type (TREE_TYPE (*tp), id);
    1294              : 
    1295    376233477 :       if (new_type == TREE_TYPE (*tp))
    1296    376232280 :         *walk_subtrees = 0;
    1297              : 
    1298         1197 :       else if (TREE_CODE (*tp) == INTEGER_CST)
    1299         1197 :         *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
    1300              :       else
    1301              :         {
    1302            0 :           *tp = copy_node (*tp);
    1303            0 :           TREE_TYPE (*tp) = new_type;
    1304              :         }
    1305              :     }
    1306              : 
    1307              :   /* Otherwise, just copy the node.  Note that copy_tree_r already
    1308              :      knows not to copy VAR_DECLs, etc., so this is safe.  */
    1309              :   else
    1310              :     {
    1311              :       /* Here we handle trees that are not completely rewritten.
    1312              :          First we detect some inlining-induced bogosities for
    1313              :          discarding.  */
    1314    900090725 :       if (TREE_CODE (*tp) == MODIFY_EXPR
    1315     12350974 :           && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
    1316    900090744 :           && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
    1317              :         {
    1318              :           /* Some assignments VAR = VAR; don't generate any rtl code
    1319              :              and thus don't count as variable modification.  Avoid
    1320              :              keeping bogosities like 0 = 0.  */
    1321            0 :           tree decl = TREE_OPERAND (*tp, 0), value;
    1322            0 :           tree *n;
    1323              : 
    1324            0 :           n = id->decl_map->get (decl);
    1325            0 :           if (n)
    1326              :             {
    1327            0 :               value = *n;
    1328            0 :               STRIP_TYPE_NOPS (value);
    1329            0 :               if (TREE_CONSTANT (value) || TREE_READONLY (value))
    1330              :                 {
    1331            0 :                   *tp = build_empty_stmt (EXPR_LOCATION (*tp));
    1332            0 :                   return copy_tree_body_r (tp, walk_subtrees, data);
    1333              :                 }
    1334              :             }
    1335              :         }
    1336    900090725 :       else if (INDIRECT_REF_P (*tp))
    1337              :         {
    1338              :           /* Get rid of *& from inline substitutions that can happen when a
    1339              :              pointer argument is an ADDR_EXPR.  */
    1340     51704564 :           tree decl = TREE_OPERAND (*tp, 0);
    1341     51704564 :           tree *n = id->decl_map->get (decl);
    1342     51704564 :           if (n)
    1343              :             {
    1344              :               /* If we happen to get an ADDR_EXPR in n->value, strip
    1345              :                  it manually here as we'll eventually get ADDR_EXPRs
    1346              :                  which lie about their types pointed to.  In this case
    1347              :                  build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
    1348              :                  but we absolutely rely on that.  As fold_indirect_ref
    1349              :                  does other useful transformations, try that first, though.  */
    1350      8859432 :               tree type = TREE_TYPE (*tp);
    1351      8859432 :               tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
    1352      8859432 :               tree old = *tp;
    1353      8859432 :               *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
    1354      8859432 :               if (! *tp)
    1355              :                 {
    1356      8859308 :                   type = remap_type (type, id);
    1357      8859308 :                   if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
    1358              :                     {
    1359            0 :                       *tp
    1360            0 :                         = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
    1361              :                       /* ???  We should either assert here or build
    1362              :                          a VIEW_CONVERT_EXPR instead of blindly leaking
    1363              :                          incompatible types to our IL.  */
    1364            0 :                       if (! *tp)
    1365            0 :                         *tp = TREE_OPERAND (ptr, 0);
    1366              :                     }
    1367              :                   else
    1368              :                     {
    1369      8859308 :                       *tp = build1 (INDIRECT_REF, type, ptr);
    1370      8859308 :                       TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
    1371      8859308 :                       TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
    1372      8859308 :                       TREE_READONLY (*tp) = TREE_READONLY (old);
    1373      8859308 :                       maybe_copy_this_notrap (id, *tp, old);
    1374              :                     }
    1375              :                 }
    1376      8859432 :               *walk_subtrees = 0;
    1377      8859432 :               return NULL;
    1378              :             }
    1379              :         }
    1380    848386161 :       else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
    1381              :         {
    1382              :           /* We need to re-canonicalize MEM_REFs from inline substitutions
    1383              :              that can happen when a pointer argument is an ADDR_EXPR.
    1384              :              Recurse here manually to allow that.  */
    1385        66362 :           tree ptr = TREE_OPERAND (*tp, 0);
    1386        66362 :           tree type = remap_type (TREE_TYPE (*tp), id);
    1387        66362 :           tree old = *tp;
    1388        66362 :           walk_tree (&ptr, copy_tree_body_r, data, NULL);
    1389        66362 :           *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
    1390        66362 :           TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
    1391        66362 :           TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
    1392        66362 :           copy_warning (*tp, old);
    1393        66362 :           if (MR_DEPENDENCE_CLIQUE (old) != 0)
    1394              :             {
    1395         4575 :               MR_DEPENDENCE_CLIQUE (*tp)
    1396         4575 :                 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
    1397         4575 :               MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
    1398              :             }
    1399        66362 :           maybe_copy_this_notrap (id, *tp, old);
    1400        66362 :           REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
    1401        66362 :           *walk_subtrees = 0;
    1402        66362 :           return NULL;
    1403              :         }
    1404              : 
    1405              :       /* Here is the "usual case".  Copy this tree node, and then
    1406              :          tweak some special cases.  */
    1407    891164931 :       copy_tree_r (tp, walk_subtrees, NULL);
    1408              : 
    1409              :       /* If EXPR has block defined, map it to newly constructed block.
    1410              :          When inlining we want EXPRs without block appear in the block
    1411              :          of function call if we are not remapping a type.  */
    1412    891164931 :       if (EXPR_P (*tp))
    1413              :         {
    1414    777636800 :           new_block = id->remapping_type_depth == 0 ? id->block : NULL;
    1415    777636800 :           if (TREE_BLOCK (*tp))
    1416              :             {
    1417        12116 :               tree *n;
    1418        12116 :               n = id->decl_map->get (TREE_BLOCK (*tp));
    1419        12116 :               if (n)
    1420        12116 :                 new_block = *n;
    1421              :             }
    1422    777636800 :           TREE_SET_BLOCK (*tp, new_block);
    1423              :         }
    1424              : 
    1425    891164931 :       if (TREE_CODE (*tp) != OMP_CLAUSE)
    1426    891164840 :         TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
    1427              : 
    1428              :       /* The copied TARGET_EXPR has never been expanded, even if the
    1429              :          original node was expanded already.  */
    1430    891164931 :       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
    1431              :         {
    1432            0 :           TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
    1433            0 :           TREE_OPERAND (*tp, 3) = NULL_TREE;
    1434              :         }
    1435              : 
    1436              :       /* Variable substitution need not be simple.  In particular, the
    1437              :          INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
    1438              :          and friends are up-to-date.  */
    1439    891164931 :       else if (TREE_CODE (*tp) == ADDR_EXPR)
    1440              :         {
    1441     86164397 :           int invariant = is_gimple_min_invariant (*tp);
    1442     86164397 :           walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
    1443              : 
    1444              :           /* Handle the case where we substituted an INDIRECT_REF
    1445              :              into the operand of the ADDR_EXPR.  */
    1446     86164397 :           if (INDIRECT_REF_P (TREE_OPERAND (*tp, 0))
    1447     86164397 :               && !id->do_not_fold)
    1448              :             {
    1449          147 :               tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
    1450          147 :               if (TREE_TYPE (t) != TREE_TYPE (*tp))
    1451          147 :                 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
    1452          147 :               *tp = t;
    1453              :             }
    1454              :           else
    1455     86164250 :             recompute_tree_invariant_for_addr_expr (*tp);
    1456              : 
    1457              :           /* If this used to be invariant, but is not any longer,
    1458              :              then regimplification is probably needed.  */
    1459     86164397 :           if (invariant && !is_gimple_min_invariant (*tp))
    1460           19 :             id->regimplify = true;
    1461              : 
    1462     86164397 :           *walk_subtrees = 0;
    1463              :         }
    1464    805000534 :       else if (TREE_CODE (*tp) == OMP_CLAUSE
    1465    805000534 :                && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
    1466           76 :                    || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
    1467              :         {
    1468           30 :           tree t = OMP_CLAUSE_DECL (*tp);
    1469           30 :           if (t && OMP_ITERATOR_DECL_P (t))
    1470              :             {
    1471           18 :               *walk_subtrees = 0;
    1472           18 :               OMP_CLAUSE_DECL (*tp) = copy_node (t);
    1473           18 :               t = OMP_CLAUSE_DECL (*tp);
    1474           18 :               TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
    1475          108 :               for (int i = 0; i <= 4; i++)
    1476           90 :                 walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
    1477              :                            copy_tree_body_r, id, NULL);
    1478           18 :               if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
    1479           18 :                 remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
    1480           18 :               walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
    1481              :             }
    1482              :         }
    1483              :     }
    1484              : 
    1485              :   /* Keep iterating.  */
    1486              :   return NULL_TREE;
    1487              : }
    1488              : 
    1489              : /* Helper for remap_gimple_stmt.  Given an EH region number for the
    1490              :    source function, map that to the duplicate EH region number in
    1491              :    the destination function.  */
    1492              : 
    1493              : static int
    1494        84994 : remap_eh_region_nr (int old_nr, copy_body_data *id)
    1495              : {
    1496        84994 :   eh_region old_r, new_r;
    1497              : 
    1498        84994 :   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
    1499        84994 :   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
    1500              : 
    1501        84994 :   return new_r->index;
    1502              : }
    1503              : 
    1504              : /* Similar, but operate on INTEGER_CSTs.  */
    1505              : 
    1506              : static tree
    1507         7799 : remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
    1508              : {
    1509         7799 :   int old_nr, new_nr;
    1510              : 
    1511         7799 :   old_nr = tree_to_shwi (old_t_nr);
    1512         7799 :   new_nr = remap_eh_region_nr (old_nr, id);
    1513              : 
    1514         7799 :   return build_int_cst (integer_type_node, new_nr);
    1515              : }
    1516              : 
    1517              : /* Helper for copy_bb.  Remap statement STMT using the inlining
    1518              :    information in ID.  Return the new statement copy.  */
    1519              : 
    1520              : static gimple_seq
    1521     86060509 : remap_gimple_stmt (gimple *stmt, copy_body_data *id)
    1522              : {
    1523     86060509 :   gimple *copy = NULL;
    1524     86060509 :   struct walk_stmt_info wi;
    1525     86060509 :   bool skip_first = false;
    1526     86060509 :   gimple_seq stmts = NULL;
    1527              : 
    1528     86060509 :   if (is_gimple_debug (stmt)
    1529     86060509 :       && (gimple_debug_nonbind_marker_p (stmt)
    1530     12132093 :           ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
    1531     40354067 :           : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    1532              :     return NULL;
    1533              : 
    1534     86034312 :   if (!is_gimple_debug (stmt)
    1535     33574349 :       && id->param_body_adjs
    1536     89191876 :       && id->param_body_adjs->m_dead_stmts.contains (stmt))
    1537              :     {
    1538         2376 :       tree *dval = id->param_body_adjs->m_dead_stmt_debug_equiv.get (stmt);
    1539         2376 :       if (!dval)
    1540              :         return NULL;
    1541              : 
    1542         1296 :       gcc_assert (is_gimple_assign (stmt));
    1543         1296 :       tree lhs = gimple_assign_lhs (stmt);
    1544         1296 :       tree *dvar = id->param_body_adjs->m_dead_ssa_debug_equiv.get (lhs);
    1545         1296 :       gdebug *bind = gimple_build_debug_bind (*dvar, *dval, stmt);
    1546         1296 :       if (id->reset_location)
    1547            0 :         gimple_set_location (bind, input_location);
    1548         1296 :       id->debug_stmts.safe_push (bind);
    1549         1296 :       gimple_seq_add_stmt_without_update (&stmts, bind);
    1550         1296 :       return stmts;
    1551              :     }
    1552              : 
    1553              :   /* Begin by recognizing trees that we'll completely rewrite for the
    1554              :      inlining context.  Our output for these trees is completely
    1555              :      different from our input (e.g. RETURN_EXPR is deleted and morphs
    1556              :      into an edge).  Further down, we'll handle trees that get
    1557              :      duplicated and/or tweaked.  */
    1558              : 
    1559              :   /* When requested, GIMPLE_RETURN should be transformed to just the
    1560              :      contained GIMPLE_ASSIGN.  The branch semantics of the return will
    1561              :      be handled elsewhere by manipulating the CFG rather than the
    1562              :      statement.  */
    1563     86031936 :   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    1564              :     {
    1565      4283537 :       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
    1566              : 
    1567              :       /* If we're returning something, just turn that into an
    1568              :          assignment to the equivalent of the original RESULT_DECL.
    1569              :          If RETVAL is just the result decl, the result decl has
    1570              :          already been set (e.g. a recent "foo (&result_decl, ...)");
    1571              :          just toss the entire GIMPLE_RETURN.  Likewise for when the
    1572              :          call doesn't want the return value.  */
    1573      4283537 :       if (retval
    1574      4283537 :           && (TREE_CODE (retval) != RESULT_DECL
    1575      2237489 :               && (!id->call_stmt
    1576      2237489 :                   || gimple_call_lhs (id->call_stmt) != NULL_TREE)
    1577      2125372 :               && (TREE_CODE (retval) != SSA_NAME
    1578      1732059 :                   || ! SSA_NAME_VAR (retval)
    1579       401025 :                   || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
    1580              :         {
    1581      4102614 :           copy = gimple_build_assign (id->do_not_unshare
    1582      2051307 :                                       ? id->retvar : unshare_expr (id->retvar),
    1583              :                                       retval);
    1584              :           /* id->retvar is already substituted.  Skip it on later remapping.  */
    1585      2051307 :           skip_first = true;
    1586              :         }
    1587              :       else
    1588              :         return NULL;
    1589              :     }
    1590     81748399 :   else if (gimple_has_substatements (stmt))
    1591              :     {
    1592           67 :       gimple_seq s1, s2;
    1593              : 
    1594              :       /* When cloning bodies from the C++ front end, we will be handed bodies
    1595              :          in High GIMPLE form.  Handle here all the High GIMPLE statements that
    1596              :          have embedded statements.  */
    1597           67 :       switch (gimple_code (stmt))
    1598              :         {
    1599            0 :         case GIMPLE_BIND:
    1600            0 :           copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
    1601            0 :           break;
    1602              : 
    1603            0 :         case GIMPLE_CATCH:
    1604            0 :           {
    1605            0 :             gcatch *catch_stmt = as_a <gcatch *> (stmt);
    1606            0 :             s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
    1607            0 :             copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
    1608              :           }
    1609            0 :           break;
    1610              : 
    1611            0 :         case GIMPLE_EH_FILTER:
    1612            0 :           s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
    1613            0 :           copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
    1614            0 :           break;
    1615              : 
    1616            0 :         case GIMPLE_TRY:
    1617            0 :           s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
    1618            0 :           s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
    1619            0 :           copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
    1620            0 :           break;
    1621              : 
    1622            0 :         case GIMPLE_WITH_CLEANUP_EXPR:
    1623            0 :           s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
    1624            0 :           copy = gimple_build_wce (s1);
    1625            0 :           break;
    1626              : 
    1627            0 :         case GIMPLE_OMP_PARALLEL:
    1628            0 :           {
    1629            0 :             gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
    1630            0 :             s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
    1631            0 :             copy = gimple_build_omp_parallel
    1632            0 :                      (s1,
    1633              :                       gimple_omp_parallel_clauses (omp_par_stmt),
    1634              :                       gimple_omp_parallel_child_fn (omp_par_stmt),
    1635              :                       gimple_omp_parallel_data_arg (omp_par_stmt));
    1636              :           }
    1637            0 :           break;
    1638              : 
    1639            0 :         case GIMPLE_OMP_TASK:
    1640            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1641            0 :           copy = gimple_build_omp_task
    1642            0 :                    (s1,
    1643              :                     gimple_omp_task_clauses (stmt),
    1644              :                     gimple_omp_task_child_fn (stmt),
    1645              :                     gimple_omp_task_data_arg (stmt),
    1646              :                     gimple_omp_task_copy_fn (stmt),
    1647              :                     gimple_omp_task_arg_size (stmt),
    1648              :                     gimple_omp_task_arg_align (stmt));
    1649            0 :           break;
    1650              : 
    1651            0 :         case GIMPLE_OMP_FOR:
    1652            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1653            0 :           s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
    1654            0 :           copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
    1655              :                                        gimple_omp_for_clauses (stmt),
    1656              :                                        gimple_omp_for_collapse (stmt), s2);
    1657            0 :           {
    1658            0 :             size_t i;
    1659            0 :             for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    1660              :               {
    1661            0 :                 gimple_omp_for_set_index (copy, i,
    1662              :                                           gimple_omp_for_index (stmt, i));
    1663            0 :                 gimple_omp_for_set_initial (copy, i,
    1664              :                                             gimple_omp_for_initial (stmt, i));
    1665            0 :                 gimple_omp_for_set_final (copy, i,
    1666              :                                           gimple_omp_for_final (stmt, i));
    1667            0 :                 gimple_omp_for_set_incr (copy, i,
    1668              :                                          gimple_omp_for_incr (stmt, i));
    1669            0 :                 gimple_omp_for_set_cond (copy, i,
    1670              :                                          gimple_omp_for_cond (stmt, i));
    1671              :               }
    1672              :           }
    1673              :           break;
    1674              : 
    1675            0 :         case GIMPLE_OMP_MASTER:
    1676            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1677            0 :           copy = gimple_build_omp_master (s1);
    1678            0 :           break;
    1679              : 
    1680            0 :         case GIMPLE_OMP_MASKED:
    1681            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1682            0 :           copy = gimple_build_omp_masked
    1683            0 :                    (s1, gimple_omp_masked_clauses (stmt));
    1684            0 :           break;
    1685              : 
    1686            0 :         case GIMPLE_OMP_SCOPE:
    1687            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1688            0 :           copy = gimple_build_omp_scope
    1689            0 :                    (s1, gimple_omp_scope_clauses (stmt));
    1690            0 :           break;
    1691              : 
    1692            0 :         case GIMPLE_OMP_DISPATCH:
    1693            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1694            0 :           copy = gimple_build_omp_dispatch (s1,
    1695              :                                             gimple_omp_dispatch_clauses (stmt));
    1696            0 :           break;
    1697              : 
    1698            0 :         case GIMPLE_OMP_TASKGROUP:
    1699            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1700            0 :           copy = gimple_build_omp_taskgroup
    1701            0 :                    (s1, gimple_omp_taskgroup_clauses (stmt));
    1702            0 :           break;
    1703              : 
    1704            0 :         case GIMPLE_OMP_ORDERED:
    1705            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1706            0 :           copy = gimple_build_omp_ordered
    1707            0 :                    (s1,
    1708            0 :                     gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
    1709            0 :           break;
    1710              : 
    1711            0 :         case GIMPLE_OMP_SCAN:
    1712            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1713            0 :           copy = gimple_build_omp_scan
    1714            0 :                    (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
    1715            0 :           break;
    1716              : 
    1717            0 :         case GIMPLE_OMP_SECTION:
    1718            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1719            0 :           copy = gimple_build_omp_section (s1);
    1720            0 :           break;
    1721              : 
    1722            0 :         case GIMPLE_OMP_SECTIONS:
    1723            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1724            0 :           copy = gimple_build_omp_sections
    1725            0 :                    (s1, gimple_omp_sections_clauses (stmt));
    1726            0 :           break;
    1727              : 
    1728            0 :         case GIMPLE_OMP_STRUCTURED_BLOCK:
    1729            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1730            0 :           copy = gimple_build_omp_structured_block (s1);
    1731            0 :           break;
    1732              : 
    1733            0 :         case GIMPLE_OMP_SINGLE:
    1734            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1735            0 :           copy = gimple_build_omp_single
    1736            0 :                    (s1, gimple_omp_single_clauses (stmt));
    1737            0 :           break;
    1738              : 
    1739            0 :         case GIMPLE_OMP_TARGET:
    1740            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1741            0 :           copy = gimple_build_omp_target
    1742            0 :                    (s1, gimple_omp_target_kind (stmt),
    1743              :                     gimple_omp_target_clauses (stmt));
    1744            0 :           break;
    1745              : 
    1746            0 :         case GIMPLE_OMP_TEAMS:
    1747            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1748            0 :           copy = gimple_build_omp_teams
    1749            0 :                    (s1, gimple_omp_teams_clauses (stmt));
    1750            0 :           break;
    1751              : 
    1752            0 :         case GIMPLE_OMP_CRITICAL:
    1753            0 :           s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
    1754            0 :           copy = gimple_build_omp_critical (s1,
    1755              :                                             gimple_omp_critical_name
    1756            0 :                                               (as_a <gomp_critical *> (stmt)),
    1757              :                                             gimple_omp_critical_clauses
    1758            0 :                                               (as_a <gomp_critical *> (stmt)));
    1759            0 :           break;
    1760              : 
    1761            0 :         case GIMPLE_ASSUME:
    1762            0 :           s1 = remap_gimple_seq (gimple_assume_body (stmt), id);
    1763            0 :           copy = gimple_build_assume (gimple_assume_guard (stmt), s1);
    1764            0 :           break;
    1765              : 
    1766           67 :         case GIMPLE_TRANSACTION:
    1767           67 :           {
    1768           67 :             gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
    1769           67 :             gtransaction *new_trans_stmt;
    1770           67 :             s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
    1771              :                                    id);
    1772           67 :             copy = new_trans_stmt = gimple_build_transaction (s1);
    1773           67 :             gimple_transaction_set_subcode (new_trans_stmt,
    1774              :               gimple_transaction_subcode (old_trans_stmt));
    1775           67 :             gimple_transaction_set_label_norm (new_trans_stmt,
    1776              :               gimple_transaction_label_norm (old_trans_stmt));
    1777           67 :             gimple_transaction_set_label_uninst (new_trans_stmt,
    1778              :               gimple_transaction_label_uninst (old_trans_stmt));
    1779           67 :             gimple_transaction_set_label_over (new_trans_stmt,
    1780              :               gimple_transaction_label_over (old_trans_stmt));
    1781              :           }
    1782           67 :           break;
    1783              : 
    1784            0 :         default:
    1785            0 :           gcc_unreachable ();
    1786              :         }
    1787              :     }
    1788              :   else
    1789              :     {
    1790     81748332 :       if (gimple_assign_single_p (stmt)
    1791     12758088 :           && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
    1792     81748332 :           && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
    1793              :         {
    1794              :           /* Here we handle statements that are not completely rewritten.
    1795              :              First we detect some inlining-induced bogosities for
    1796              :              discarding.  */
    1797              : 
    1798              :           /* Some assignments VAR = VAR; don't generate any rtl code
    1799              :              and thus don't count as variable modification.  Avoid
    1800              :              keeping bogosities like 0 = 0.  */
    1801            0 :           tree decl = gimple_assign_lhs (stmt), value;
    1802            0 :           tree *n;
    1803              : 
    1804            0 :           n = id->decl_map->get (decl);
    1805            0 :           if (n)
    1806              :             {
    1807            0 :               value = *n;
    1808            0 :               STRIP_TYPE_NOPS (value);
    1809            0 :               if (TREE_CONSTANT (value) || TREE_READONLY (value))
    1810            0 :                 return NULL;
    1811              :             }
    1812              :         }
    1813              : 
    1814              :       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
    1815              :          in a block that we aren't copying during tree_function_versioning,
    1816              :          just drop the clobber stmt.  */
    1817     81748332 :       if (id->blocks_to_copy && gimple_clobber_p (stmt))
    1818              :         {
    1819        17149 :           tree lhs = gimple_assign_lhs (stmt);
    1820        17149 :           if (TREE_CODE (lhs) == MEM_REF
    1821        17149 :               && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
    1822              :             {
    1823          864 :               gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
    1824          864 :               if (gimple_bb (def_stmt)
    1825         1165 :                   && !bitmap_bit_p (id->blocks_to_copy,
    1826          301 :                                     gimple_bb (def_stmt)->index))
    1827              :                 return NULL;
    1828              :             }
    1829              :         }
    1830              : 
    1831              :       /* We do not allow CLOBBERs of handled components.  In case
    1832              :          returned value is stored via such handled component, remove
    1833              :          the clobber so stmt verifier is happy.  */
    1834     81748329 :       if (gimple_clobber_p (stmt)
    1835     81748329 :           && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
    1836              :         {
    1837            0 :           tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
    1838            0 :           if (!DECL_P (remapped)
    1839            0 :               && TREE_CODE (remapped) != MEM_REF)
    1840              :             return NULL;
    1841              :         }
    1842              : 
    1843     81748329 :       if (gimple_debug_bind_p (stmt))
    1844              :         {
    1845     39888032 :           tree var = gimple_debug_bind_get_var (stmt);
    1846     39888032 :           tree value = gimple_debug_bind_get_value (stmt);
    1847     39888032 :           if (id->param_body_adjs
    1848     39888032 :               && id->param_body_adjs->m_dead_stmts.contains (stmt))
    1849              :             {
    1850         9291 :               value = unshare_expr_without_location (value);
    1851         9291 :               id->param_body_adjs->remap_with_debug_expressions (&value);
    1852              :             }
    1853              : 
    1854     39888032 :           gdebug *copy = gimple_build_debug_bind (var, value, stmt);
    1855     39888032 :           if (id->reset_location)
    1856            4 :             gimple_set_location (copy, input_location);
    1857     39888032 :           id->debug_stmts.safe_push (copy);
    1858     39888032 :           gimple_seq_add_stmt_without_update (&stmts, copy);
    1859     39888032 :           return stmts;
    1860              :         }
    1861     41860297 :       if (gimple_debug_source_bind_p (stmt))
    1862              :         {
    1863       466035 :           gdebug *copy = gimple_build_debug_source_bind
    1864       466035 :                            (gimple_debug_source_bind_get_var (stmt),
    1865              :                             gimple_debug_source_bind_get_value (stmt),
    1866       466035 :                             stmt);
    1867       466035 :           if (id->reset_location)
    1868            0 :             gimple_set_location (copy, input_location);
    1869       466035 :           id->debug_stmts.safe_push (copy);
    1870       466035 :           gimple_seq_add_stmt_without_update (&stmts, copy);
    1871       466035 :           return stmts;
    1872              :         }
    1873     41394262 :       if (gimple_debug_nonbind_marker_p (stmt))
    1874              :         {
    1875              :           /* If the inlined function has too many debug markers,
    1876              :              don't copy them.  */
    1877     12105896 :           if (id->src_cfun->debug_marker_count
    1878     12105896 :               > param_max_debug_marker_count
    1879     12105896 :               || id->reset_location)
    1880            0 :             return stmts;
    1881              : 
    1882     12105896 :           gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
    1883     12105896 :           id->debug_stmts.safe_push (copy);
    1884     12105896 :           gimple_seq_add_stmt_without_update (&stmts, copy);
    1885     12105896 :           return stmts;
    1886              :         }
    1887              : 
    1888              :       /* Create a new deep copy of the statement.  */
    1889     29288366 :       copy = gimple_copy (stmt);
    1890              : 
    1891              :       /* Clear flags that need revisiting.  */
    1892     29288366 :       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
    1893              :         {
    1894      4683811 :           if (gimple_call_tail_p (call_stmt))
    1895          127 :             gimple_call_set_tail (call_stmt, false);
    1896      4683811 :           if (gimple_call_from_thunk_p (call_stmt))
    1897          166 :             gimple_call_set_from_thunk (call_stmt, false);
    1898              :           /* Silently clear musttail flag when inlining a function
    1899              :              with must tail call from a non-musttail call.  The inlining
    1900              :              removes one frame so acts like musttail's intent, and we
    1901              :              can be inlining a function with musttail calls in the middle
    1902              :              of caller where musttail will always error.  */
    1903      4683811 :           if (gimple_call_must_tail_p (call_stmt)
    1904           49 :               && id->call_stmt
    1905      4683852 :               && !gimple_call_must_tail_p (id->call_stmt))
    1906           14 :             gimple_call_set_must_tail (call_stmt, false);
    1907      4683811 :           if (gimple_call_internal_p (call_stmt))
    1908        49212 :             switch (gimple_call_internal_fn (call_stmt))
    1909              :               {
    1910          163 :               case IFN_GOMP_SIMD_LANE:
    1911          163 :               case IFN_GOMP_SIMD_VF:
    1912          163 :               case IFN_GOMP_SIMD_LAST_LANE:
    1913          163 :               case IFN_GOMP_SIMD_ORDERED_START:
    1914          163 :               case IFN_GOMP_SIMD_ORDERED_END:
    1915          163 :                 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
    1916          163 :                 break;
    1917              :               default:
    1918              :                 break;
    1919              :               }
    1920              :         }
    1921              : 
    1922              :       /* Remap the region numbers for __builtin_eh_{pointer,filter},
    1923              :          RESX and EH_DISPATCH.  */
    1924     29288366 :       if (id->eh_map)
    1925     29288366 :         switch (gimple_code (copy))
    1926              :           {
    1927      4683811 :           case GIMPLE_CALL:
    1928      4683811 :             {
    1929      4683811 :               tree r, fndecl = gimple_call_fndecl (copy);
    1930      4683811 :               if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    1931      1300783 :                 switch (DECL_FUNCTION_CODE (fndecl))
    1932              :                   {
    1933            0 :                   case BUILT_IN_EH_COPY_VALUES:
    1934            0 :                     r = gimple_call_arg (copy, 1);
    1935            0 :                     r = remap_eh_region_tree_nr (r, id);
    1936            0 :                     gimple_call_set_arg (copy, 1, r);
    1937              :                     /* FALLTHRU */
    1938              : 
    1939         7799 :                   case BUILT_IN_EH_POINTER:
    1940         7799 :                   case BUILT_IN_EH_FILTER:
    1941         7799 :                     r = gimple_call_arg (copy, 0);
    1942         7799 :                     r = remap_eh_region_tree_nr (r, id);
    1943         7799 :                     gimple_call_set_arg (copy, 0, r);
    1944         7799 :                     break;
    1945              : 
    1946              :                   default:
    1947              :                     break;
    1948              :                   }
    1949              : 
    1950              :               /* Reset alias info if we didn't apply measures to
    1951              :                  keep it valid over inlining by setting DECL_PT_UID.  */
    1952      4683811 :               if (!id->src_cfun->gimple_df
    1953      4683811 :                   || !id->src_cfun->gimple_df->ipa_pta)
    1954      4677293 :                 gimple_call_reset_alias_info (as_a <gcall *> (copy));
    1955              :             }
    1956              :             break;
    1957              : 
    1958        68105 :           case GIMPLE_RESX:
    1959        68105 :             {
    1960        68105 :               gresx *resx_stmt = as_a <gresx *> (copy);
    1961        68105 :               int r = gimple_resx_region (resx_stmt);
    1962        68105 :               r = remap_eh_region_nr (r, id);
    1963        68105 :               gimple_resx_set_region (resx_stmt, r);
    1964              :             }
    1965        68105 :             break;
    1966              : 
    1967         9090 :           case GIMPLE_EH_DISPATCH:
    1968         9090 :             {
    1969         9090 :               geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
    1970         9090 :               int r = gimple_eh_dispatch_region (eh_dispatch);
    1971         9090 :               r = remap_eh_region_nr (r, id);
    1972         9090 :               gimple_eh_dispatch_set_region (eh_dispatch, r);
    1973              :             }
    1974         9090 :             break;
    1975              : 
    1976              :           default:
    1977              :             break;
    1978              :           }
    1979              :     }
    1980              : 
    1981              :   /* If STMT has a block defined, map it to the newly constructed block.  */
    1982     31339740 :   if (tree block = gimple_block (copy))
    1983              :     {
    1984     28265263 :       tree *n;
    1985     28265263 :       n = id->decl_map->get (block);
    1986     28265263 :       gcc_assert (n);
    1987     28265263 :       gimple_set_block (copy, *n);
    1988              :     }
    1989     31339740 :   if (id->param_body_adjs)
    1990              :     {
    1991      3155188 :       gimple_seq extra_stmts = NULL;
    1992      3155188 :       id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
    1993      3155188 :       if (!gimple_seq_empty_p (extra_stmts))
    1994              :         {
    1995            2 :           memset (&wi, 0, sizeof (wi));
    1996            2 :           wi.info = id;
    1997            2 :           for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
    1998            4 :                !gsi_end_p (egsi);
    1999            2 :                gsi_next (&egsi))
    2000            2 :             walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
    2001            2 :           gimple_seq_add_seq_without_update (&stmts, extra_stmts);
    2002              :         }
    2003              :     }
    2004              : 
    2005     31339740 :   if (id->reset_location)
    2006          756 :     gimple_set_location (copy, input_location);
    2007              : 
    2008              :   /* Debug statements ought to be rebuilt and not copied.  */
    2009     31339740 :   gcc_checking_assert (!is_gimple_debug (copy));
    2010              : 
    2011              :   /* Remap all the operands in COPY.  */
    2012     31339740 :   memset (&wi, 0, sizeof (wi));
    2013     31339740 :   wi.info = id;
    2014     31339740 :   if (skip_first)
    2015      2051307 :     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
    2016              :   else
    2017     29288433 :     walk_gimple_op (copy, remap_gimple_op_r, &wi);
    2018              : 
    2019              :   /* Clear the copied virtual operands.  We are not remapping them here
    2020              :      but are going to recreate them from scratch.  */
    2021     31339740 :   if (gimple_has_mem_ops (copy))
    2022              :     {
    2023     26818203 :       gimple_set_vdef (copy, NULL_TREE);
    2024     26818203 :       gimple_set_vuse (copy, NULL_TREE);
    2025              :     }
    2026              : 
    2027     31339740 :   if (cfun->can_throw_non_call_exceptions)
    2028              :     {
    2029              :       /* When inlining a function which does not have non-call exceptions
    2030              :          enabled into a function that has (which only happens with
    2031              :          always-inline) we have to fixup stmts that cannot throw.  */
    2032      1707670 :       if (gcond *cond = dyn_cast <gcond *> (copy))
    2033       199356 :         if (gimple_could_trap_p (cond))
    2034              :           {
    2035            1 :             gassign *cmp
    2036            1 :               = gimple_build_assign (make_ssa_name (boolean_type_node),
    2037              :                                      gimple_cond_code (cond),
    2038              :                                      gimple_cond_lhs (cond),
    2039              :                                      gimple_cond_rhs (cond));
    2040            1 :             gimple_seq_add_stmt_without_update (&stmts, cmp);
    2041            1 :             gimple_cond_set_code (cond, NE_EXPR);
    2042            1 :             gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
    2043            1 :             gimple_cond_set_rhs (cond, boolean_false_node);
    2044              :           }
    2045              :     }
    2046              : 
    2047     31339740 :   gimple_seq_add_stmt_without_update (&stmts, copy);
    2048     31339740 :   return stmts;
    2049              : }
    2050              : 
    2051              : 
    2052              : /* Copy basic block, scale profile accordingly.  Edges will be taken care of
    2053              :    later  */
    2054              : 
    2055              : static basic_block
    2056     13790710 : copy_bb (copy_body_data *id, basic_block bb,
    2057              :          profile_count num, profile_count den)
    2058              : {
    2059     13790710 :   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
    2060     13790710 :   basic_block copy_basic_block;
    2061     13790710 :   tree decl;
    2062     13790710 :   basic_block prev;
    2063              : 
    2064     13790710 :   profile_count::adjust_for_ipa_scaling (&num, &den);
    2065              : 
    2066              :   /* Search for previous copied basic block.  */
    2067     13790710 :   prev = bb->prev_bb;
    2068     13940245 :   while (!prev->aux)
    2069       149535 :     prev = prev->prev_bb;
    2070              : 
    2071              :   /* create_basic_block() will append every new block to
    2072              :      basic_block_info automatically.  */
    2073     13790710 :   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
    2074     13790710 :   copy_basic_block->count = bb->count.apply_scale (num, den);
    2075              : 
    2076     13790710 :   copy_gsi = gsi_start_bb (copy_basic_block);
    2077              : 
    2078     13790710 :   unsigned min_cond_uid = 0;
    2079     13790710 :   if (id->src_cfun->cond_uids)
    2080              :     {
    2081           23 :       if (!cfun->cond_uids)
    2082            3 :         cfun->cond_uids = new hash_map <gcond*, unsigned> ();
    2083              : 
    2084           92 :       for (auto itr : *id->src_cfun->cond_uids)
    2085           23 :         if (itr.second >= min_cond_uid)
    2086           23 :           min_cond_uid = itr.second + 1;
    2087              :     }
    2088              : 
    2089    113641929 :   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    2090              :     {
    2091     86060509 :       gimple_seq stmts;
    2092     86060509 :       gimple *stmt = gsi_stmt (gsi);
    2093     86060509 :       gimple *orig_stmt = stmt;
    2094     86060509 :       gimple_stmt_iterator stmts_gsi;
    2095     86060509 :       bool stmt_added = false;
    2096              : 
    2097     86060509 :       id->regimplify = false;
    2098     86060509 :       stmts = remap_gimple_stmt (stmt, id);
    2099              : 
    2100     86060509 :       if (gimple_seq_empty_p (stmts))
    2101      2259692 :         continue;
    2102              : 
    2103     83800999 :       seq_gsi = copy_gsi;
    2104              : 
    2105     83800999 :       for (stmts_gsi = gsi_start (stmts);
    2106    167602001 :            !gsi_end_p (stmts_gsi); )
    2107              :         {
    2108     83801002 :           stmt = gsi_stmt (stmts_gsi);
    2109              : 
    2110              :           /* Advance iterator now before stmt is moved to seq_gsi.  */
    2111     83801002 :           gsi_next (&stmts_gsi);
    2112              : 
    2113     83801002 :           if (gimple_nop_p (stmt))
    2114          182 :               continue;
    2115              : 
    2116              :           /* If -fcondition-coverage is used, register the inlined conditions
    2117              :              in the cond->expression mapping of the caller.  The expression tag
    2118              :              is shifted conditions from the two bodies are not mixed.  */
    2119     83800820 :           if (id->src_cfun->cond_uids && is_a <gcond*> (stmt))
    2120              :             {
    2121            5 :               gcond *orig_cond = as_a <gcond*> (orig_stmt);
    2122            5 :               gcond *cond = as_a <gcond*> (stmt);
    2123            5 :               unsigned *v = id->src_cfun->cond_uids->get (orig_cond);
    2124            5 :               if (v)
    2125            5 :                 cfun->cond_uids->put (cond, *v + min_cond_uid);
    2126              :             }
    2127              : 
    2128     83800820 :           gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
    2129              :                                             orig_stmt);
    2130              : 
    2131     83800820 :           gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
    2132              : 
    2133     83800820 :           if (id->regimplify)
    2134         4291 :             gimple_regimplify_operands (stmt, &seq_gsi);
    2135              : 
    2136              :           stmt_added = true;
    2137              :         }
    2138              : 
    2139     83800999 :       if (!stmt_added)
    2140          182 :         continue;
    2141              : 
    2142              :       /* If copy_basic_block has been empty at the start of this iteration,
    2143              :          call gsi_start_bb again to get at the newly added statements.  */
    2144     83800817 :       if (gsi_end_p (copy_gsi))
    2145     24272328 :         copy_gsi = gsi_start_bb (copy_basic_block);
    2146              :       else
    2147     71664653 :         gsi_next (&copy_gsi);
    2148              : 
    2149              :       /* Process the new statement.  The call to gimple_regimplify_operands
    2150              :          possibly turned the statement into multiple statements, we
    2151              :          need to process all of them.  */
    2152     83800824 :       do
    2153              :         {
    2154     83800824 :           tree fn;
    2155     83800824 :           gcall *call_stmt;
    2156              : 
    2157     83800824 :           stmt = gsi_stmt (copy_gsi);
    2158     83800824 :           call_stmt = dyn_cast <gcall *> (stmt);
    2159      4683811 :           if (call_stmt
    2160      4683811 :               && gimple_call_va_arg_pack_p (call_stmt)
    2161          323 :               && id->call_stmt
    2162          321 :               && ! gimple_call_va_arg_pack_p (id->call_stmt))
    2163              :             {
    2164              :               /* __builtin_va_arg_pack () should be replaced by
    2165              :                  all arguments corresponding to ... in the caller.  */
    2166          307 :               tree p;
    2167          307 :               gcall *new_call;
    2168          307 :               vec<tree> argarray;
    2169          307 :               size_t nargs_caller = gimple_call_num_args (id->call_stmt);
    2170          307 :               size_t nargs = nargs_caller;
    2171              : 
    2172          789 :               for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
    2173              :                 {
    2174              :                   /* Avoid crashing on invalid IL that doesn't have a
    2175              :                      varargs function or that passes not enough arguments.  */
    2176          498 :                   if (nargs == 0)
    2177              :                     break;
    2178          482 :                   nargs--;
    2179              :                 }
    2180              : 
    2181              :               /* Create the new array of arguments.  */
    2182          307 :               size_t nargs_callee = gimple_call_num_args (call_stmt);
    2183          307 :               size_t n = nargs + nargs_callee;
    2184          307 :               argarray.create (n);
    2185          307 :               argarray.safe_grow_cleared (n, true);
    2186              : 
    2187              :               /* Copy all the arguments before '...'  */
    2188          307 :               if (nargs_callee)
    2189          610 :                 memcpy (argarray.address (),
    2190          305 :                         gimple_call_arg_ptr (call_stmt, 0),
    2191              :                         nargs_callee * sizeof (tree));
    2192              : 
    2193              :               /* Append the arguments passed in '...'  */
    2194          307 :               if (nargs)
    2195          173 :                 memcpy (argarray.address () + nargs_callee,
    2196          173 :                         gimple_call_arg_ptr (id->call_stmt, 0)
    2197          173 :                         + (nargs_caller - nargs), nargs * sizeof (tree));
    2198              : 
    2199          307 :               new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
    2200              :                                                 argarray);
    2201              : 
    2202          307 :               argarray.release ();
    2203              : 
    2204              :               /* Copy all GIMPLE_CALL flags, location and block, except
    2205              :                  GF_CALL_VA_ARG_PACK.  */
    2206          307 :               gimple_call_copy_flags (new_call, call_stmt);
    2207          307 :               gimple_call_set_va_arg_pack (new_call, false);
    2208          614 :               gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
    2209              :               /* location includes block.  */
    2210          307 :               gimple_set_location (new_call, gimple_location (stmt));
    2211          307 :               gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
    2212              : 
    2213          307 :               gsi_replace (&copy_gsi, new_call, false);
    2214          307 :               stmt = new_call;
    2215              :             }
    2216     83800517 :           else if (call_stmt
    2217      4683504 :                    && id->call_stmt
    2218      3821448 :                    && (decl = gimple_call_fndecl (stmt))
    2219     87492638 :                    && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
    2220              :             {
    2221              :               /* __builtin_va_arg_pack_len () should be replaced by
    2222              :                  the number of anonymous arguments.  */
    2223          147 :               size_t nargs = gimple_call_num_args (id->call_stmt);
    2224          147 :               tree count, p;
    2225          147 :               gimple *new_stmt;
    2226              : 
    2227          423 :               for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
    2228          276 :                 nargs--;
    2229              : 
    2230          147 :               if (!gimple_call_lhs (stmt))
    2231              :                 {
    2232              :                   /* Drop unused calls.  */
    2233            1 :                   gsi_remove (&copy_gsi, false);
    2234            1 :                   continue;
    2235              :                 }
    2236          146 :               else if (!gimple_call_va_arg_pack_p (id->call_stmt))
    2237              :                 {
    2238          124 :                   count = build_int_cst (integer_type_node, nargs);
    2239          124 :                   new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
    2240          124 :                   gsi_replace (&copy_gsi, new_stmt, false);
    2241          124 :                   stmt = new_stmt;
    2242              :                 }
    2243           22 :               else if (nargs != 0)
    2244              :                 {
    2245            7 :                   tree newlhs = make_ssa_name (integer_type_node);
    2246            7 :                   count = build_int_cst (integer_type_node, nargs);
    2247            7 :                   new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
    2248              :                                                   PLUS_EXPR, newlhs, count);
    2249            7 :                   gimple_call_set_lhs (stmt, newlhs);
    2250            7 :                   gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
    2251              :                 }
    2252              :             }
    2253     83800370 :           else if (call_stmt
    2254      4683357 :                    && id->call_stmt
    2255     87621671 :                    && gimple_call_internal_p (stmt))
    2256        38078 :             switch (gimple_call_internal_fn (stmt))
    2257              :               {
    2258          146 :               case IFN_TSAN_FUNC_EXIT:
    2259              :                 /* Drop .TSAN_FUNC_EXIT () internal calls during inlining.  */
    2260          146 :                 gsi_remove (&copy_gsi, false);
    2261          146 :                 continue;
    2262         1392 :               case IFN_ASAN_MARK:
    2263              :                 /* Drop .ASAN_MARK internal calls during inlining into
    2264              :                    no_sanitize functions.  */
    2265         1392 :                 if (!sanitize_flags_p (SANITIZE_ADDRESS, id->dst_fn)
    2266         1392 :                     && !sanitize_flags_p (SANITIZE_HWADDRESS, id->dst_fn))
    2267              :                   {
    2268           14 :                     gsi_remove (&copy_gsi, false);
    2269           14 :                     continue;
    2270              :                   }
    2271              :                 break;
    2272              :               default:
    2273              :                 break;
    2274              :               }
    2275              : 
    2276              :           /* Statements produced by inlining can be unfolded, especially
    2277              :              when we constant propagated some operands.  We can't fold
    2278              :              them right now for two reasons:
    2279              :              1) folding require SSA_NAME_DEF_STMTs to be correct
    2280              :              2) we can't change function calls to builtins.
    2281              :              So we just mark statement for later folding.  We mark
    2282              :              all new statements, instead just statements that has changed
    2283              :              by some nontrivial substitution so even statements made
    2284              :              foldable indirectly are updated.  If this turns out to be
    2285              :              expensive, copy_body can be told to watch for nontrivial
    2286              :              changes.  */
    2287     83800663 :           if (id->statements_to_fold)
    2288     83800663 :             id->statements_to_fold->add (stmt);
    2289              : 
    2290              :           /* We're duplicating a CALL_EXPR.  Find any corresponding
    2291              :              callgraph edges and update or duplicate them.  */
    2292     83800663 :           if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    2293              :             {
    2294      4683526 :               struct cgraph_edge *edge;
    2295              : 
    2296      4683526 :               switch (id->transform_call_graph_edges)
    2297              :                 {
    2298      3821470 :                 case CB_CGE_DUPLICATE:
    2299      3821470 :                   edge = id->src_node->get_edge (orig_stmt);
    2300      3821470 :                   if (edge)
    2301              :                     {
    2302      3783552 :                       struct cgraph_edge *old_edge = edge;
    2303              : 
    2304              :                       /* A speculative call is consist of multiple
    2305              :                          edges - indirect edge and one or more direct edges
    2306              :                          Duplicate the whole thing and distribute frequencies
    2307              :                          accordingly.  */
    2308      3783552 :                       if (edge->speculative)
    2309              :                         {
    2310        19796 :                           int n = 0;
    2311        19796 :                           profile_count direct_cnt
    2312        19796 :                                  = profile_count::zero ();
    2313              : 
    2314              :                           /* First figure out the distribution of counts
    2315              :                              so we can re-scale BB profile accordingly.  */
    2316        45498 :                           for (cgraph_edge *e = old_edge; e;
    2317        25702 :                                e = e->next_speculative_call_target ())
    2318        25702 :                             direct_cnt = direct_cnt + e->count;
    2319              : 
    2320        19796 :                           cgraph_edge *indirect
    2321        19796 :                                  = old_edge->speculative_call_indirect_edge ();
    2322        19796 :                           profile_count indir_cnt = indirect->count;
    2323              : 
    2324              :                           /* Next iterate all direct edges, clone it and its
    2325              :                              corresponding reference and update profile.  */
    2326        19796 :                           for (cgraph_edge *e = old_edge;
    2327        45498 :                                e;
    2328        25702 :                                e = e->next_speculative_call_target ())
    2329              :                             {
    2330        25702 :                               profile_count cnt = e->count;
    2331              : 
    2332        25702 :                               id->dst_node->clone_reference
    2333        25702 :                                  (e->speculative_call_target_ref (), stmt);
    2334        25702 :                               edge = e->clone (id->dst_node, call_stmt,
    2335              :                                                gimple_uid (stmt), num, den,
    2336              :                                                true);
    2337        25702 :                               profile_probability prob
    2338        25702 :                                  = cnt.probability_in (direct_cnt
    2339              :                                                        + indir_cnt);
    2340        25702 :                               edge->count
    2341              :                                  = copy_basic_block->count.apply_probability
    2342        25702 :                                          (prob);
    2343        25702 :                               n++;
    2344              :                             }
    2345        19796 :                           gcc_checking_assert
    2346              :                                  (indirect->num_speculative_call_targets_p ()
    2347              :                                   == n);
    2348              : 
    2349              :                           /* Duplicate the indirect edge after all direct edges
    2350              :                              cloned.  */
    2351        19796 :                           indirect = indirect->clone (id->dst_node, call_stmt,
    2352              :                                                       gimple_uid (stmt),
    2353              :                                                       num, den,
    2354              :                                                       true);
    2355              : 
    2356        19796 :                           profile_probability prob
    2357        19796 :                              = indir_cnt.probability_in (direct_cnt
    2358              :                                                          + indir_cnt);
    2359        19796 :                           indirect->count
    2360        19796 :                              = copy_basic_block->count.apply_probability (prob);
    2361              :                         }
    2362              :                       /* If edge is a callback-carrying edge, copy all its
    2363              :                          attached edges as well.  */
    2364      3763756 :                       else if (edge->has_callback)
    2365              :                         {
    2366         2354 :                           edge
    2367         2354 :                             = edge->clone (id->dst_node, call_stmt,
    2368              :                                            gimple_uid (stmt), num, den, true);
    2369         2354 :                           cgraph_edge *e;
    2370         2861 :                           for (e = old_edge->first_callback_edge (); e;
    2371          507 :                                e = e->next_callback_edge ())
    2372          507 :                             edge = e->clone (id->dst_node, call_stmt,
    2373              :                                              gimple_uid (stmt), num, den, true);
    2374              :                         }
    2375              :                       else
    2376              :                         {
    2377      3761402 :                           edge = edge->clone (id->dst_node, call_stmt,
    2378              :                                               gimple_uid (stmt),
    2379              :                                               num, den,
    2380              :                                               true);
    2381      3761402 :                           edge->count = copy_basic_block->count;
    2382              :                         }
    2383              :                     }
    2384              :                   break;
    2385              : 
    2386       674419 :                 case CB_CGE_MOVE_CLONES:
    2387       674419 :                   id->dst_node->set_call_stmt_including_clones (orig_stmt,
    2388              :                                                                 call_stmt);
    2389       674419 :                   edge = id->dst_node->get_edge (stmt);
    2390       674419 :                   break;
    2391              : 
    2392       187637 :                 case CB_CGE_MOVE:
    2393       187637 :                   edge = id->dst_node->get_edge (orig_stmt);
    2394       187637 :                   if (edge)
    2395       187279 :                     edge = cgraph_edge::set_call_stmt (edge, call_stmt);
    2396              :                   break;
    2397              : 
    2398            0 :                 default:
    2399            0 :                   gcc_unreachable ();
    2400              :                 }
    2401              : 
    2402              :               /* Constant propagation on argument done during inlining
    2403              :                  may create new direct call.  Produce an edge for it.  */
    2404      3761402 :               if ((!edge
    2405      4535355 :                    || (edge->indirect_inlining_edge
    2406         3909 :                        && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
    2407       148639 :                   && id->dst_node->definition
    2408       932331 :                   && (fn = gimple_call_fndecl (stmt)) != NULL)
    2409              :                 {
    2410            0 :                   struct cgraph_node *dest = cgraph_node::get_create (fn);
    2411              : 
    2412              :                   /* We have missing edge in the callgraph.  This can happen
    2413              :                      when previous inlining turned an indirect call into a
    2414              :                      direct call by constant propagating arguments or we are
    2415              :                      producing dead clone (for further cloning).  In all
    2416              :                      other cases we hit a bug (incorrect node sharing is the
    2417              :                      most common reason for missing edges).  */
    2418            0 :                   gcc_assert (!dest->definition
    2419              :                               || dest->address_taken
    2420              :                               || !id->src_node->definition
    2421              :                               || !id->dst_node->definition);
    2422            0 :                   if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
    2423            0 :                     id->dst_node->create_edge_including_clones
    2424            0 :                       (dest, orig_stmt, call_stmt, bb->count,
    2425              :                        CIF_ORIGINALLY_INDIRECT_CALL);
    2426              :                   else
    2427            0 :                     id->dst_node->create_edge (dest, call_stmt,
    2428              :                                         bb->count)->inline_failed
    2429            0 :                       = CIF_ORIGINALLY_INDIRECT_CALL;
    2430            0 :                   if (dump_file)
    2431              :                     {
    2432            0 :                       fprintf (dump_file, "Created new direct edge to %s\n",
    2433              :                                dest->dump_name ());
    2434              :                     }
    2435              :                 }
    2436              : 
    2437      4683526 :               notice_special_calls (as_a <gcall *> (stmt));
    2438              :             }
    2439              : 
    2440     83800663 :           maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
    2441              :                                       id->eh_map, id->eh_lp_nr);
    2442              : 
    2443     83800663 :           gsi_next (&copy_gsi);
    2444              :         }
    2445     83800824 :       while (!gsi_end_p (copy_gsi));
    2446              : 
    2447    167601634 :       copy_gsi = gsi_last_bb (copy_basic_block);
    2448              :     }
    2449              : 
    2450     13790710 :   return copy_basic_block;
    2451              : }
    2452              : 
    2453              : /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
    2454              :    form is quite easy, since dominator relationship for old basic blocks does
    2455              :    not change.
    2456              : 
    2457              :    There is however exception where inlining might change dominator relation
    2458              :    across EH edges from basic block within inlined functions destinating
    2459              :    to landing pads in function we inline into.
    2460              : 
    2461              :    The function fills in PHI_RESULTs of such PHI nodes if they refer
    2462              :    to gimple regs.  Otherwise, the function mark PHI_RESULT of such
    2463              :    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
    2464              :    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
    2465              :    set, and this means that there will be no overlapping live ranges
    2466              :    for the underlying symbol.
    2467              : 
    2468              :    This might change in future if we allow redirecting of EH edges and
    2469              :    we might want to change way build CFG pre-inlining to include
    2470              :    all the possible edges then.  */
    2471              : static void
    2472       682808 : update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
    2473              :                                   bool can_throw, bool nonlocal_goto)
    2474              : {
    2475       682808 :   edge e;
    2476       682808 :   edge_iterator ei;
    2477              : 
    2478      1878663 :   FOR_EACH_EDGE (e, ei, bb->succs)
    2479      1195855 :     if (!e->dest->aux
    2480       633997 :         || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
    2481              :       {
    2482       561858 :         gphi *phi;
    2483       561858 :         gphi_iterator si;
    2484              : 
    2485       561858 :         if (!nonlocal_goto)
    2486       561425 :           gcc_assert (e->flags & EDGE_EH);
    2487              : 
    2488       561858 :         if (!can_throw)
    2489          224 :           gcc_assert (!(e->flags & EDGE_EH));
    2490              : 
    2491       952225 :         for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
    2492              :           {
    2493       390367 :             edge re;
    2494              : 
    2495       390367 :             phi = si.phi ();
    2496              : 
    2497              :             /* For abnormal goto/call edges the receiver can be the
    2498              :                ENTRY_BLOCK.  Do not assert this cannot happen.  */
    2499              : 
    2500       390367 :             gcc_assert ((e->flags & EDGE_EH)
    2501              :                         || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
    2502              : 
    2503       390367 :             re = find_edge (ret_bb, e->dest);
    2504       390367 :             gcc_checking_assert (re);
    2505       390367 :             gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
    2506              :                         == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
    2507              : 
    2508       390367 :             SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
    2509              :                      USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
    2510              :           }
    2511              :       }
    2512       682808 : }
    2513              : 
    2514              : /* Insert clobbers for automatic variables of inlined ID->src_fn
    2515              :    function at the start of basic block ID->eh_landing_pad_dest.  */
    2516              : 
    2517              : static void
    2518       393611 : add_clobbers_to_eh_landing_pad (copy_body_data *id)
    2519              : {
    2520       393611 :   tree var;
    2521       393611 :   basic_block bb = id->eh_landing_pad_dest;
    2522       393611 :   live_vars_map *vars = NULL;
    2523       393611 :   unsigned int cnt = 0;
    2524       393611 :   unsigned int i;
    2525       904619 :   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
    2526       511008 :     if (VAR_P (var)
    2527       511008 :         && !DECL_HARD_REGISTER (var)
    2528       511008 :         && !TREE_THIS_VOLATILE (var)
    2529       510988 :         && !DECL_HAS_VALUE_EXPR_P (var)
    2530       502546 :         && !is_gimple_reg (var)
    2531       203218 :         && auto_var_in_fn_p (var, id->src_fn)
    2532       712211 :         && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
    2533              :       {
    2534       201203 :         tree *t = id->decl_map->get (var);
    2535       201203 :         if (!t)
    2536            0 :           continue;
    2537       201203 :         tree new_var = *t;
    2538       201203 :         if (VAR_P (new_var)
    2539       201203 :             && !DECL_HARD_REGISTER (new_var)
    2540       201203 :             && !TREE_THIS_VOLATILE (new_var)
    2541       201203 :             && !DECL_HAS_VALUE_EXPR_P (new_var)
    2542       201203 :             && !is_gimple_reg (new_var)
    2543       402406 :             && auto_var_in_fn_p (new_var, id->dst_fn))
    2544              :           {
    2545       201203 :             if (vars == NULL)
    2546       120004 :               vars = new live_vars_map;
    2547       201203 :             vars->put (DECL_UID (var), cnt++);
    2548              :           }
    2549              :       }
    2550       393611 :   if (vars == NULL)
    2551       273607 :     return;
    2552              : 
    2553       120004 :   vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
    2554       421451 :   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
    2555       301447 :     if (VAR_P (var))
    2556              :       {
    2557       301447 :         edge e;
    2558       301447 :         edge_iterator ei;
    2559       301447 :         bool needed = false;
    2560       301447 :         unsigned int *v = vars->get (DECL_UID (var));
    2561       301447 :         if (v == NULL)
    2562       100244 :           continue;
    2563      4496227 :         FOR_EACH_EDGE (e, ei, bb->preds)
    2564      4408902 :           if ((e->flags & EDGE_EH) != 0
    2565      4408878 :               && e->src->index >= id->add_clobbers_to_eh_landing_pads)
    2566              :             {
    2567       327964 :               basic_block src_bb = (basic_block) e->src->aux;
    2568              : 
    2569       327964 :               if (bitmap_bit_p (&live[src_bb->index], *v))
    2570              :                 {
    2571              :                   needed = true;
    2572              :                   break;
    2573              :                 }
    2574              :             }
    2575       201203 :         if (needed)
    2576              :           {
    2577       113878 :             tree new_var = *id->decl_map->get (var);
    2578       113878 :             gimple_stmt_iterator gsi = gsi_after_labels (bb);
    2579       113878 :             tree clobber = build_clobber (TREE_TYPE (new_var));
    2580       113878 :             gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
    2581       113878 :             gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
    2582              :           }
    2583              :       }
    2584       120004 :   destroy_live_vars (live);
    2585       120004 :   delete vars;
    2586              : }
    2587              : 
    2588              : /* Copy edges from BB into its copy constructed earlier, scale profile
    2589              :    accordingly.  Edges will be taken care of later.  Assume aux
    2590              :    pointers to point to the copies of each BB.  Return true if any
    2591              :    debug stmts are left after a statement that must end the basic block.  */
    2592              : 
    2593              : static bool
    2594     22794672 : copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
    2595              :                    basic_block ret_bb, basic_block abnormal_goto_dest,
    2596              :                    copy_body_data *id)
    2597              : {
    2598     22794672 :   basic_block new_bb = (basic_block) bb->aux;
    2599     22794672 :   edge_iterator ei;
    2600     22794672 :   edge old_edge;
    2601     22794672 :   gimple_stmt_iterator si;
    2602     22794672 :   bool need_debug_cleanup = false;
    2603              : 
    2604              :   /* Use the indices from the original blocks to create edges for the
    2605              :      new ones.  */
    2606     44106691 :   FOR_EACH_EDGE (old_edge, ei, bb->succs)
    2607     21312019 :     if (!(old_edge->flags & EDGE_EH))
    2608              :       {
    2609     21190805 :         edge new_edge;
    2610     21190805 :         int flags = old_edge->flags;
    2611     21190805 :         location_t locus = old_edge->goto_locus;
    2612              : 
    2613              :         /* Return edges do get a FALLTHRU flag when they get inlined.  */
    2614     21190805 :         if (old_edge->dest->index == EXIT_BLOCK
    2615      4502658 :             && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
    2616      4502658 :             && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
    2617      4283537 :           flags |= EDGE_FALLTHRU;
    2618              : 
    2619     21190805 :         new_edge
    2620     21190805 :           = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
    2621     21190805 :         new_edge->probability = old_edge->probability;
    2622     21190805 :         if (!id->reset_location)
    2623     21190215 :           new_edge->goto_locus = remap_location (locus, id);
    2624              :       }
    2625              : 
    2626     22794672 :   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    2627              :     return false;
    2628              : 
    2629              :   /* When doing function splitting, we must decrease count of the return block
    2630              :      which was previously reachable by block we did not copy.  */
    2631     13790710 :   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
    2632     10716569 :     FOR_EACH_EDGE (old_edge, ei, bb->preds)
    2633      6213911 :       if (old_edge->src->index != ENTRY_BLOCK
    2634      3555651 :           && !old_edge->src->aux)
    2635        44350 :         new_bb->count -= old_edge->count ().apply_scale (num, den);
    2636              : 
    2637              :   /* Walk stmts from end to start so that splitting will adjust the BB
    2638              :      pointer for each stmt at most once, even when we split the block
    2639              :      multiple times.  */
    2640     13790710 :   bool seen_nondebug = false;
    2641     13790710 :   for (si = gsi_last_bb (new_bb); !gsi_end_p (si);)
    2642              :     {
    2643     83800670 :       bool can_throw, nonlocal_goto;
    2644     83800670 :       gimple *copy_stmt = gsi_stmt (si);
    2645              : 
    2646              :       /* Do this before the possible split_block.  */
    2647     83800670 :       gsi_prev (&si);
    2648              : 
    2649              :       /* If this tree could throw an exception, there are two
    2650              :          cases where we need to add abnormal edge(s): the
    2651              :          tree wasn't in a region and there is a "current
    2652              :          region" in the caller; or the original tree had
    2653              :          EH edges.  In both cases split the block after the tree,
    2654              :          and add abnormal edge(s) as needed; we need both
    2655              :          those from the callee and the caller.
    2656              :          We check whether the copy can throw, because the const
    2657              :          propagation can change an INDIRECT_REF which throws
    2658              :          into a COMPONENT_REF which doesn't.  If the copy
    2659              :          can throw, the original could also throw.  */
    2660     83800670 :       can_throw = stmt_can_throw_internal (cfun, copy_stmt);
    2661     83800670 :       nonlocal_goto
    2662     83800670 :         = (stmt_can_make_abnormal_goto (copy_stmt)
    2663     83800670 :            && !computed_goto_p (copy_stmt));
    2664              : 
    2665     83800179 :       if (can_throw || nonlocal_goto)
    2666              :         {
    2667              :           /* If there's only debug insns after copy_stmt don't split
    2668              :              the block but instead mark the block for cleanup.  */
    2669       682946 :           if (!seen_nondebug)
    2670              :             need_debug_cleanup = true;
    2671              :           else
    2672              :             {
    2673              :               /* Note that bb's predecessor edges aren't necessarily
    2674              :                  right at this point; split_block doesn't care.  */
    2675       247483 :               edge e = split_block (new_bb, copy_stmt);
    2676       247483 :               e->dest->aux = new_bb->aux;
    2677       247483 :               seen_nondebug = false;
    2678              :             }
    2679              :         }
    2680              : 
    2681     83800670 :       if (!is_gimple_debug (copy_stmt))
    2682     31339411 :         seen_nondebug = true;
    2683              : 
    2684     83800670 :       bool update_probs = false;
    2685              : 
    2686     83800670 :       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
    2687              :         {
    2688         9090 :           make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
    2689         9090 :           update_probs = true;
    2690              :         }
    2691     83791580 :       else if (can_throw)
    2692              :         {
    2693       682584 :           make_eh_edge (copy_stmt);
    2694       682584 :           update_probs = true;
    2695              :         }
    2696              : 
    2697              :       /* EH edges may not match old edges.  Copy as much as possible.  */
    2698       691674 :       if (update_probs)
    2699              :         {
    2700       691674 :           edge e;
    2701       691674 :           edge_iterator ei;
    2702       691674 :           basic_block copy_stmt_bb = gimple_bb (copy_stmt);
    2703              : 
    2704      1367018 :           FOR_EACH_EDGE (old_edge, ei, bb->succs)
    2705       675344 :             if ((old_edge->flags & EDGE_EH)
    2706       129817 :                 && (e = find_edge (copy_stmt_bb,
    2707       129817 :                                    (basic_block) old_edge->dest->aux))
    2708       796403 :                 && (e->flags & EDGE_EH))
    2709       121059 :               e->probability = old_edge->probability;
    2710              : 
    2711      1897115 :           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
    2712      1205441 :             if (e->flags & EDGE_EH)
    2713              :               {
    2714       682584 :                 if (!e->probability.initialized_p ())
    2715       561713 :                   e->probability = profile_probability::never ();
    2716       682584 :                 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
    2717              :                   {
    2718       560601 :                     if (id->eh_landing_pad_dest == NULL)
    2719       393611 :                       id->eh_landing_pad_dest = e->dest;
    2720              :                     else
    2721       166990 :                       gcc_assert (id->eh_landing_pad_dest == e->dest);
    2722              :                   }
    2723              :               }
    2724              :         }
    2725              : 
    2726              : 
    2727              :       /* If the call we inline cannot make abnormal goto do not add
    2728              :          additional abnormal edges but only retain those already present
    2729              :          in the original function body.  */
    2730     83800670 :       if (abnormal_goto_dest == NULL)
    2731              :         nonlocal_goto = false;
    2732         1092 :       if (nonlocal_goto)
    2733              :         {
    2734          333 :           basic_block copy_stmt_bb = gimple_bb (copy_stmt);
    2735              : 
    2736          333 :           if (get_abnormal_succ_dispatcher (copy_stmt_bb))
    2737              :             nonlocal_goto = false;
    2738              :           /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
    2739              :              in OpenMP regions which aren't allowed to be left abnormally.
    2740              :              So, no need to add abnormal edge in that case.  */
    2741          333 :           else if (is_gimple_call (copy_stmt)
    2742          333 :                    && gimple_call_internal_p (copy_stmt)
    2743            0 :                    && (gimple_call_internal_fn (copy_stmt)
    2744              :                        == IFN_ABNORMAL_DISPATCHER)
    2745          333 :                    && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
    2746              :             nonlocal_goto = false;
    2747              :           else
    2748              :             {
    2749          333 :               make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
    2750              :                                      EDGE_ABNORMAL);
    2751          333 :               gimple_call_set_ctrl_altering (copy_stmt, true);
    2752          333 :               if (is_a <gcall *> (copy_stmt)
    2753          333 :                   && (gimple_call_flags (copy_stmt) & ECF_NORETURN))
    2754           75 :                 fixup_noreturn_call (copy_stmt);
    2755              :             }
    2756              :         }
    2757              : 
    2758     83800670 :       if ((can_throw || nonlocal_goto)
    2759    181392050 :           && gimple_in_ssa_p (cfun))
    2760       682808 :         update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
    2761              :                                           can_throw, nonlocal_goto);
    2762              :     }
    2763              :   return need_debug_cleanup;
    2764              : }
    2765              : 
    2766              : /* Copy the PHIs.  All blocks and edges are copied, some blocks
    2767              :    was possibly split and new outgoing EH edges inserted.
    2768              :    BB points to the block of original function and AUX pointers links
    2769              :    the original and newly copied blocks.  */
    2770              : 
    2771              : static void
    2772     22794672 : copy_phis_for_bb (basic_block bb, copy_body_data *id)
    2773              : {
    2774     22794672 :   basic_block const new_bb = (basic_block) bb->aux;
    2775     22794672 :   edge_iterator ei;
    2776     22794672 :   gphi *phi;
    2777     22794672 :   gphi_iterator si;
    2778     22794672 :   edge new_edge;
    2779     22794672 :   bool inserted = false;
    2780              : 
    2781     25744881 :   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    2782              :     {
    2783      2950209 :       tree res, new_res;
    2784      2950209 :       gphi *new_phi;
    2785              : 
    2786      2950209 :       phi = si.phi ();
    2787      2950209 :       res = PHI_RESULT (phi);
    2788      2950209 :       new_res = res;
    2789      2950209 :       if (!virtual_operand_p (res)
    2790      2950209 :           && (!id->param_body_adjs
    2791      1519184 :               || !id->param_body_adjs->m_dead_stmts.contains (phi)))
    2792              :         {
    2793      1614924 :           walk_tree (&new_res, copy_tree_body_r, id, NULL);
    2794      1614924 :           if (EDGE_COUNT (new_bb->preds) == 0)
    2795              :             {
    2796              :               /* Technically we'd want a SSA_DEFAULT_DEF here... */
    2797            0 :               SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
    2798              :             }
    2799              :           else
    2800              :             {
    2801      1614924 :               new_phi = create_phi_node (new_res, new_bb);
    2802      5257448 :               FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
    2803              :                 {
    2804      3642524 :                   edge old_edge = find_edge ((basic_block) new_edge->src->aux,
    2805      3642524 :                                              bb);
    2806      3642524 :                   tree arg;
    2807      3642524 :                   tree new_arg;
    2808      3642524 :                   edge_iterator ei2;
    2809      3642524 :                   location_t locus;
    2810              : 
    2811              :                   /* When doing partial cloning, we allow PHIs on the entry
    2812              :                      block as long as all the arguments are the same.
    2813              :                      Find any input edge to see argument to copy.  */
    2814      3642524 :                   if (!old_edge)
    2815         2076 :                     FOR_EACH_EDGE (old_edge, ei2, bb->preds)
    2816         2076 :                       if (!old_edge->src->aux)
    2817              :                         break;
    2818              : 
    2819      3642524 :                   arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
    2820      3642524 :                   new_arg = arg;
    2821      3642524 :                   walk_tree (&new_arg, copy_tree_body_r, id, NULL);
    2822      3642524 :                   gcc_assert (new_arg);
    2823              :                   /* With return slot optimization we can end up with
    2824              :                      non-gimple (foo *)&this->m, fix that here.  */
    2825      3642524 :                   if (TREE_CODE (new_arg) != SSA_NAME
    2826      1516998 :                       && TREE_CODE (new_arg) != FUNCTION_DECL
    2827      5159522 :                       && !is_gimple_val (new_arg))
    2828              :                     {
    2829           12 :                       gimple_seq stmts = NULL;
    2830           12 :                       new_arg = force_gimple_operand (new_arg, &stmts, true,
    2831              :                                                       NULL);
    2832           12 :                       gsi_insert_seq_on_edge (new_edge, stmts);
    2833           12 :                       inserted = true;
    2834              :                     }
    2835      3642524 :                   locus = gimple_phi_arg_location_from_edge (phi, old_edge);
    2836      3642524 :                   if (id->reset_location)
    2837            0 :                     locus = input_location;
    2838              :                   else
    2839      3642524 :                     locus = remap_location (locus, id);
    2840      3642524 :                   add_phi_arg (new_phi, new_arg, new_edge, locus);
    2841              :                 }
    2842              :             }
    2843              :         }
    2844              :     }
    2845              : 
    2846              :   /* Commit the delayed edge insertions.  */
    2847     22794672 :   if (inserted)
    2848           36 :     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
    2849           24 :       gsi_commit_one_edge_insert (new_edge, NULL);
    2850     22794672 : }
    2851              : 
    2852              : 
    2853              : /* Wrapper for remap_decl so it can be used as a callback.  */
    2854              : 
    2855              : static tree
    2856        86571 : remap_decl_1 (tree decl, void *data)
    2857              : {
    2858        86571 :   return remap_decl (decl, (copy_body_data *) data);
    2859              : }
    2860              : 
    2861              : /* Build struct function and associated datastructures for the new clone
    2862              :    NEW_FNDECL to be build.  CALLEE_FNDECL is the original.  Function changes
    2863              :    the cfun to the function of new_fndecl (and current_function_decl too).  */
    2864              : 
    2865              : static void
    2866       232121 : initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
    2867              : {
    2868       232121 :   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
    2869              : 
    2870              :   /* Register specific tree functions.  */
    2871       232121 :   gimple_register_cfg_hooks ();
    2872              : 
    2873              :   /* Get clean struct function.  */
    2874       232121 :   push_struct_function (new_fndecl, true);
    2875       232121 :   targetm.target_option.relayout_function (new_fndecl);
    2876              : 
    2877              :   /* We will rebuild these, so just sanity check that they are empty.  */
    2878       232121 :   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
    2879       232121 :   gcc_assert (cfun->local_decls == NULL);
    2880       232121 :   gcc_assert (cfun->cfg == NULL);
    2881       232121 :   gcc_assert (cfun->decl == new_fndecl);
    2882              : 
    2883              :   /* Copy items we preserve during cloning.  */
    2884       232121 :   cfun->static_chain_decl = src_cfun->static_chain_decl;
    2885       232121 :   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
    2886       232121 :   cfun->function_end_locus = src_cfun->function_end_locus;
    2887       232121 :   cfun->curr_properties = src_cfun->curr_properties;
    2888       232121 :   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
    2889       232121 :   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
    2890       232121 :   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
    2891       232121 :   cfun->calls_eh_return = src_cfun->calls_eh_return;
    2892       232121 :   cfun->stdarg = src_cfun->stdarg;
    2893       232121 :   cfun->after_inlining = src_cfun->after_inlining;
    2894       232121 :   cfun->can_throw_non_call_exceptions
    2895       232121 :     = src_cfun->can_throw_non_call_exceptions;
    2896       232121 :   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
    2897       232121 :   cfun->returns_struct = src_cfun->returns_struct;
    2898       232121 :   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
    2899              : 
    2900       232121 :   init_empty_tree_cfg ();
    2901              : 
    2902       232121 :   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
    2903       232121 :   cfun->cfg->full_profile = src_cfun->cfg->full_profile;
    2904              : 
    2905       232121 :   profile_count num = count;
    2906       232121 :   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
    2907       232121 :   profile_count::adjust_for_ipa_scaling (&num, &den);
    2908              : 
    2909       232121 :   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    2910       232121 :     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
    2911       232121 :   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    2912       232121 :     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
    2913       232121 :   if (src_cfun->eh)
    2914       232121 :     init_eh_for_function ();
    2915              : 
    2916       232121 :   if (src_cfun->gimple_df)
    2917              :     {
    2918       232121 :       init_tree_ssa (cfun);
    2919       232121 :       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
    2920       232121 :       if (cfun->gimple_df->in_ssa_p)
    2921       232121 :         init_ssa_operands (cfun);
    2922              :     }
    2923       232121 : }
    2924              : 
    2925              : /* Helper function for copy_cfg_body.  Move debug stmts from the end
    2926              :    of NEW_BB to the beginning of successor basic blocks when needed.  If the
    2927              :    successor has multiple predecessors, reset them, otherwise keep
    2928              :    their value.  */
    2929              : 
    2930              : static void
    2931      1472153 : maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
    2932              : {
    2933      1472153 :   edge e;
    2934      1472153 :   edge_iterator ei;
    2935      1472153 :   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
    2936              : 
    2937      1472153 :   if (gsi_end_p (si)
    2938      1531866 :       || gsi_one_before_end_p (si)
    2939      1622325 :       || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
    2940        59714 :            || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    2941      1441408 :     return;
    2942              : 
    2943        92237 :   FOR_EACH_EDGE (e, ei, new_bb->succs)
    2944              :     {
    2945        61492 :       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
    2946        61492 :       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
    2947       212697 :       while (is_gimple_debug (gsi_stmt (ssi)))
    2948              :         {
    2949       151205 :           gimple *stmt = gsi_stmt (ssi);
    2950       151205 :           gdebug *new_stmt;
    2951       151205 :           tree var;
    2952       151205 :           tree value;
    2953              : 
    2954              :           /* For the last edge move the debug stmts instead of copying
    2955              :              them.  */
    2956       151205 :           if (ei_one_before_end_p (ei))
    2957              :             {
    2958        75601 :               si = ssi;
    2959        75601 :               gsi_prev (&ssi);
    2960        75601 :               if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
    2961              :                 {
    2962        74299 :                   gimple_debug_bind_reset_value (stmt);
    2963       148089 :                   gimple_set_location (stmt, UNKNOWN_LOCATION);
    2964              :                 }
    2965        75601 :               gsi_remove (&si, false);
    2966        75601 :               gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
    2967        75601 :               continue;
    2968              :             }
    2969              : 
    2970        75604 :           if (gimple_debug_bind_p (stmt))
    2971              :             {
    2972        74302 :               var = gimple_debug_bind_get_var (stmt);
    2973        74302 :               if (single_pred_p (e->dest))
    2974              :                 {
    2975        47097 :                   value = gimple_debug_bind_get_value (stmt);
    2976        47097 :                   value = unshare_expr (value);
    2977        47097 :                   new_stmt = gimple_build_debug_bind (var, value, stmt);
    2978              :                 }
    2979              :               else
    2980        27205 :                 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
    2981              :             }
    2982         1302 :           else if (gimple_debug_source_bind_p (stmt))
    2983              :             {
    2984            0 :               var = gimple_debug_source_bind_get_var (stmt);
    2985            0 :               value = gimple_debug_source_bind_get_value (stmt);
    2986            0 :               new_stmt = gimple_build_debug_source_bind (var, value, stmt);
    2987              :             }
    2988         1302 :           else if (gimple_debug_nonbind_marker_p (stmt))
    2989         1302 :             new_stmt = as_a <gdebug *> (gimple_copy (stmt));
    2990              :           else
    2991            0 :             gcc_unreachable ();
    2992        75604 :           gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
    2993        75604 :           id->debug_stmts.safe_push (new_stmt);
    2994        75604 :           gsi_prev (&ssi);
    2995              :         }
    2996              :     }
    2997              : }
    2998              : 
    2999              : /* Make a copy of the sub-loops of SRC_PARENT and place them
    3000              :    as siblings of DEST_PARENT.  */
    3001              : 
    3002              : static void
    3003      5157199 : copy_loops (copy_body_data *id,
    3004              :             class loop *dest_parent, class loop *src_parent)
    3005              : {
    3006      5157199 :   class loop *src_loop = src_parent->inner;
    3007      5769451 :   while (src_loop)
    3008              :     {
    3009       612252 :       if (!id->blocks_to_copy
    3010       612252 :           || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
    3011              :         {
    3012       608372 :           class loop *dest_loop = alloc_loop ();
    3013              : 
    3014              :           /* Assign the new loop its header and latch and associate
    3015              :              those with the new loop.  */
    3016       608372 :           dest_loop->header = (basic_block)src_loop->header->aux;
    3017       608372 :           dest_loop->header->loop_father = dest_loop;
    3018       608372 :           if (src_loop->latch != NULL)
    3019              :             {
    3020       608356 :               dest_loop->latch = (basic_block)src_loop->latch->aux;
    3021       608356 :               dest_loop->latch->loop_father = dest_loop;
    3022              :             }
    3023              : 
    3024              :           /* Copy loop meta-data.  */
    3025       608372 :           copy_loop_info (src_loop, dest_loop);
    3026       608372 :           if (dest_loop->unroll)
    3027         2278 :             cfun->has_unroll = true;
    3028       608372 :           if (dest_loop->force_vectorize)
    3029          117 :             cfun->has_force_vectorize_loops = true;
    3030       608372 :           if (id->src_cfun->last_clique != 0)
    3031        74891 :             dest_loop->owned_clique
    3032        74891 :               = remap_dependence_clique (id,
    3033        74891 :                                          src_loop->owned_clique
    3034              :                                          ? src_loop->owned_clique : 1);
    3035              : 
    3036              :           /* Finally place it into the loop array and the loop tree.  */
    3037       608372 :           place_new_loop (cfun, dest_loop);
    3038       608372 :           flow_loop_tree_node_add (dest_parent, dest_loop);
    3039              : 
    3040       608372 :           if (src_loop->simduid)
    3041              :             {
    3042           67 :               dest_loop->simduid = remap_decl (src_loop->simduid, id);
    3043           67 :               cfun->has_simduid_loops = true;
    3044              :             }
    3045              : 
    3046              :           /* Recurse.  */
    3047       608372 :           copy_loops (id, dest_loop, src_loop);
    3048              :         }
    3049       612252 :       src_loop = src_loop->next;
    3050              :     }
    3051      5157199 : }
    3052              : 
    3053              : /* Call redirect_call_stmt_to_callee on all calls in BB.  */
    3054              : 
    3055              : void
    3056     12335422 : redirect_all_calls (copy_body_data * id, basic_block bb)
    3057              : {
    3058     12335422 :   gimple_stmt_iterator si;
    3059     12335422 :   gimple *last = last_nondebug_stmt (bb);
    3060     97447676 :   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    3061              :     {
    3062     72776832 :       gimple *stmt = gsi_stmt (si);
    3063     72776832 :       if (is_gimple_call (stmt))
    3064              :         {
    3065      3872874 :           struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
    3066      3872874 :           if (edge)
    3067              :             {
    3068      3834956 :               if (!id->killed_new_ssa_names)
    3069      2056263 :                 id->killed_new_ssa_names = new hash_set<tree> (16);
    3070      3834956 :               cgraph_edge::redirect_call_stmt_to_callee (
    3071              :                 edge, id->killed_new_ssa_names);
    3072      3834956 :               if (edge->has_callback)
    3073              :                 {
    3074              :                   /* When redirecting a carrying edge, we need to redirect its
    3075              :                      attached edges as well.  */
    3076         2354 :                   cgraph_edge *cbe;
    3077         2861 :                   for (cbe = edge->first_callback_edge (); cbe;
    3078          507 :                        cbe = cbe->next_callback_edge ())
    3079          507 :                     cgraph_edge::redirect_call_stmt_to_callee (
    3080              :                       cbe, id->killed_new_ssa_names);
    3081              :                 }
    3082              : 
    3083      3834956 :               if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
    3084        36804 :                 gimple_purge_dead_eh_edges (bb);
    3085              :             }
    3086              :         }
    3087              :     }
    3088     12335422 : }
    3089              : 
    3090              : /* Make a copy of the body of FN so that it can be inserted inline in
    3091              :    another function.  Walks FN via CFG, returns new fndecl.  */
    3092              : 
    3093              : static tree
    3094      4548827 : copy_cfg_body (copy_body_data * id,
    3095              :                basic_block entry_block_map, basic_block exit_block_map,
    3096              :                basic_block new_entry)
    3097              : {
    3098      4548827 :   tree callee_fndecl = id->src_fn;
    3099              :   /* Original cfun for the callee, doesn't change.  */
    3100      4548827 :   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
    3101      4548827 :   struct function *cfun_to_copy;
    3102      4548827 :   basic_block bb;
    3103      4548827 :   tree new_fndecl = NULL;
    3104      4548827 :   bool need_debug_cleanup = false;
    3105      4548827 :   int last;
    3106      4548827 :   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
    3107      4548827 :   profile_count num = entry_block_map->count;
    3108              : 
    3109      4548827 :   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
    3110              : 
    3111              :   /* Register specific tree functions.  */
    3112      4548827 :   gimple_register_cfg_hooks ();
    3113              : 
    3114              :   /* If we are offlining region of the function, make sure to connect
    3115              :      new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since new entry can be
    3116              :      part of loop, we must compute frequency and probability of
    3117              :      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
    3118              :      probabilities of edges incoming from nonduplicated region.  */
    3119      4548827 :   if (new_entry)
    3120              :     {
    3121        46846 :       edge e;
    3122        46846 :       edge_iterator ei;
    3123        46846 :       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
    3124              : 
    3125       101680 :       FOR_EACH_EDGE (e, ei, new_entry->preds)
    3126        54834 :         if (!e->src->aux)
    3127        54834 :           ENTRY_BLOCK_PTR_FOR_FN (cfun)->count += e->count ();
    3128              :       /* Do not scale - the profile of offlined region should
    3129              :          remain unchanged.  */
    3130        46846 :       num = den = profile_count::one ();
    3131              :     }
    3132              : 
    3133      4548827 :   profile_count::adjust_for_ipa_scaling (&num, &den);
    3134              : 
    3135              :   /* Must have a CFG here at this point.  */
    3136      4548827 :   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
    3137              :               (DECL_STRUCT_FUNCTION (callee_fndecl)));
    3138              : 
    3139              : 
    3140      4548827 :   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
    3141      4548827 :   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
    3142      4548827 :   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
    3143      4548827 :   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
    3144              : 
    3145              :   /* Duplicate any exception-handling regions.  */
    3146      4548827 :   if (cfun->eh)
    3147      4548827 :     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
    3148              :                                        remap_decl_1, id);
    3149              : 
    3150              :   /* Use aux pointers to map the original blocks to copy.  */
    3151     18506241 :   FOR_EACH_BB_FN (bb, cfun_to_copy)
    3152     13957414 :     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
    3153              :       {
    3154     13790710 :         basic_block new_bb = copy_bb (id, bb, num, den);
    3155     13790710 :         bb->aux = new_bb;
    3156     13790710 :         new_bb->aux = bb;
    3157     13790710 :         new_bb->loop_father = entry_block_map->loop_father;
    3158              :       }
    3159              : 
    3160      4548827 :   last = last_basic_block_for_fn (cfun);
    3161              : 
    3162              :   /* Now that we've duplicated the blocks, duplicate their edges.  */
    3163      4548827 :   basic_block abnormal_goto_dest = NULL;
    3164      4548827 :   if (id->call_stmt
    3165      4548827 :       && stmt_can_make_abnormal_goto (id->call_stmt))
    3166              :     {
    3167          324 :       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
    3168              : 
    3169          324 :       bb = gimple_bb (id->call_stmt);
    3170          324 :       gsi_next (&gsi);
    3171          324 :       if (gsi_end_p (gsi))
    3172          324 :         abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
    3173              :     }
    3174     27603895 :   FOR_ALL_BB_FN (bb, cfun_to_copy)
    3175     23055068 :     if (!id->blocks_to_copy
    3176     23055068 :         || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
    3177     22794672 :       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
    3178              :                                                abnormal_goto_dest, id);
    3179              : 
    3180      4548827 :   if (id->eh_landing_pad_dest)
    3181              :     {
    3182       393611 :       add_clobbers_to_eh_landing_pad (id);
    3183       393611 :       id->eh_landing_pad_dest = NULL;
    3184              :     }
    3185              : 
    3186      4548827 :   if (new_entry)
    3187              :     {
    3188        46846 :       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
    3189              :                           EDGE_FALLTHRU);
    3190        46846 :       e->probability = profile_probability::always ();
    3191              :     }
    3192              : 
    3193              :   /* Duplicate the loop tree, if available and wanted.  */
    3194      4548827 :   if (loops_for_fn (src_cfun) != NULL
    3195      4548827 :       && current_loops != NULL)
    3196              :     {
    3197      4548827 :       copy_loops (id, entry_block_map->loop_father,
    3198              :                   get_loop (src_cfun, 0));
    3199              :       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
    3200      4548827 :       loops_state_set (LOOPS_NEED_FIXUP);
    3201              :     }
    3202              : 
    3203              :   /* If the loop tree in the source function needed fixup, mark the
    3204              :      destination loop tree for fixup, too.  */
    3205      4548827 :   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
    3206            0 :     loops_state_set (LOOPS_NEED_FIXUP);
    3207              : 
    3208      4548827 :   if (gimple_in_ssa_p (cfun))
    3209     27603895 :     FOR_ALL_BB_FN (bb, cfun_to_copy)
    3210     23055068 :       if (!id->blocks_to_copy
    3211     23055068 :           || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
    3212     22794672 :         copy_phis_for_bb (bb, id);
    3213              : 
    3214     27603895 :   FOR_ALL_BB_FN (bb, cfun_to_copy)
    3215     23055068 :     if (bb->aux)
    3216              :       {
    3217     22888364 :         if (need_debug_cleanup
    3218      1927883 :             && bb->index != ENTRY_BLOCK
    3219      1647181 :             && bb->index != EXIT_BLOCK)
    3220      1366479 :           maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
    3221              :         /* Update call edge destinations.  This cannot be done before loop
    3222              :            info is updated, because we may split basic blocks.  */
    3223     22888364 :         if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
    3224     20644245 :             && bb->index != ENTRY_BLOCK
    3225     16327539 :             && bb->index != EXIT_BLOCK)
    3226     12010833 :           redirect_all_calls (id, (basic_block)bb->aux);
    3227     22888364 :         ((basic_block)bb->aux)->aux = NULL;
    3228     22888364 :         bb->aux = NULL;
    3229              :       }
    3230              : 
    3231              :   /* Zero out AUX fields of newly created block during EH edge
    3232              :      insertion. */
    3233      4873416 :   for (; last < last_basic_block_for_fn (cfun); last++)
    3234              :     {
    3235       324589 :       if (need_debug_cleanup)
    3236       105674 :         maybe_move_debug_stmts_to_successors (id,
    3237       105674 :                                               BASIC_BLOCK_FOR_FN (cfun, last));
    3238       324589 :       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
    3239              :       /* Update call edge destinations.  This cannot be done before loop
    3240              :          info is updated, because we may split basic blocks.  */
    3241       324589 :       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
    3242       324589 :         redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
    3243              :     }
    3244      4548827 :   entry_block_map->aux = NULL;
    3245      4548827 :   exit_block_map->aux = NULL;
    3246              : 
    3247      4548827 :   if (id->eh_map)
    3248              :     {
    3249      4548827 :       delete id->eh_map;
    3250      4548827 :       id->eh_map = NULL;
    3251              :     }
    3252      4548827 :   if (id->dependence_map)
    3253              :     {
    3254       676528 :       delete id->dependence_map;
    3255       676528 :       id->dependence_map = NULL;
    3256              :     }
    3257              : 
    3258      4548827 :   return new_fndecl;
    3259              : }
    3260              : 
    3261              : /* Copy the debug STMT using ID.  We deal with these statements in a
    3262              :    special way: if any variable in their VALUE expression wasn't
    3263              :    remapped yet, we won't remap it, because that would get decl uids
    3264              :    out of sync, causing codegen differences between -g and -g0.  If
    3265              :    this arises, we drop the VALUE expression altogether.  */
    3266              : 
    3267              : static void
    3268     52536863 : copy_debug_stmt (gdebug *stmt, copy_body_data *id)
    3269              : {
    3270     52536863 :   tree t, *n;
    3271     52536863 :   struct walk_stmt_info wi;
    3272              : 
    3273     52536863 :   if (tree block = gimple_block (stmt))
    3274              :     {
    3275     34900852 :       n = id->decl_map->get (block);
    3276     34900852 :       gimple_set_block (stmt, n ? *n : id->block);
    3277              :     }
    3278              : 
    3279     52536863 :   if (gimple_debug_nonbind_marker_p (stmt))
    3280              :     {
    3281     12107198 :       if (id->call_stmt && !gimple_block (stmt))
    3282              :         {
    3283        34521 :           gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
    3284        34521 :           gsi_remove (&gsi, true);
    3285              :         }
    3286     12107198 :       return;
    3287              :     }
    3288              : 
    3289              :   /* Remap all the operands in COPY.  */
    3290     40429665 :   memset (&wi, 0, sizeof (wi));
    3291     40429665 :   wi.info = id;
    3292              : 
    3293     40429665 :   processing_debug_stmt = 1;
    3294              : 
    3295     40429665 :   if (gimple_debug_source_bind_p (stmt))
    3296       466035 :     t = gimple_debug_source_bind_get_var (stmt);
    3297     39963630 :   else if (gimple_debug_bind_p (stmt))
    3298     39963630 :     t = gimple_debug_bind_get_var (stmt);
    3299              :   else
    3300            0 :     gcc_unreachable ();
    3301              : 
    3302     40429665 :   if (TREE_CODE (t) == PARM_DECL
    3303       197390 :       && id->debug_map
    3304     40430606 :       && (n = id->debug_map->get (t)))
    3305              :     {
    3306            0 :       gcc_assert (VAR_P (*n));
    3307            0 :       t = *n;
    3308              :     }
    3309     40429665 :   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
    3310              :     /* T is a non-localized variable.  */;
    3311              :   else
    3312     35477695 :     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
    3313              : 
    3314     40429665 :   if (gimple_debug_bind_p (stmt))
    3315              :     {
    3316     39963630 :       gimple_debug_bind_set_var (stmt, t);
    3317              : 
    3318     39963630 :       if (gimple_debug_bind_has_value_p (stmt))
    3319     22523347 :         walk_tree (gimple_debug_bind_get_value_ptr (stmt),
    3320              :                    remap_gimple_op_r, &wi, NULL);
    3321              : 
    3322              :       /* Punt if any decl couldn't be remapped.  */
    3323     39963630 :       if (processing_debug_stmt < 0)
    3324       574755 :         gimple_debug_bind_reset_value (stmt);
    3325              :     }
    3326       466035 :   else if (gimple_debug_source_bind_p (stmt))
    3327              :     {
    3328       466035 :       gimple_debug_source_bind_set_var (stmt, t);
    3329              :       /* When inlining and source bind refers to one of the optimized
    3330              :          away parameters, change the source bind into normal debug bind
    3331              :          referring to the corresponding DEBUG_EXPR_DECL that should have
    3332              :          been bound before the call stmt.  */
    3333       466035 :       t = gimple_debug_source_bind_get_value (stmt);
    3334       466035 :       if (t != NULL_TREE
    3335       466035 :           && TREE_CODE (t) == PARM_DECL
    3336       331402 :           && id->call_stmt)
    3337              :         {
    3338       327036 :           vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
    3339       327036 :           unsigned int i;
    3340       327036 :           if (debug_args != NULL)
    3341              :             {
    3342       434941 :               for (i = 0; i < vec_safe_length (*debug_args); i += 2)
    3343       434940 :                 if ((**debug_args)[i] == DECL_ORIGIN (t)
    3344       434940 :                     && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
    3345              :                   {
    3346       325917 :                     t = (**debug_args)[i + 1];
    3347       325917 :                     stmt->subcode = GIMPLE_DEBUG_BIND;
    3348       325917 :                     gimple_debug_bind_set_value (stmt, t);
    3349       325917 :                     break;
    3350              :                   }
    3351              :             }
    3352              :         }
    3353       466035 :       if (gimple_debug_source_bind_p (stmt))
    3354       140118 :         walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
    3355              :                    remap_gimple_op_r, &wi, NULL);
    3356              :     }
    3357              : 
    3358     40429665 :   processing_debug_stmt = 0;
    3359              : 
    3360     40429665 :   update_stmt (stmt);
    3361              : }
    3362              : 
    3363              : /* Process deferred debug stmts.  In order to give values better odds
    3364              :    of being successfully remapped, we delay the processing of debug
    3365              :    stmts until all other stmts that might require remapping are
    3366              :    processed.  */
    3367              : 
    3368              : static void
    3369      4548827 : copy_debug_stmts (copy_body_data *id)
    3370              : {
    3371      4548827 :   if (!id->debug_stmts.exists ())
    3372              :     return;
    3373              : 
    3374     55352890 :   for (gdebug *stmt : id->debug_stmts)
    3375     52536863 :     copy_debug_stmt (stmt, id);
    3376              : 
    3377      2816027 :   id->debug_stmts.release ();
    3378              : }
    3379              : 
    3380              : /* Make a copy of the body of SRC_FN so that it can be inserted inline in
    3381              :    another function.  */
    3382              : 
    3383              : static tree
    3384     33066939 : copy_tree_body (copy_body_data *id)
    3385              : {
    3386     33066939 :   tree fndecl = id->src_fn;
    3387     33066939 :   tree body = DECL_SAVED_TREE (fndecl);
    3388              : 
    3389     33066939 :   walk_tree (&body, copy_tree_body_r, id, NULL);
    3390              : 
    3391     33066939 :   return body;
    3392              : }
    3393              : 
    3394              : /* Make a copy of the body of FN so that it can be inserted inline in
    3395              :    another function.  */
    3396              : 
    3397              : static tree
    3398      4548827 : copy_body (copy_body_data *id,
    3399              :            basic_block entry_block_map, basic_block exit_block_map,
    3400              :            basic_block new_entry)
    3401              : {
    3402      4548827 :   tree fndecl = id->src_fn;
    3403      4548827 :   tree body;
    3404              : 
    3405              :   /* If this body has a CFG, walk CFG and copy.  */
    3406      4548827 :   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
    3407      4548827 :   body = copy_cfg_body (id, entry_block_map, exit_block_map,
    3408              :                         new_entry);
    3409      4548827 :   copy_debug_stmts (id);
    3410      4548827 :   if (id->killed_new_ssa_names)
    3411              :     {
    3412      2056263 :       ipa_release_ssas_in_hash (id->killed_new_ssa_names);
    3413      4112526 :       delete id->killed_new_ssa_names;
    3414      2056263 :       id->killed_new_ssa_names = NULL;
    3415              :     }
    3416              : 
    3417      4548827 :   return body;
    3418              : }
    3419              : 
    3420              : /* Return true if VALUE is an ADDR_EXPR of an automatic variable
    3421              :    defined in function FN, or of a data member thereof.  */
    3422              : 
    3423              : static bool
    3424       151049 : self_inlining_addr_expr (tree value, tree fn)
    3425              : {
    3426       151049 :   tree var;
    3427              : 
    3428       151049 :   if (TREE_CODE (value) != ADDR_EXPR)
    3429              :     return false;
    3430              : 
    3431       145099 :   var = get_base_address (TREE_OPERAND (value, 0));
    3432              : 
    3433       145099 :   return var && auto_var_in_fn_p (var, fn);
    3434              : }
    3435              : 
    3436              : /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
    3437              :    lexical block and line number information from base_stmt, if given,
    3438              :    or from the last stmt of the block otherwise.  */
    3439              : 
    3440              : static gimple *
    3441      7438913 : insert_init_debug_bind (copy_body_data *id,
    3442              :                         basic_block bb, tree var, tree value,
    3443              :                         gimple *base_stmt)
    3444              : {
    3445      7438913 :   gimple *note;
    3446      7438913 :   gimple_stmt_iterator gsi;
    3447      7438913 :   tree tracked_var;
    3448              : 
    3449      8890944 :   if (!gimple_in_ssa_p (id->src_cfun))
    3450              :     return NULL;
    3451              : 
    3452      7438913 :   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    3453              :     return NULL;
    3454              : 
    3455      6558301 :   tracked_var = target_for_debug_bind (var);
    3456      6558301 :   if (!tracked_var)
    3457              :     return NULL;
    3458              : 
    3459      5986882 :   if (bb)
    3460              :     {
    3461      5979526 :       gsi = gsi_last_bb (bb);
    3462      5979526 :       if (!base_stmt && !gsi_end_p (gsi))
    3463      5986882 :         base_stmt = gsi_stmt (gsi);
    3464              :     }
    3465              : 
    3466      5986882 :   note = gimple_build_debug_bind (tracked_var,
    3467      5986882 :                                   value == error_mark_node
    3468      5986882 :                                   ? NULL_TREE : unshare_expr (value),
    3469              :                                   base_stmt);
    3470              : 
    3471      5986882 :   if (bb)
    3472              :     {
    3473      5979526 :       if (!gsi_end_p (gsi))
    3474      5179788 :         gsi_insert_after (&gsi, note, GSI_SAME_STMT);
    3475              :       else
    3476       799738 :         gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    3477              :     }
    3478              : 
    3479              :   return note;
    3480              : }
    3481              : 
    3482              : static void
    3483       449431 : insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
    3484              : {
    3485              :   /* If VAR represents a zero-sized variable, it's possible that the
    3486              :      assignment statement may result in no gimple statements.  */
    3487       449431 :   if (init_stmt)
    3488              :     {
    3489       449431 :       gimple_stmt_iterator si = gsi_last_bb (bb);
    3490              : 
    3491              :       /* We can end up with init statements that store to a non-register
    3492              :          from a rhs with a conversion.  Handle that here by forcing the
    3493              :          rhs into a temporary.  gimple_regimplify_operands is not
    3494              :          prepared to do this for us.  */
    3495       449431 :       if (!is_gimple_debug (init_stmt)
    3496       442075 :           && !is_gimple_reg (gimple_assign_lhs (init_stmt))
    3497       344872 :           && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
    3498       485322 :           && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
    3499              :         {
    3500            2 :           tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
    3501            1 :                              TREE_TYPE (gimple_assign_lhs (init_stmt)),
    3502              :                              gimple_assign_rhs1 (init_stmt));
    3503            1 :           rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
    3504              :                                           GSI_NEW_STMT);
    3505            1 :           gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
    3506            1 :           gimple_assign_set_rhs1 (init_stmt, rhs);
    3507              :         }
    3508       449431 :       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
    3509       449431 :       if (!is_gimple_debug (init_stmt))
    3510              :         {
    3511       442075 :           gimple_regimplify_operands (init_stmt, &si);
    3512              : 
    3513       442075 :           tree def = gimple_assign_lhs (init_stmt);
    3514       442075 :           insert_init_debug_bind (id, bb, def, def, init_stmt);
    3515              :         }
    3516              :     }
    3517       449431 : }
    3518              : 
    3519              : /* Deal with mismatched formal/actual parameters, in a rather brute-force way
    3520              :    if need be (which should only be necessary for invalid programs).  Attempt
    3521              :    to convert VAL to TYPE and return the result if it is possible, just return
    3522              :    a zero constant of the given type if it fails.  */
    3523              : 
    3524              : tree
    3525         2066 : force_value_to_type (tree type, tree value)
    3526              : {
    3527              :   /* If we can match up types by promotion/demotion do so.  */
    3528         2066 :   if (fold_convertible_p (type, value))
    3529          536 :     return fold_convert (type, value);
    3530              : 
    3531              :   /* ???  For valid programs we should not end up here.
    3532              :      Still if we end up with truly mismatched types here, fall back
    3533              :      to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
    3534              :      GIMPLE to the following passes.  */
    3535         1530 :   if (TREE_CODE (value) == WITH_SIZE_EXPR)
    3536            0 :     return error_mark_node;
    3537         1530 :   else if (!is_gimple_reg_type (TREE_TYPE (value))
    3538         1530 :            || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
    3539         1239 :     return fold_build1 (VIEW_CONVERT_EXPR, type, value);
    3540              :   else
    3541          291 :     return build_zero_cst (type);
    3542              : }
    3543              : 
    3544              : /* Initialize parameter P with VALUE.  If needed, produce init statement
    3545              :    at the end of BB.  When BB is NULL, we return init statement to be
    3546              :    output later.  */
    3547              : static gimple *
    3548      7421812 : setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
    3549              :                      basic_block bb, tree *vars)
    3550              : {
    3551      7421812 :   gimple *init_stmt = NULL;
    3552      7421812 :   tree var;
    3553      7421812 :   tree def = (gimple_in_ssa_p (cfun)
    3554      7421812 :               ? ssa_default_def (id->src_cfun, p) : NULL);
    3555              : 
    3556              :   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
    3557              :      here since the type of this decl must be visible to the calling
    3558              :      function.  */
    3559      7421812 :   var = copy_decl_to_var (p, id);
    3560              : 
    3561              :   /* Declare this new variable.  */
    3562      7421812 :   DECL_CHAIN (var) = *vars;
    3563      7421812 :   *vars = var;
    3564              : 
    3565              :   /* Make gimplifier happy about this variable.  */
    3566      7421812 :   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
    3567              : 
    3568              :   /* If the parameter is never assigned to, has no SSA_NAMEs created,
    3569              :      we would not need to create a new variable here at all, if it
    3570              :      weren't for debug info.  Still, we can just use the argument
    3571              :      value.  */
    3572      7421812 :   if (TREE_READONLY (p)
    3573      3940220 :       && !TREE_ADDRESSABLE (p)
    3574      3938966 :       && value
    3575      3938959 :       && !TREE_SIDE_EFFECTS (value)
    3576     11360765 :       && !def)
    3577              :     {
    3578              :       /* We may produce non-gimple trees by adding NOPs or introduce invalid
    3579              :          sharing when the value is not constant or DECL.  And we need to make
    3580              :          sure that it cannot be modified from another path in the callee.  */
    3581       358883 :       if (((is_gimple_min_invariant (value)
    3582              :             /* When the parameter is used in a context that forces it to
    3583              :                not be a GIMPLE register avoid substituting something that
    3584              :                is not a decl there.  */
    3585       150785 :             && ! DECL_NOT_GIMPLE_REG_P (p))
    3586       208109 :            || (DECL_P (value) && TREE_READONLY (value))
    3587       208025 :            || (auto_var_in_fn_p (value, id->dst_fn)
    3588          197 :                && !TREE_ADDRESSABLE (value)))
    3589       151052 :           && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
    3590              :           /* We have to be very careful about ADDR_EXPR.  Make sure
    3591              :              the base variable isn't a local variable of the inlined
    3592              :              function, e.g., when doing recursive inlining, direct or
    3593              :              mutually-recursive or whatever, which is why we don't
    3594              :              just test whether fn == current_function_decl.  */
    3595       509932 :           && ! self_inlining_addr_expr (value, fn))
    3596              :         {
    3597       151049 :           insert_decl_map (id, p, value);
    3598       151049 :           if (!id->debug_map)
    3599       146214 :             id->debug_map = new hash_map<tree, tree>;
    3600       151049 :           id->debug_map->put (p, var);
    3601       151049 :           return insert_init_debug_bind (id, bb, var, value, NULL);
    3602              :         }
    3603              :     }
    3604              : 
    3605              :   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
    3606              :      that way, when the PARM_DECL is encountered, it will be
    3607              :      automatically replaced by the VAR_DECL.  */
    3608      7270763 :   insert_decl_map (id, p, var);
    3609              : 
    3610              :   /* Even if P was TREE_READONLY, the new VAR should not be.  In the original
    3611              :      code, we would have constructed a temporary, and then the function body
    3612              :      would have never changed the value of P.  However, now, we will be
    3613              :      constructing VAR directly.  Therefore, it must not be TREE_READONLY.  */
    3614      7270763 :   TREE_READONLY (var) = 0;
    3615              : 
    3616      7270763 :   tree rhs = value;
    3617      7270763 :   if (value
    3618      7269851 :       && value != error_mark_node
    3619     14540614 :       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    3620         2055 :     rhs = force_value_to_type (TREE_TYPE (p), value);
    3621              : 
    3622              :   /* If there is no setup required and we are in SSA, take the easy route
    3623              :      replacing all SSA names representing the function parameter by the
    3624              :      SSA name passed to function.
    3625              : 
    3626              :      We need to construct map for the variable anyway as it might be used
    3627              :      in different SSA names when parameter is set in function.
    3628              : 
    3629              :      Do replacement at -O0 for const arguments replaced by constant.
    3630              :      This is important for builtin_constant_p and other construct requiring
    3631              :      constant argument to be visible in inlined function body.  */
    3632     14541526 :   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
    3633      6534397 :       && (optimize
    3634        23027 :           || (TREE_READONLY (p)
    3635        10686 :               && is_gimple_min_invariant (rhs)))
    3636      6514449 :       && (TREE_CODE (rhs) == SSA_NAME
    3637      2480872 :           || is_gimple_min_invariant (rhs))
    3638      6514143 :       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    3639              :     {
    3640      6514143 :       insert_decl_map (id, def, rhs);
    3641      6514143 :       return insert_init_debug_bind (id, bb, var, rhs, NULL);
    3642              :     }
    3643              : 
    3644              :   /* If the value of argument is never used, don't care about initializing
    3645              :      it.  */
    3646      1489642 :   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    3647              :     {
    3648              :       /* When there's a gross type mismatch between the passed value
    3649              :          and the declared argument type drop it on the floor and do
    3650              :          not bother to insert a debug bind.  */
    3651       331660 :       if (value && !is_gimple_reg_type (TREE_TYPE (value)))
    3652              :         return NULL;
    3653       331646 :       return insert_init_debug_bind (id, bb, var, rhs, NULL);
    3654              :     }
    3655              : 
    3656              :   /* Initialize this VAR_DECL from the equivalent argument.  Convert
    3657              :      the argument to the proper type in case it was promoted.  */
    3658       424960 :   if (value)
    3659              :     {
    3660       424872 :       if (rhs == error_mark_node)
    3661              :         {
    3662            0 :           insert_decl_map (id, p, var);
    3663            0 :           return insert_init_debug_bind (id, bb, var, rhs, NULL);
    3664              :         }
    3665              : 
    3666       424872 :       STRIP_USELESS_TYPE_CONVERSION (rhs);
    3667              : 
    3668              :       /* If we are in SSA form properly remap the default definition.  */
    3669       849744 :       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
    3670              :         {
    3671        23082 :           if (def)
    3672              :             {
    3673        20254 :               def = remap_ssa_name (def, id);
    3674        20254 :               init_stmt = gimple_build_assign (def, rhs);
    3675        20254 :               SSA_NAME_IS_DEFAULT_DEF (def) = 0;
    3676        20254 :               set_ssa_default_def (cfun, var, NULL);
    3677              :             }
    3678              :         }
    3679       401790 :       else if (!is_empty_type (TREE_TYPE (var)))
    3680       344872 :         init_stmt = gimple_build_assign (var, rhs);
    3681              : 
    3682       424872 :       if (bb && init_stmt)
    3683       364685 :         insert_init_stmt (id, bb, init_stmt);
    3684              :     }
    3685              :   return init_stmt;
    3686              : }
    3687              : 
    3688              : /* Generate code to initialize the parameters of the function at the
    3689              :    top of the stack in ID from the GIMPLE_CALL STMT.  */
    3690              : 
    3691              : static void
    3692      4316706 : initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
    3693              :                                tree fn, basic_block bb)
    3694              : {
    3695      4316706 :   tree parms;
    3696      4316706 :   size_t i;
    3697      4316706 :   tree p;
    3698      4316706 :   tree vars = NULL_TREE;
    3699      4316706 :   tree static_chain = gimple_call_chain (stmt);
    3700              : 
    3701              :   /* Figure out what the parameters are.  */
    3702      4316706 :   parms = DECL_ARGUMENTS (fn);
    3703              : 
    3704              :   /* Loop through the parameter declarations, replacing each with an
    3705              :      equivalent VAR_DECL, appropriately initialized.  */
    3706     11710650 :   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    3707              :     {
    3708      7393944 :       tree val;
    3709      7393944 :       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
    3710      7393944 :       setup_one_parameter (id, p, val, fn, bb, &vars);
    3711              :     }
    3712              :   /* After remapping parameters remap their types.  This has to be done
    3713              :      in a second loop over all parameters to appropriately remap
    3714              :      variable sized arrays when the size is specified in a
    3715              :      parameter following the array.  */
    3716     11710650 :   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    3717              :     {
    3718      7393944 :       tree *varp = id->decl_map->get (p);
    3719      7393944 :       if (varp && VAR_P (*varp))
    3720              :         {
    3721     14489424 :           tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
    3722      6843129 :                       ? ssa_default_def (id->src_cfun, p) : NULL);
    3723      7244712 :           tree var = *varp;
    3724      7244712 :           TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
    3725              :           /* Also remap the default definition if it was remapped
    3726              :              to the default definition of the parameter replacement
    3727              :              by the parameter setup.  */
    3728      7244712 :           if (def)
    3729              :             {
    3730      6510499 :               tree *defp = id->decl_map->get (def);
    3731      6510499 :               if (defp
    3732      6510412 :                   && TREE_CODE (*defp) == SSA_NAME
    3733     12700188 :                   && SSA_NAME_VAR (*defp) == var)
    3734        20254 :                 TREE_TYPE (*defp) = TREE_TYPE (var);
    3735              :             }
    3736              :           /* When not optimizing and the parameter is unused, assign to
    3737              :              a dummy SSA name.  Do this after remapping the type above.  */
    3738       734213 :           else if (!optimize
    3739         3650 :                    && is_gimple_reg (p)
    3740       737041 :                    && i < gimple_call_num_args (stmt))
    3741              :             {
    3742         2828 :               tree val = gimple_call_arg (stmt, i);
    3743         2828 :               if (val != error_mark_node)
    3744              :                 {
    3745         2828 :                   if (!useless_type_conversion_p (TREE_TYPE (p),
    3746         2828 :                                                   TREE_TYPE (val)))
    3747            0 :                     val = force_value_to_type (TREE_TYPE (p), val);
    3748         2828 :                   def = make_ssa_name (var);
    3749         2828 :                   gimple *init_stmt = gimple_build_assign (def, val);
    3750         2828 :                   insert_init_stmt (id, bb, init_stmt);
    3751              :                 }
    3752              :             }
    3753              :         }
    3754              :     }
    3755              : 
    3756              :   /* Initialize the static chain.  */
    3757      4316706 :   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
    3758      4316706 :   gcc_assert (fn != current_function_decl);
    3759      4316706 :   if (p)
    3760              :     {
    3761              :       /* No static chain?  Seems like a bug in tree-nested.cc.  */
    3762         2279 :       gcc_assert (static_chain);
    3763              : 
    3764         2279 :       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    3765              :     }
    3766              : 
    3767              :   /* Reverse so the variables appear in the correct order in DWARF
    3768              :      debug info.  */
    3769      4316706 :   vars = nreverse (vars);
    3770              : 
    3771      4316706 :   declare_inline_vars (id->block, vars);
    3772      4316706 : }
    3773              : 
    3774              : 
    3775              : /* Declare a return variable to replace the RESULT_DECL for the
    3776              :    function we are calling.  An appropriate DECL_STMT is returned.
    3777              :    The USE_STMT is filled to contain a use of the declaration to
    3778              :    indicate the return value of the function.
    3779              : 
    3780              :    RETURN_SLOT, if non-null is place where to store the result.  It
    3781              :    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
    3782              :    was the LHS of the MODIFY_EXPR to which this call is the RHS.
    3783              : 
    3784              :    The return value is a (possibly null) value that holds the result
    3785              :    as seen by the caller.  */
    3786              : 
    3787              : static tree
    3788      4316706 : declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
    3789              :                          basic_block entry_bb)
    3790              : {
    3791      4316706 :   tree callee = id->src_fn;
    3792      4316706 :   tree result = DECL_RESULT (callee);
    3793      4316706 :   tree callee_type = TREE_TYPE (result);
    3794      4316706 :   tree caller_type;
    3795      4316706 :   tree var, use;
    3796              : 
    3797              :   /* Handle type-mismatches in the function declaration return type
    3798              :      vs. the call expression.  */
    3799      4316706 :   if (modify_dest)
    3800      2034923 :     caller_type = TREE_TYPE (modify_dest);
    3801      2281783 :   else if (return_slot)
    3802       128156 :     caller_type = TREE_TYPE (return_slot);
    3803              :   else /* No LHS on the call.  */
    3804      2153627 :     caller_type = TREE_TYPE (TREE_TYPE (callee));
    3805              : 
    3806              :   /* We don't need to do anything for functions that don't return anything.  */
    3807      4316706 :   if (VOID_TYPE_P (callee_type))
    3808              :     return NULL_TREE;
    3809              : 
    3810              :   /* If there was a return slot, then the return value is the
    3811              :      dereferenced address of that object.  */
    3812      2307088 :   if (return_slot)
    3813              :     {
    3814              :       /* The front end shouldn't have used both return_slot and
    3815              :          a modify expression.  */
    3816       128156 :       gcc_assert (!modify_dest);
    3817       128156 :       if (DECL_BY_REFERENCE (result))
    3818              :         {
    3819        74121 :           tree return_slot_addr = build_fold_addr_expr (return_slot);
    3820        74121 :           STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
    3821              : 
    3822              :           /* We are going to construct *&return_slot and we can't do that
    3823              :              for variables believed to be not addressable.
    3824              : 
    3825              :              FIXME: This check possibly can match, because values returned
    3826              :              via return slot optimization are not believed to have address
    3827              :              taken by alias analysis.  */
    3828        74121 :           gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
    3829        74121 :           var = return_slot_addr;
    3830        74121 :           mark_addressable (return_slot);
    3831              :         }
    3832              :       else
    3833              :         {
    3834        54035 :           var = return_slot;
    3835        54035 :           gcc_assert (TREE_CODE (var) != SSA_NAME);
    3836        54035 :           if (TREE_ADDRESSABLE (result))
    3837        18873 :             mark_addressable (var);
    3838              :         }
    3839       128156 :       if (DECL_NOT_GIMPLE_REG_P (result)
    3840       128156 :           && DECL_P (var))
    3841            0 :         DECL_NOT_GIMPLE_REG_P (var) = 1;
    3842              : 
    3843       128156 :       if (!useless_type_conversion_p (callee_type, caller_type))
    3844        74122 :         var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
    3845              : 
    3846       128156 :       use = NULL;
    3847       128156 :       goto done;
    3848              :     }
    3849              : 
    3850              :   /* All types requiring non-trivial constructors should have been handled.  */
    3851      2178932 :   gcc_assert (!TREE_ADDRESSABLE (callee_type));
    3852              : 
    3853              :   /* Attempt to avoid creating a new temporary variable.  */
    3854      2178932 :   if (modify_dest
    3855      2034902 :       && TREE_CODE (modify_dest) != SSA_NAME)
    3856              :     {
    3857       283574 :       bool use_it = false;
    3858              : 
    3859              :       /* We can't use MODIFY_DEST if there's type promotion involved.  */
    3860       283574 :       if (!useless_type_conversion_p (callee_type, caller_type))
    3861              :         use_it = false;
    3862              : 
    3863              :       /* ??? If we're assigning to a variable sized type, then we must
    3864              :          reuse the destination variable, because we've no good way to
    3865              :          create variable sized temporaries at this point.  */
    3866       283572 :       else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
    3867              :         use_it = true;
    3868              : 
    3869              :       /* If the callee cannot possibly modify MODIFY_DEST, then we can
    3870              :          reuse it as the result of the call directly.  Don't do this if
    3871              :          it would promote MODIFY_DEST to addressable.  */
    3872       283572 :       else if (TREE_ADDRESSABLE (result))
    3873              :         use_it = false;
    3874              :       else
    3875              :         {
    3876       282836 :           tree base_m = get_base_address (modify_dest);
    3877              : 
    3878              :           /* If the base isn't a decl, then it's a pointer, and we don't
    3879              :              know where that's going to go.  */
    3880       282836 :           if (!DECL_P (base_m))
    3881              :             use_it = false;
    3882       278820 :           else if (is_global_var (base_m))
    3883              :             use_it = false;
    3884       278448 :           else if (DECL_NOT_GIMPLE_REG_P (result)
    3885       278448 :                    && !DECL_NOT_GIMPLE_REG_P (base_m))
    3886              :             use_it = false;
    3887       278448 :           else if (!TREE_ADDRESSABLE (base_m))
    3888              :             use_it = true;
    3889              :         }
    3890              : 
    3891              :       if (use_it)
    3892              :         {
    3893       193580 :           var = modify_dest;
    3894       193580 :           use = NULL;
    3895       193580 :           goto done;
    3896              :         }
    3897              :     }
    3898              : 
    3899      1985352 :   gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
    3900              : 
    3901      1985352 :   var = copy_result_decl_to_var (result, id);
    3902      1985352 :   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
    3903              : 
    3904              :   /* Do not have the rest of GCC warn about this variable as it should
    3905              :      not be visible to the user.  */
    3906      1985352 :   suppress_warning (var /* OPT_Wuninitialized? */);
    3907              : 
    3908      1985352 :   declare_inline_vars (id->block, var);
    3909              : 
    3910              :   /* Build the use expr.  If the return type of the function was
    3911              :      promoted, convert it back to the expected type.  */
    3912      1985352 :   use = var;
    3913      1985352 :   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    3914              :     {
    3915              :       /* If we can match up types by promotion/demotion do so.  */
    3916            7 :       if (fold_convertible_p (caller_type, var))
    3917            0 :         use = fold_convert (caller_type, var);
    3918              :       else
    3919              :         {
    3920              :           /* ???  For valid programs we should not end up here.
    3921              :              Still if we end up with truly mismatched types here, fall back
    3922              :              to using a MEM_REF to not leak invalid GIMPLE to the following
    3923              :              passes.  */
    3924              :           /* Prevent var from being written into SSA form.  */
    3925            7 :           if (is_gimple_reg_type (TREE_TYPE (var)))
    3926            7 :             DECL_NOT_GIMPLE_REG_P (var) = true;
    3927            7 :           use = fold_build2 (MEM_REF, caller_type,
    3928              :                              build_fold_addr_expr (var),
    3929              :                              build_int_cst (ptr_type_node, 0));
    3930              :         }
    3931              :     }
    3932              : 
    3933      1985352 :   STRIP_USELESS_TYPE_CONVERSION (use);
    3934              : 
    3935      1985352 :   if (DECL_BY_REFERENCE (result))
    3936              :     {
    3937            0 :       TREE_ADDRESSABLE (var) = 1;
    3938            0 :       var = build_fold_addr_expr (var);
    3939              :     }
    3940              : 
    3941      1985352 :  done:
    3942              :   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
    3943              :      way, when the RESULT_DECL is encountered, it will be
    3944              :      automatically replaced by the VAR_DECL.
    3945              : 
    3946              :      When returning by reference, ensure that RESULT_DECL remaps to
    3947              :      gimple_val.  */
    3948      2307088 :   if (DECL_BY_REFERENCE (result)
    3949      2307088 :       && !is_gimple_val (var))
    3950              :     {
    3951        74121 :       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
    3952        74121 :       insert_decl_map (id, result, temp);
    3953              :       /* When RESULT_DECL is in SSA form, we need to remap and initialize
    3954              :          it's default_def SSA_NAME.  */
    3955        74121 :       if (gimple_in_ssa_p (id->src_cfun)
    3956        74121 :           && is_gimple_reg (result))
    3957        74121 :         if (tree default_def = ssa_default_def (id->src_cfun, result))
    3958              :           {
    3959        74084 :             temp = make_ssa_name (temp);
    3960        74084 :             insert_decl_map (id, default_def, temp);
    3961              :           }
    3962        74121 :       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    3963              :     }
    3964              :   else
    3965      2232967 :     insert_decl_map (id, result, var);
    3966              : 
    3967              :   /* Remember this so we can ignore it in remap_decls.  */
    3968      2307088 :   id->retvar = var;
    3969      2307088 :   return use;
    3970              : }
    3971              : 
    3972              : /* Determine if the function can be copied.  If so return NULL.  If
    3973              :    not return a string describng the reason for failure.  */
    3974              : 
    3975              : const char *
    3976     18628797 : copy_forbidden (struct function *fun)
    3977              : {
    3978     18628797 :   const char *reason = fun->cannot_be_copied_reason;
    3979              : 
    3980              :   /* Only examine the function once.  */
    3981     18628797 :   if (fun->cannot_be_copied_set)
    3982              :     return reason;
    3983              : 
    3984              :   /* We cannot copy a function that receives a non-local goto
    3985              :      because we cannot remap the destination label used in the
    3986              :      function that is performing the non-local goto.  */
    3987              :   /* ??? Actually, this should be possible, if we work at it.
    3988              :      No doubt there's just a handful of places that simply
    3989              :      assume it doesn't happen and don't substitute properly.  */
    3990     11462729 :   if (fun->has_nonlocal_label)
    3991              :     {
    3992          779 :       reason = G_("function %q+F can never be copied "
    3993              :                   "because it receives a non-local goto");
    3994          779 :       goto fail;
    3995              :     }
    3996              : 
    3997     11461950 :   if (fun->has_forced_label_in_static)
    3998              :     {
    3999          225 :       reason = G_("function %q+F can never be copied because it saves "
    4000              :                   "address of local label in a static variable");
    4001          225 :       goto fail;
    4002              :     }
    4003              : 
    4004     11461725 :  fail:
    4005     11462729 :   fun->cannot_be_copied_reason = reason;
    4006     11462729 :   fun->cannot_be_copied_set = true;
    4007     11462729 :   return reason;
    4008              : }
    4009              : 
    4010              : 
    4011              : static const char *inline_forbidden_reason;
    4012              : 
    4013              : /* A callback for walk_gimple_seq to handle statements.  Returns non-null
    4014              :    iff a function cannot be inlined.  Also sets the reason why. */
    4015              : 
    4016              : static tree
    4017    165479525 : inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
    4018              :                          struct walk_stmt_info *wip)
    4019              : {
    4020    165479525 :   tree fn = (tree) wip->info;
    4021    165479525 :   tree t;
    4022    165479525 :   gimple *stmt = gsi_stmt (*gsi);
    4023              : 
    4024    165479525 :   switch (gimple_code (stmt))
    4025              :     {
    4026     18488058 :     case GIMPLE_CALL:
    4027              :       /* Refuse to inline alloca call unless user explicitly forced so as
    4028              :          this may change program's memory overhead drastically when the
    4029              :          function using alloca is called in loop.  In GCC present in
    4030              :          SPEC2000 inlining into schedule_block cause it to require 2GB of
    4031              :          RAM instead of 256MB.  Don't do so for alloca calls emitted for
    4032              :          VLA objects as those can't cause unbounded growth (they're always
    4033              :          wrapped inside stack_save/stack_restore regions.  */
    4034     18488058 :       if (gimple_maybe_alloca_call_p (stmt)
    4035         9547 :           && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
    4036     18492931 :           && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
    4037              :         {
    4038         4745 :           inline_forbidden_reason
    4039         4745 :             = G_("function %q+F can never be inlined because it uses "
    4040              :                  "alloca (override using the always_inline attribute)");
    4041         4745 :           *handled_ops_p = true;
    4042         4745 :           return fn;
    4043              :         }
    4044              : 
    4045     18483313 :       t = gimple_call_fndecl (stmt);
    4046     18483313 :       if (t == NULL_TREE)
    4047              :         break;
    4048              : 
    4049              :       /* We cannot inline functions that call setjmp.  */
    4050     17835706 :       if (setjmp_call_p (t))
    4051              :         {
    4052          965 :           inline_forbidden_reason
    4053          965 :             = G_("function %q+F can never be inlined because it uses setjmp");
    4054          965 :           *handled_ops_p = true;
    4055          965 :           return t;
    4056              :         }
    4057              : 
    4058     17834741 :       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
    4059      3645275 :         switch (DECL_FUNCTION_CODE (t))
    4060              :           {
    4061              :             /* We cannot inline functions that take a variable number of
    4062              :                arguments.  */
    4063         2758 :           case BUILT_IN_VA_START:
    4064         2758 :           case BUILT_IN_NEXT_ARG:
    4065         2758 :           case BUILT_IN_VA_END:
    4066         2758 :             inline_forbidden_reason
    4067         2758 :               = G_("function %q+F can never be inlined because it "
    4068              :                    "uses variable argument lists");
    4069         2758 :             *handled_ops_p = true;
    4070         2758 :             return t;
    4071              : 
    4072          226 :           case BUILT_IN_LONGJMP:
    4073              :             /* We can't inline functions that call __builtin_longjmp at
    4074              :                all.  The non-local goto machinery really requires the
    4075              :                destination be in a different function.  If we allow the
    4076              :                function calling __builtin_longjmp to be inlined into the
    4077              :                function calling __builtin_setjmp, Things will Go Awry.  */
    4078          226 :             inline_forbidden_reason
    4079          226 :               = G_("function %q+F can never be inlined because "
    4080              :                    "it uses setjmp-longjmp exception handling");
    4081          226 :             *handled_ops_p = true;
    4082          226 :             return t;
    4083              : 
    4084          269 :           case BUILT_IN_NONLOCAL_GOTO:
    4085              :             /* Similarly.  */
    4086          269 :             inline_forbidden_reason
    4087          269 :               = G_("function %q+F can never be inlined because "
    4088              :                    "it uses non-local goto");
    4089          269 :             *handled_ops_p = true;
    4090          269 :             return t;
    4091              : 
    4092          384 :           case BUILT_IN_RETURN:
    4093          384 :           case BUILT_IN_APPLY_ARGS:
    4094              :             /* If a __builtin_apply_args caller would be inlined,
    4095              :                it would be saving arguments of the function it has
    4096              :                been inlined into.  Similarly __builtin_return would
    4097              :                return from the function the inline has been inlined into.  */
    4098          384 :             inline_forbidden_reason
    4099          384 :               = G_("function %q+F can never be inlined because "
    4100              :                    "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
    4101          384 :             *handled_ops_p = true;
    4102          384 :             return t;
    4103              : 
    4104              :           default:
    4105              :             break;
    4106              :           }
    4107              :       break;
    4108              : 
    4109          291 :     case GIMPLE_GOTO:
    4110          291 :       t = gimple_goto_dest (stmt);
    4111              : 
    4112              :       /* We will not inline a function which uses computed goto.  The
    4113              :          addresses of its local labels, which may be tucked into
    4114              :          global storage, are of course not constant across
    4115              :          instantiations, which causes unexpected behavior.  */
    4116          291 :       if (TREE_CODE (t) != LABEL_DECL)
    4117              :         {
    4118          291 :           inline_forbidden_reason
    4119          291 :             = G_("function %q+F can never be inlined "
    4120              :                  "because it contains a computed goto");
    4121          291 :           *handled_ops_p = true;
    4122          291 :           return t;
    4123              :         }
    4124              :       break;
    4125              : 
    4126              :     default:
    4127              :       break;
    4128              :     }
    4129              : 
    4130    165469887 :   *handled_ops_p = false;
    4131    165469887 :   return NULL_TREE;
    4132              : }
    4133              : 
    4134              : /* Return true if FNDECL is a function that cannot be inlined into
    4135              :    another one.  */
    4136              : 
    4137              : static bool
    4138      5680812 : inline_forbidden_p (tree fndecl)
    4139              : {
    4140      5680812 :   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
    4141      5680812 :   struct walk_stmt_info wi;
    4142      5680812 :   basic_block bb;
    4143      5680812 :   bool forbidden_p = false;
    4144              : 
    4145              :   /* First check for shared reasons not to copy the code.  */
    4146      5680812 :   inline_forbidden_reason = copy_forbidden (fun);
    4147      5680812 :   if (inline_forbidden_reason != NULL)
    4148              :     return true;
    4149              : 
    4150              :   /* Next, walk the statements of the function looking for
    4151              :      constraucts we can't handle, or are non-optimal for inlining.  */
    4152      5680114 :   hash_set<tree> visited_nodes;
    4153      5680114 :   memset (&wi, 0, sizeof (wi));
    4154      5680114 :   wi.info = (void *) fndecl;
    4155      5680114 :   wi.pset = &visited_nodes;
    4156              : 
    4157              :   /* We cannot inline a function with a variable-sized parameter because we
    4158              :      cannot materialize a temporary of such a type in the caller if need be.
    4159              :      Note that the return case is not symmetrical because we can guarantee
    4160              :      that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT.  */
    4161     18129826 :   for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    4162     12449714 :     if (!poly_int_tree_p (DECL_SIZE (parm)))
    4163              :       {
    4164            2 :         inline_forbidden_reason
    4165            2 :           = G_("function %q+F can never be inlined because "
    4166              :                "it has a VLA argument");
    4167            2 :         return true;
    4168              :       }
    4169              : 
    4170     37147365 :   FOR_EACH_BB_FN (bb, fun)
    4171              :     {
    4172     31476891 :       gimple *ret;
    4173     31476891 :       gimple_seq seq = bb_seq (bb);
    4174     31476891 :       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
    4175     31476891 :       forbidden_p = (ret != NULL);
    4176     31476891 :       if (forbidden_p)
    4177              :         break;
    4178              :     }
    4179              : 
    4180              :   return forbidden_p;
    4181      5680114 : }
    4182              : 
    4183              : /* Return false if the function FNDECL cannot be inlined on account of its
    4184              :    attributes, true otherwise.  */
    4185              : static bool
    4186      5680812 : function_attribute_inlinable_p (const_tree fndecl)
    4187              : {
    4188     11339415 :   for (auto scoped_attributes : targetm.attribute_table)
    4189              :     {
    4190      5680812 :       const_tree a;
    4191              : 
    4192      7479264 :       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
    4193              :         {
    4194      1820661 :           const_tree name = get_attribute_name (a);
    4195              : 
    4196     52245637 :           for (const attribute_spec &attribute : scoped_attributes->attributes)
    4197     50447185 :             if (is_attribute_p (attribute.name, name))
    4198        22209 :               return targetm.function_attribute_inlinable_p (fndecl);
    4199              :         }
    4200              :     }
    4201              : 
    4202              :   return true;
    4203              : }
    4204              : 
    4205              : /* Returns nonzero if FN is a function that does not have any
    4206              :    fundamental inline blocking properties.  */
    4207              : 
    4208              : bool
    4209      6311175 : tree_inlinable_function_p (tree fn)
    4210              : {
    4211      6311175 :   bool inlinable = true;
    4212      6311175 :   bool do_warning;
    4213      6311175 :   tree always_inline;
    4214              : 
    4215              :   /* If we've already decided this function shouldn't be inlined,
    4216              :      there's no need to check again.  */
    4217      6311175 :   if (DECL_UNINLINABLE (fn))
    4218              :     return false;
    4219              : 
    4220              :   /* We only warn for functions declared `inline' by the user.  */
    4221      5707943 :   do_warning = (opt_for_fn (fn, warn_inline)
    4222          631 :                 && DECL_DECLARED_INLINE_P (fn)
    4223          502 :                 && !DECL_NO_INLINE_WARNING_P (fn)
    4224      5708324 :                 && !DECL_IN_SYSTEM_HEADER (fn));
    4225              : 
    4226      5707943 :   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
    4227              : 
    4228      5707943 :   if (flag_no_inline
    4229       102153 :       && always_inline == NULL)
    4230              :     {
    4231        27131 :       if (do_warning)
    4232            0 :         warning (OPT_Winline, "function %q+F can never be inlined because it "
    4233              :                  "is suppressed using %<-fno-inline%>", fn);
    4234              :       inlinable = false;
    4235              :     }
    4236              : 
    4237      5680812 :   else if (!function_attribute_inlinable_p (fn))
    4238              :     {
    4239            0 :       if (do_warning)
    4240            0 :         warning (OPT_Winline, "function %q+F can never be inlined because it "
    4241              :                  "uses attributes conflicting with inlining", fn);
    4242              :       inlinable = false;
    4243              :     }
    4244              : 
    4245      5680812 :   else if (inline_forbidden_p (fn))
    4246              :     {
    4247              :       /* See if we should warn about uninlinable functions.  Previously,
    4248              :          some of these warnings would be issued while trying to expand
    4249              :          the function inline, but that would cause multiple warnings
    4250              :          about functions that would for example call alloca.  But since
    4251              :          this a property of the function, just one warning is enough.
    4252              :          As a bonus we can now give more details about the reason why a
    4253              :          function is not inlinable.  */
    4254        10338 :       if (always_inline)
    4255            2 :         error (inline_forbidden_reason, fn);
    4256        10336 :       else if (do_warning)
    4257            2 :         warning (OPT_Winline, inline_forbidden_reason, fn);
    4258              : 
    4259              :       inlinable = false;
    4260              :     }
    4261              : 
    4262              :   /* Squirrel away the result so that we don't have to check again.  */
    4263      5707943 :   DECL_UNINLINABLE (fn) = !inlinable;
    4264              : 
    4265      5707943 :   return inlinable;
    4266              : }
    4267              : 
    4268              : /* Estimate the cost of a memory move of type TYPE.  Use machine dependent
    4269              :    word size and take possible memcpy call into account and return
    4270              :    cost based on whether optimizing for size or speed according to SPEED_P.  */
    4271              : 
    4272              : int
    4273    337379043 : estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
    4274              : {
    4275    337379043 :   HOST_WIDE_INT size;
    4276              : 
    4277    337379043 :   gcc_assert (!VOID_TYPE_P (type));
    4278              : 
    4279    337379043 :   if (VECTOR_TYPE_P (type))
    4280              :     {
    4281      3992673 :       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
    4282      3992673 :       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
    4283      3992673 :       int orig_mode_size
    4284      7985346 :         = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
    4285      7985346 :       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
    4286      3992673 :       return ((orig_mode_size + simd_mode_size - 1)
    4287      3992673 :               / simd_mode_size);
    4288              :     }
    4289              : 
    4290    333386370 :   size = int_size_in_bytes (type);
    4291              : 
    4292    333388838 :   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
    4293              :     /* Cost of a memcpy call, 3 arguments and the call.  */
    4294              :     return 4;
    4295              :   else
    4296    332464022 :     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
    4297              : }
    4298              : 
    4299              : /* Returns cost of operation CODE, according to WEIGHTS  */
    4300              : 
    4301              : static int
    4302    464877727 : estimate_operator_cost (enum tree_code code, eni_weights *weights,
    4303              :                         tree op1 ATTRIBUTE_UNUSED, tree op2)
    4304              : {
    4305    464877727 :   switch (code)
    4306              :     {
    4307              :     /* These are "free" conversions, or their presumed cost
    4308              :        is folded into other operations.  */
    4309              :     case RANGE_EXPR:
    4310              :     CASE_CONVERT:
    4311              :     case COMPLEX_EXPR:
    4312              :     case PAREN_EXPR:
    4313              :     case VIEW_CONVERT_EXPR:
    4314              :       return 0;
    4315              : 
    4316              :     /* Assign cost of 1 to usual operations.
    4317              :        ??? We may consider mapping RTL costs to this.  */
    4318              :     case COND_EXPR:
    4319              :     case VEC_COND_EXPR:
    4320              :     case VEC_PERM_EXPR:
    4321              : 
    4322              :     case PLUS_EXPR:
    4323              :     case POINTER_PLUS_EXPR:
    4324              :     case POINTER_DIFF_EXPR:
    4325              :     case MINUS_EXPR:
    4326              :     case MULT_EXPR:
    4327              :     case MULT_HIGHPART_EXPR:
    4328              : 
    4329              :     case ADDR_SPACE_CONVERT_EXPR:
    4330              :     case FIXED_CONVERT_EXPR:
    4331              :     case FIX_TRUNC_EXPR:
    4332              : 
    4333              :     case NEGATE_EXPR:
    4334              :     case FLOAT_EXPR:
    4335              :     case MIN_EXPR:
    4336              :     case MAX_EXPR:
    4337              :     case ABS_EXPR:
    4338              :     case ABSU_EXPR:
    4339              : 
    4340              :     case LSHIFT_EXPR:
    4341              :     case RSHIFT_EXPR:
    4342              :     case LROTATE_EXPR:
    4343              :     case RROTATE_EXPR:
    4344              : 
    4345              :     case BIT_IOR_EXPR:
    4346              :     case BIT_XOR_EXPR:
    4347              :     case BIT_AND_EXPR:
    4348              :     case BIT_NOT_EXPR:
    4349              : 
    4350              :     case TRUTH_ANDIF_EXPR:
    4351              :     case TRUTH_ORIF_EXPR:
    4352              :     case TRUTH_AND_EXPR:
    4353              :     case TRUTH_OR_EXPR:
    4354              :     case TRUTH_XOR_EXPR:
    4355              :     case TRUTH_NOT_EXPR:
    4356              : 
    4357              :     case LT_EXPR:
    4358              :     case LE_EXPR:
    4359              :     case GT_EXPR:
    4360              :     case GE_EXPR:
    4361              :     case EQ_EXPR:
    4362              :     case NE_EXPR:
    4363              :     case ORDERED_EXPR:
    4364              :     case UNORDERED_EXPR:
    4365              : 
    4366              :     case UNLT_EXPR:
    4367              :     case UNLE_EXPR:
    4368              :     case UNGT_EXPR:
    4369              :     case UNGE_EXPR:
    4370              :     case UNEQ_EXPR:
    4371              :     case LTGT_EXPR:
    4372              : 
    4373              :     case CONJ_EXPR:
    4374              : 
    4375              :     case PREDECREMENT_EXPR:
    4376              :     case PREINCREMENT_EXPR:
    4377              :     case POSTDECREMENT_EXPR:
    4378              :     case POSTINCREMENT_EXPR:
    4379              : 
    4380              :     case REALIGN_LOAD_EXPR:
    4381              : 
    4382              :     case WIDEN_SUM_EXPR:
    4383              :     case WIDEN_MULT_EXPR:
    4384              :     case DOT_PROD_EXPR:
    4385              :     case SAD_EXPR:
    4386              :     case WIDEN_MULT_PLUS_EXPR:
    4387              :     case WIDEN_MULT_MINUS_EXPR:
    4388              :     case WIDEN_LSHIFT_EXPR:
    4389              : 
    4390              :     case VEC_WIDEN_MULT_HI_EXPR:
    4391              :     case VEC_WIDEN_MULT_LO_EXPR:
    4392              :     case VEC_WIDEN_MULT_EVEN_EXPR:
    4393              :     case VEC_WIDEN_MULT_ODD_EXPR:
    4394              :     case VEC_UNPACK_HI_EXPR:
    4395              :     case VEC_UNPACK_LO_EXPR:
    4396              :     case VEC_UNPACK_FLOAT_HI_EXPR:
    4397              :     case VEC_UNPACK_FLOAT_LO_EXPR:
    4398              :     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    4399              :     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    4400              :     case VEC_PACK_TRUNC_EXPR:
    4401              :     case VEC_PACK_SAT_EXPR:
    4402              :     case VEC_PACK_FIX_TRUNC_EXPR:
    4403              :     case VEC_PACK_FLOAT_EXPR:
    4404              :     case VEC_WIDEN_LSHIFT_HI_EXPR:
    4405              :     case VEC_WIDEN_LSHIFT_LO_EXPR:
    4406              :     case VEC_DUPLICATE_EXPR:
    4407              :     case VEC_SERIES_EXPR:
    4408              : 
    4409              :       return 1;
    4410              : 
    4411              :     /* Few special cases of expensive operations.  This is useful
    4412              :        to avoid inlining on functions having too many of these.  */
    4413      2917304 :     case TRUNC_DIV_EXPR:
    4414      2917304 :     case CEIL_DIV_EXPR:
    4415      2917304 :     case FLOOR_DIV_EXPR:
    4416      2917304 :     case ROUND_DIV_EXPR:
    4417      2917304 :     case EXACT_DIV_EXPR:
    4418      2917304 :     case TRUNC_MOD_EXPR:
    4419      2917304 :     case CEIL_MOD_EXPR:
    4420      2917304 :     case FLOOR_MOD_EXPR:
    4421      2917304 :     case ROUND_MOD_EXPR:
    4422      2917304 :     case RDIV_EXPR:
    4423      2917304 :       if (TREE_CODE (op2) != INTEGER_CST)
    4424      1319970 :         return weights->div_mod_cost;
    4425              :       return 1;
    4426              : 
    4427              :     /* Bit-field insertion needs several shift and mask operations.  */
    4428              :     case BIT_INSERT_EXPR:
    4429              :       return 3;
    4430              : 
    4431    189637303 :     default:
    4432              :       /* We expect a copy assignment with no operator.  */
    4433    189637303 :       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
    4434              :       return 0;
    4435              :     }
    4436              : }
    4437              : 
    4438              : 
    4439              : /* Estimate number of instructions that will be created by expanding
    4440              :    the statements in the statement sequence STMTS.
    4441              :    WEIGHTS contains weights attributed to various constructs.  */
    4442              : 
    4443              : int
    4444       232561 : estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
    4445              : {
    4446       232561 :   int cost;
    4447       232561 :   gimple_stmt_iterator gsi;
    4448              : 
    4449       232561 :   cost = 0;
    4450       615720 :   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    4451       383159 :     cost += estimate_num_insns (gsi_stmt (gsi), weights);
    4452              : 
    4453       232561 :   return cost;
    4454              : }
    4455              : 
    4456              : 
    4457              : /* Estimate number of instructions that will be created by expanding STMT.
    4458              :    WEIGHTS contains weights attributed to various constructs.  */
    4459              : 
    4460              : int
    4461    626739109 : estimate_num_insns (gimple *stmt, eni_weights *weights)
    4462              : {
    4463    626739109 :   unsigned cost, i;
    4464    626739109 :   enum gimple_code code = gimple_code (stmt);
    4465    626739109 :   tree lhs;
    4466    626739109 :   tree rhs;
    4467              : 
    4468    626739109 :   switch (code)
    4469              :     {
    4470    355767326 :     case GIMPLE_ASSIGN:
    4471              :       /* Try to estimate the cost of assignments.  We have three cases to
    4472              :          deal with:
    4473              :          1) Simple assignments to registers;
    4474              :          2) Stores to things that must live in memory.  This includes
    4475              :             "normal" stores to scalars, but also assignments of large
    4476              :             structures, or constructors of big arrays;
    4477              : 
    4478              :          Let us look at the first two cases, assuming we have "a = b + C":
    4479              :          <GIMPLE_ASSIGN <var_decl "a">
    4480              :                 <plus_expr <var_decl "b"> <constant C>>
    4481              :          If "a" is a GIMPLE register, the assignment to it is free on almost
    4482              :          any target, because "a" usually ends up in a real register.  Hence
    4483              :          the only cost of this expression comes from the PLUS_EXPR, and we
    4484              :          can ignore the GIMPLE_ASSIGN.
    4485              :          If "a" is not a GIMPLE register, the assignment to "a" will most
    4486              :          likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
    4487              :          of moving something into "a", which we compute using the function
    4488              :          estimate_move_cost.  */
    4489    355767326 :       if (gimple_clobber_p (stmt))
    4490              :         return 0;       /* ={v} {CLOBBER} stmt expands to nothing.  */
    4491              : 
    4492    337991612 :       lhs = gimple_assign_lhs (stmt);
    4493    337991612 :       rhs = gimple_assign_rhs1 (stmt);
    4494              : 
    4495    337991612 :       cost = 0;
    4496              : 
    4497              :       /* Account for the cost of moving to / from memory.  */
    4498    337991612 :       if (gimple_store_p (stmt))
    4499     76995896 :         cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
    4500    337991612 :       if (gimple_assign_load_p (stmt))
    4501     90591035 :         cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
    4502              : 
    4503    445370700 :       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
    4504              :                                       gimple_assign_rhs1 (stmt),
    4505    337991612 :                                       get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
    4506              :                                       == GIMPLE_BINARY_RHS
    4507    107379088 :                                       ? gimple_assign_rhs2 (stmt) : NULL);
    4508    337991612 :       break;
    4509              : 
    4510    126886115 :     case GIMPLE_COND:
    4511    126886115 :       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
    4512              :                                          gimple_op (stmt, 0),
    4513              :                                          gimple_op (stmt, 1));
    4514    126886115 :       break;
    4515              : 
    4516       837959 :     case GIMPLE_SWITCH:
    4517       837959 :       {
    4518       837959 :         gswitch *switch_stmt = as_a <gswitch *> (stmt);
    4519              :         /* Take into account cost of the switch + guess 2 conditional jumps for
    4520              :            each case label.
    4521              : 
    4522              :            TODO: once the switch expansion logic is sufficiently separated, we can
    4523              :            do better job on estimating cost of the switch.  */
    4524       837959 :         if (weights->time_based)
    4525       125612 :           cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
    4526              :         else
    4527       712347 :           cost = gimple_switch_num_labels (switch_stmt) * 2;
    4528              :       }
    4529              :       break;
    4530              : 
    4531     71979363 :     case GIMPLE_CALL:
    4532     71979363 :       {
    4533     71979363 :         tree decl;
    4534              : 
    4535     71979363 :         if (gimple_call_internal_p (stmt))
    4536              :           return 0;
    4537     69215931 :         else if ((decl = gimple_call_fndecl (stmt))
    4538     69215931 :                  && fndecl_built_in_p (decl))
    4539              :           {
    4540              :             /* Do not special case builtins where we see the body.
    4541              :                This just confuse inliner.  */
    4542     17790179 :             struct cgraph_node *node;
    4543     17790179 :             if ((node = cgraph_node::get (decl))
    4544     17790179 :                 && node->definition)
    4545              :               ;
    4546              :             /* For buitins that are likely expanded to nothing or
    4547              :                inlined do not account operand costs.  */
    4548     17760228 :             else if (is_simple_builtin (decl))
    4549              :               return 0;
    4550     15217687 :             else if (is_inexpensive_builtin (decl))
    4551      1950850 :               return weights->target_builtin_call_cost;
    4552     13266837 :             else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    4553              :               {
    4554              :                 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
    4555              :                    specialize the cheap expansion we do here.
    4556              :                    ???  This asks for a more general solution.  */
    4557     13031913 :                 switch (DECL_FUNCTION_CODE (decl))
    4558              :                   {
    4559         9154 :                     case BUILT_IN_POW:
    4560         9154 :                     case BUILT_IN_POWF:
    4561         9154 :                     case BUILT_IN_POWL:
    4562         9154 :                       if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
    4563        13023 :                           && (real_equal
    4564         3869 :                               (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
    4565              :                                &dconst2)))
    4566          565 :                         return estimate_operator_cost
    4567          565 :                             (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
    4568          565 :                              gimple_call_arg (stmt, 0));
    4569              :                       break;
    4570              : 
    4571              :                     default:
    4572              :                       break;
    4573              :                   }
    4574              :               }
    4575              :           }
    4576              : 
    4577     64721975 :         cost = decl ? weights->call_cost : weights->indirect_call_cost;
    4578     64721975 :         if (gimple_call_lhs (stmt))
    4579     25467405 :           cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
    4580     25467405 :                                       weights->time_based);
    4581    194881578 :         for (i = 0; i < gimple_call_num_args (stmt); i++)
    4582              :           {
    4583    130159603 :             tree arg = gimple_call_arg (stmt, i);
    4584    130159603 :             cost += estimate_move_cost (TREE_TYPE (arg),
    4585    130159603 :                                         weights->time_based);
    4586              :           }
    4587              :         break;
    4588              :       }
    4589              : 
    4590     17533650 :     case GIMPLE_RETURN:
    4591     17533650 :       return weights->return_cost;
    4592              : 
    4593              :     case GIMPLE_GOTO:
    4594              :     case GIMPLE_LABEL:
    4595              :     case GIMPLE_NOP:
    4596              :     case GIMPLE_PHI:
    4597              :     case GIMPLE_PREDICT:
    4598              :     case GIMPLE_DEBUG:
    4599              :       return 0;
    4600              : 
    4601       758472 :     case GIMPLE_ASM:
    4602       758472 :       {
    4603       758472 :         int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
    4604              :         /* 1000 means infinity. This avoids overflows later
    4605              :            with very long asm statements.  */
    4606       758472 :         if (count > 1000)
    4607              :           count = 1000;
    4608              :         /* If this asm is asm inline, count anything as minimum size.  */
    4609       758472 :         if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
    4610          662 :           count = MIN (1, count);
    4611       758472 :         return MAX (1, count);
    4612              :       }
    4613              : 
    4614              :     case GIMPLE_RESX:
    4615              :       /* This is either going to be an external function call with one
    4616              :          argument, or two register copy statements plus a goto.  */
    4617              :       return 2;
    4618              : 
    4619        14084 :     case GIMPLE_EH_DISPATCH:
    4620              :       /* ??? This is going to turn into a switch statement.  Ideally
    4621              :          we'd have a look at the eh region and estimate the number of
    4622              :          edges involved.  */
    4623        14084 :       return 10;
    4624              : 
    4625            0 :     case GIMPLE_BIND:
    4626            0 :       return estimate_num_insns_seq (
    4627            0 :                gimple_bind_body (as_a <gbind *> (stmt)),
    4628            0 :                weights);
    4629              : 
    4630            0 :     case GIMPLE_EH_FILTER:
    4631            0 :       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
    4632              : 
    4633         8292 :     case GIMPLE_CATCH:
    4634         8292 :       return estimate_num_insns_seq (gimple_catch_handler (
    4635         8292 :                                        as_a <gcatch *> (stmt)),
    4636         8292 :                                      weights);
    4637              : 
    4638         8297 :     case GIMPLE_TRY:
    4639         8297 :       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
    4640         8297 :               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
    4641              : 
    4642              :     /* OMP directives are generally very expensive.  */
    4643              : 
    4644              :     case GIMPLE_OMP_RETURN:
    4645              :     case GIMPLE_OMP_SECTIONS_SWITCH:
    4646              :     case GIMPLE_OMP_ATOMIC_STORE:
    4647              :     case GIMPLE_OMP_CONTINUE:
    4648              :       /* ...except these, which are cheap.  */
    4649              :       return 0;
    4650              : 
    4651            0 :     case GIMPLE_OMP_ATOMIC_LOAD:
    4652            0 :       return weights->omp_cost;
    4653              : 
    4654            0 :     case GIMPLE_OMP_FOR:
    4655            0 :       return (weights->omp_cost
    4656            0 :               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
    4657            0 :               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
    4658              : 
    4659            0 :     case GIMPLE_OMP_PARALLEL:
    4660            0 :     case GIMPLE_OMP_TASK:
    4661            0 :     case GIMPLE_OMP_CRITICAL:
    4662            0 :     case GIMPLE_OMP_MASTER:
    4663            0 :     case GIMPLE_OMP_MASKED:
    4664            0 :     case GIMPLE_OMP_SCOPE:
    4665            0 :     case GIMPLE_OMP_DISPATCH:
    4666            0 :     case GIMPLE_OMP_TASKGROUP:
    4667            0 :     case GIMPLE_OMP_ORDERED:
    4668            0 :     case GIMPLE_OMP_SCAN:
    4669            0 :     case GIMPLE_OMP_SECTION:
    4670            0 :     case GIMPLE_OMP_SECTIONS:
    4671            0 :     case GIMPLE_OMP_STRUCTURED_BLOCK:
    4672            0 :     case GIMPLE_OMP_SINGLE:
    4673            0 :     case GIMPLE_OMP_TARGET:
    4674            0 :     case GIMPLE_OMP_TEAMS:
    4675            0 :       return (weights->omp_cost
    4676            0 :               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
    4677              : 
    4678           82 :     case GIMPLE_TRANSACTION:
    4679           82 :       return (weights->tm_cost
    4680           82 :               + estimate_num_insns_seq (gimple_transaction_body (
    4681           82 :                                           as_a <gtransaction *> (stmt)),
    4682           82 :                                         weights));
    4683              : 
    4684            0 :     default:
    4685            0 :       gcc_unreachable ();
    4686              :     }
    4687              : 
    4688    530437661 :   return cost;
    4689              : }
    4690              : 
    4691              : /* Estimate number of instructions that will be created by expanding
    4692              :    function FNDECL.  WEIGHTS contains weights attributed to various
    4693              :    constructs.  */
    4694              : 
    4695              : int
    4696            0 : estimate_num_insns_fn (tree fndecl, eni_weights *weights)
    4697              : {
    4698            0 :   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
    4699            0 :   gimple_stmt_iterator bsi;
    4700            0 :   basic_block bb;
    4701            0 :   int n = 0;
    4702              : 
    4703            0 :   gcc_assert (my_function && my_function->cfg);
    4704            0 :   FOR_EACH_BB_FN (bb, my_function)
    4705              :     {
    4706            0 :       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    4707            0 :         n += estimate_num_insns (gsi_stmt (bsi), weights);
    4708              :     }
    4709              : 
    4710            0 :   return n;
    4711              : }
    4712              : 
    4713              : 
    4714              : /* Initializes weights used by estimate_num_insns.  */
    4715              : 
    4716              : void
    4717       278641 : init_inline_once (void)
    4718              : {
    4719       278641 :   eni_size_weights.call_cost = 1;
    4720       278641 :   eni_size_weights.indirect_call_cost = 3;
    4721       278641 :   eni_size_weights.target_builtin_call_cost = 1;
    4722       278641 :   eni_size_weights.div_mod_cost = 1;
    4723       278641 :   eni_size_weights.omp_cost = 40;
    4724       278641 :   eni_size_weights.tm_cost = 10;
    4725       278641 :   eni_size_weights.time_based = false;
    4726       278641 :   eni_size_weights.return_cost = 1;
    4727              : 
    4728              :   /* Estimating time for call is difficult, since we have no idea what the
    4729              :      called function does.  In the current uses of eni_time_weights,
    4730              :      underestimating the cost does less harm than overestimating it, so
    4731              :      we choose a rather small value here.  */
    4732       278641 :   eni_time_weights.call_cost = 10;
    4733       278641 :   eni_time_weights.indirect_call_cost = 15;
    4734       278641 :   eni_time_weights.target_builtin_call_cost = 1;
    4735       278641 :   eni_time_weights.div_mod_cost = 10;
    4736       278641 :   eni_time_weights.omp_cost = 40;
    4737       278641 :   eni_time_weights.tm_cost = 40;
    4738       278641 :   eni_time_weights.time_based = true;
    4739       278641 :   eni_time_weights.return_cost = 2;
    4740       278641 : }
    4741              : 
    4742              : 
    4743              : /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
    4744              : 
    4745              : static void
    4746     26712576 : prepend_lexical_block (tree current_block, tree new_block)
    4747              : {
    4748     26712576 :   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
    4749     26712576 :   BLOCK_SUBBLOCKS (current_block) = new_block;
    4750     26712576 :   BLOCK_SUPERCONTEXT (new_block) = current_block;
    4751     26712576 : }
    4752              : 
    4753              : /* Add local variables from CALLEE to CALLER.  */
    4754              : 
    4755              : static inline void
    4756      4450780 : add_local_variables (struct function *callee, struct function *caller,
    4757              :                      copy_body_data *id)
    4758              : {
    4759      4450780 :   tree var;
    4760      4450780 :   unsigned ix;
    4761              : 
    4762     12015255 :   FOR_EACH_LOCAL_DECL (callee, ix, var)
    4763      3908903 :     if (!can_be_nonlocal (var, id))
    4764              :       {
    4765      3825111 :         tree new_var = remap_decl (var, id);
    4766              : 
    4767              :         /* Remap debug-expressions.  */
    4768      3825111 :         if (VAR_P (new_var)
    4769      3825111 :             && DECL_HAS_DEBUG_EXPR_P (var)
    4770      4070701 :             && new_var != var)
    4771              :           {
    4772       245590 :             tree tem = DECL_DEBUG_EXPR (var);
    4773       245590 :             bool old_regimplify = id->regimplify;
    4774       245590 :             id->remapping_type_depth++;
    4775       245590 :             walk_tree (&tem, copy_tree_body_r, id, NULL);
    4776       245590 :             id->remapping_type_depth--;
    4777       245590 :             id->regimplify = old_regimplify;
    4778       245590 :             SET_DECL_DEBUG_EXPR (new_var, tem);
    4779       245590 :             DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
    4780              :           }
    4781      3825111 :         add_local_decl (caller, new_var);
    4782              :       }
    4783      4450780 : }
    4784              : 
    4785              : /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
    4786              :    have brought in or introduced any debug stmts for SRCVAR.  */
    4787              : 
    4788              : static inline void
    4789      9006038 : reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
    4790              : {
    4791      9006038 :   tree *remappedvarp = id->decl_map->get (srcvar);
    4792              : 
    4793      9006038 :   if (!remappedvarp)
    4794              :     return;
    4795              : 
    4796      8953431 :   if (!VAR_P (*remappedvarp))
    4797              :     return;
    4798              : 
    4799      8818520 :   if (*remappedvarp == id->retvar)
    4800              :     return;
    4801              : 
    4802      8818520 :   tree tvar = target_for_debug_bind (*remappedvarp);
    4803      8818520 :   if (!tvar)
    4804              :     return;
    4805              : 
    4806     14524080 :   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
    4807      7262040 :                                           id->call_stmt);
    4808      7262040 :   gimple_seq_add_stmt (bindings, stmt);
    4809              : }
    4810              : 
    4811              : /* For each inlined variable for which we may have debug bind stmts,
    4812              :    add before GSI a final debug stmt resetting it, marking the end of
    4813              :    its life, so that var-tracking knows it doesn't have to compute
    4814              :    further locations for it.  */
    4815              : 
    4816              : static inline void
    4817      4316706 : reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
    4818              : {
    4819      4316706 :   tree var;
    4820      4316706 :   unsigned ix;
    4821      4316706 :   gimple_seq bindings = NULL;
    4822              : 
    4823      4316706 :   if (!gimple_in_ssa_p (id->src_cfun))
    4824       537197 :     return;
    4825              : 
    4826      4316706 :   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    4827              :     return;
    4828              : 
    4829      3779509 :   for (var = DECL_ARGUMENTS (id->src_fn);
    4830     10313716 :        var; var = DECL_CHAIN (var))
    4831      6534207 :     reset_debug_binding (id, var, &bindings);
    4832              : 
    4833      9340697 :   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
    4834      2471831 :     reset_debug_binding (id, var, &bindings);
    4835              : 
    4836      3779509 :   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
    4837              : }
    4838              : 
    4839              : /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
    4840              : 
    4841              : static bool
    4842     14966958 : expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
    4843              :                     bitmap to_purge)
    4844              : {
    4845     14966958 :   tree use_retvar;
    4846     14966958 :   tree fn;
    4847     14966958 :   hash_map<tree, tree> *dst;
    4848     14966958 :   hash_map<tree, tree> *st = NULL;
    4849     14966958 :   tree return_slot;
    4850     14966958 :   tree modify_dest;
    4851     14966958 :   struct cgraph_edge *cg_edge;
    4852     14966958 :   cgraph_inline_failed_t reason;
    4853     14966958 :   basic_block return_block;
    4854     14966958 :   edge e;
    4855     14966958 :   gimple_stmt_iterator gsi, stmt_gsi;
    4856     14966958 :   bool successfully_inlined = false;
    4857     14966958 :   bool purge_dead_abnormal_edges;
    4858     14966958 :   gcall *call_stmt;
    4859     14966958 :   unsigned int prop_mask, src_properties;
    4860     14966958 :   struct function *dst_cfun;
    4861     14966958 :   tree simduid;
    4862     14966958 :   use_operand_p use;
    4863     14966958 :   gimple *simtenter_stmt = NULL;
    4864     14966958 :   vec<tree> *simtvars_save;
    4865     14966958 :   tree save_stack = NULL_TREE;
    4866              : 
    4867              :   /* The gimplifier uses input_location in too many places, such as
    4868              :      internal_get_tmp_var ().  */
    4869     14966958 :   location_t saved_location = input_location;
    4870     14966958 :   input_location = gimple_location (stmt);
    4871              : 
    4872              :   /* From here on, we're only interested in CALL_EXPRs.  */
    4873     14966958 :   call_stmt = dyn_cast <gcall *> (stmt);
    4874     14966958 :   if (!call_stmt)
    4875            0 :     goto egress;
    4876              : 
    4877     14966958 :   cg_edge = id->dst_node->get_edge (stmt);
    4878              :   /* Edge should exist and speculations should be resolved at this
    4879              :      stage.  */
    4880     14966958 :   gcc_checking_assert (cg_edge && !cg_edge->speculative);
    4881              :   /* First, see if we can figure out what function is being called.
    4882              :      If we cannot, then there is no hope of inlining the function.  */
    4883     14966958 :   if (cg_edge->indirect_unknown_callee)
    4884       242650 :     goto egress;
    4885     14724308 :   fn = cg_edge->callee->decl;
    4886     14724308 :   gcc_checking_assert (fn);
    4887              : 
    4888              :   /* If FN is a declaration of a function in a nested scope that was
    4889              :      globally declared inline, we don't set its DECL_INITIAL.
    4890              :      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
    4891              :      C++ front-end uses it for cdtors to refer to their internal
    4892              :      declarations, that are not real functions.  Fortunately those
    4893              :      don't have trees to be saved, so we can tell by checking their
    4894              :      gimple_body.  */
    4895     14724308 :   if (!DECL_INITIAL (fn)
    4896      6605824 :       && DECL_ABSTRACT_ORIGIN (fn)
    4897     14851887 :       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    4898          289 :     fn = DECL_ABSTRACT_ORIGIN (fn);
    4899              : 
    4900              :   /* Don't try to inline functions that are not well-suited to inlining.  */
    4901     14724308 :   if (cg_edge->inline_failed)
    4902              :     {
    4903     10407389 :       reason = cg_edge->inline_failed;
    4904              :       /* If this call was originally indirect, we do not want to emit any
    4905              :          inlining related warnings or sorry messages because there are no
    4906              :          guarantees regarding those.  */
    4907     10407389 :       if (cg_edge->indirect_inlining_edge)
    4908         1930 :         goto egress;
    4909              : 
    4910     10405459 :       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
    4911              :           /* For extern inline functions that get redefined we always
    4912              :              silently ignored always_inline flag. Better behavior would
    4913              :              be to be able to keep both bodies and use extern inline body
    4914              :              for inlining, but we can't do that because frontends overwrite
    4915              :              the body.  */
    4916           53 :           && !cg_edge->callee->redefined_extern_inline
    4917              :           /* During early inline pass, report only when optimization is
    4918              :              not turned on.  */
    4919           53 :           && (symtab->global_info_ready
    4920           51 :               || !optimize
    4921           41 :               || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
    4922              :           /* PR 20090218-1_0.c. Body can be provided by another module. */
    4923     10405493 :           && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
    4924              :         {
    4925           27 :           error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
    4926              :                  cgraph_inline_failed_string (reason));
    4927           27 :           if (gimple_location (stmt) != UNKNOWN_LOCATION)
    4928           27 :             inform (gimple_location (stmt), "called from here");
    4929            0 :           else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
    4930            0 :             inform (DECL_SOURCE_LOCATION (cfun->decl),
    4931              :                    "called from this function");
    4932              :         }
    4933     10405432 :       else if (opt_for_fn (fn, warn_inline)
    4934          325 :                && DECL_DECLARED_INLINE_P (fn)
    4935          100 :                && !DECL_NO_INLINE_WARNING_P (fn)
    4936           71 :                && !DECL_IN_SYSTEM_HEADER (fn)
    4937           71 :                && reason != CIF_UNSPECIFIED
    4938           71 :                && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
    4939              :                /* Do not warn about not inlined recursive calls.  */
    4940           71 :                && !cg_edge->recursive_p ()
    4941              :                /* Avoid warnings during early inline pass. */
    4942     10405503 :                && symtab->global_info_ready)
    4943              :         {
    4944            8 :           auto_diagnostic_group d;
    4945            8 :           if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
    4946              :                        fn, _(cgraph_inline_failed_string (reason))))
    4947              :             {
    4948            8 :               if (gimple_location (stmt) != UNKNOWN_LOCATION)
    4949            8 :                 inform (gimple_location (stmt), "called from here");
    4950            0 :               else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
    4951            0 :                 inform (DECL_SOURCE_LOCATION (cfun->decl),
    4952              :                        "called from this function");
    4953              :             }
    4954            8 :         }
    4955     10405459 :       goto egress;
    4956              :     }
    4957      4316919 :   id->src_node = cg_edge->callee;
    4958              : 
    4959              :   /* If callee is thunk, all we need is to adjust the THIS pointer
    4960              :      and redirect to function being thunked.  */
    4961      4316919 :   if (id->src_node->thunk)
    4962              :     {
    4963          213 :       cgraph_edge *edge;
    4964          213 :       tree virtual_offset = NULL;
    4965          213 :       profile_count count = cg_edge->count;
    4966          213 :       tree op;
    4967          213 :       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
    4968          213 :       thunk_info *info = thunk_info::get (id->src_node);
    4969              : 
    4970          213 :       cgraph_edge::remove (cg_edge);
    4971          426 :       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
    4972              :                                            gimple_uid (stmt),
    4973              :                                            profile_count::one (),
    4974              :                                            profile_count::one (),
    4975              :                                            true);
    4976          213 :       edge->count = count;
    4977          213 :       if (info->virtual_offset_p)
    4978            7 :         virtual_offset = size_int (info->virtual_value);
    4979          213 :       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
    4980              :                               NULL);
    4981          213 :       gsi_insert_before (&iter, gimple_build_assign (op,
    4982              :                                                     gimple_call_arg (stmt, 0)),
    4983              :                          GSI_NEW_STMT);
    4984          213 :       gcc_assert (info->this_adjusting);
    4985          213 :       op = thunk_adjust (&iter, op, 1, info->fixed_offset,
    4986              :                          virtual_offset, info->indirect_offset);
    4987              : 
    4988          213 :       gimple_call_set_arg (stmt, 0, op);
    4989          213 :       gimple_call_set_fndecl (stmt, edge->callee->decl);
    4990          213 :       update_stmt (stmt);
    4991          213 :       id->src_node->remove ();
    4992          213 :       successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
    4993          213 :       maybe_remove_unused_call_args (cfun, stmt);
    4994              :       /* This used to return true even though we do fail to inline in
    4995              :          some cases.  See PR98525.  */
    4996          213 :       goto egress;
    4997              :     }
    4998      4316706 :   fn = cg_edge->callee->decl;
    4999      4316706 :   cg_edge->callee->get_untransformed_body ();
    5000              : 
    5001      4316706 :   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
    5002      4316697 :     cg_edge->callee->verify ();
    5003              : 
    5004              :   /* We will be inlining this callee.  */
    5005      4316706 :   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
    5006              : 
    5007              :   /* Update the callers EH personality.  */
    5008      4316706 :   if (DECL_FUNCTION_PERSONALITY (fn))
    5009       280216 :     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
    5010       140108 :       = DECL_FUNCTION_PERSONALITY (fn);
    5011              : 
    5012              :   /* Split the block before the GIMPLE_CALL.  */
    5013      4316706 :   stmt_gsi = gsi_for_stmt (stmt);
    5014      4316706 :   gsi_prev (&stmt_gsi);
    5015      4316706 :   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
    5016      4316706 :   bb = e->src;
    5017      4316706 :   return_block = e->dest;
    5018      4316706 :   remove_edge (e);
    5019              : 
    5020              :   /* If the GIMPLE_CALL was in the last statement of BB, it may have
    5021              :      been the source of abnormal edges.  In this case, schedule
    5022              :      the removal of dead abnormal edges.  */
    5023      4316706 :   gsi = gsi_start_bb (return_block);
    5024      4316706 :   gsi_next (&gsi);
    5025      4316706 :   purge_dead_abnormal_edges = gsi_end_p (gsi);
    5026              : 
    5027      4316706 :   stmt_gsi = gsi_start_bb (return_block);
    5028              : 
    5029              :   /* Build a block containing code to initialize the arguments, the
    5030              :      actual inline expansion of the body, and a label for the return
    5031              :      statements within the function to jump to.  The type of the
    5032              :      statement expression is the return type of the function call.
    5033              :      ???  If the call does not have an associated block then we will
    5034              :      remap all callee blocks to NULL, effectively dropping most of
    5035              :      its debug information.  This should only happen for calls to
    5036              :      artificial decls inserted by the compiler itself.  We need to
    5037              :      either link the inlined blocks into the caller block tree or
    5038              :      not refer to them in any way to not break GC for locations.  */
    5039      4316706 :   if (tree block = gimple_block (stmt))
    5040              :     {
    5041              :       /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
    5042              :          to make inlined_function_outer_scope_p return true on this BLOCK.  */
    5043      4290004 :       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
    5044      4290004 :       if (loc == UNKNOWN_LOCATION)
    5045       207198 :         loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
    5046       207198 :       if (loc == UNKNOWN_LOCATION)
    5047              :         loc = BUILTINS_LOCATION;
    5048      4290004 :       if (has_discriminator (gimple_location (stmt)))
    5049      1438502 :         loc = location_with_discriminator
    5050      1438502 :                 (loc, get_discriminator_from_loc (gimple_location (stmt)));
    5051      4290004 :       id->block = make_node (BLOCK);
    5052      7366130 :       BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
    5053      4290004 :       BLOCK_SOURCE_LOCATION (id->block) = loc;
    5054      4290004 :       prepend_lexical_block (block, id->block);
    5055              :     }
    5056              : 
    5057              :   /* Local declarations will be replaced by their equivalents in this map.  */
    5058      4316706 :   st = id->decl_map;
    5059      4316706 :   id->decl_map = new hash_map<tree, tree>;
    5060      4316706 :   dst = id->debug_map;
    5061      4316706 :   id->debug_map = NULL;
    5062      4316706 :   if (flag_stack_reuse != SR_NONE)
    5063      4310415 :     id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
    5064              : 
    5065              :   /* Record the function we are about to inline.  */
    5066      4316706 :   id->src_fn = fn;
    5067      4316706 :   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
    5068      4316706 :   id->reset_location = DECL_IGNORED_P (fn);
    5069      4316706 :   id->call_stmt = call_stmt;
    5070      4316706 :   cfun->cfg->full_profile &= id->src_cfun->cfg->full_profile;
    5071              : 
    5072              :   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
    5073              :      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
    5074      4316706 :   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
    5075      4316706 :   simtvars_save = id->dst_simt_vars;
    5076      4316706 :   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
    5077        20461 :       && (simduid = bb->loop_father->simduid) != NULL_TREE
    5078            0 :       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
    5079            0 :       && single_imm_use (simduid, &use, &simtenter_stmt)
    5080            0 :       && is_gimple_call (simtenter_stmt)
    5081      4316706 :       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
    5082            0 :     vec_alloc (id->dst_simt_vars, 0);
    5083              :   else
    5084      4316706 :     id->dst_simt_vars = NULL;
    5085              : 
    5086      4316706 :   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
    5087        28680 :     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
    5088              : 
    5089              :   /* If the src function contains an IFN_VA_ARG, then so will the dst
    5090              :      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
    5091      4316706 :   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
    5092      4316706 :   src_properties = id->src_cfun->curr_properties & prop_mask;
    5093      4316706 :   if (src_properties != prop_mask)
    5094         1047 :     dst_cfun->curr_properties &= src_properties | ~prop_mask;
    5095      4316706 :   dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
    5096      4316706 :   id->dst_node->has_omp_variant_constructs
    5097      4316706 :     |= id->src_node->has_omp_variant_constructs;
    5098              : 
    5099      4316706 :   gcc_assert (!id->src_cfun->after_inlining);
    5100              : 
    5101      4316706 :   id->entry_bb = bb;
    5102      4316706 :   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    5103              :     {
    5104         1053 :       gimple_stmt_iterator si = gsi_last_bb (bb);
    5105         1053 :       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
    5106              :                                                    NOT_TAKEN),
    5107              :                         GSI_NEW_STMT);
    5108              :     }
    5109      4316706 :   initialize_inlined_parameters (id, stmt, fn, bb);
    5110      3779527 :   if (debug_nonbind_markers_p && debug_inline_points && id->block
    5111      8083631 :       && inlined_function_outer_scope_p (id->block))
    5112              :     {
    5113      3766925 :       gimple_stmt_iterator si = gsi_last_bb (bb);
    5114      3766925 :       gsi_insert_after (&si, gimple_build_debug_inline_entry
    5115      3766925 :                         (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
    5116              :                         GSI_NEW_STMT);
    5117              :     }
    5118              : 
    5119              :   /* If function to be inlined calls alloca, wrap the inlined function
    5120              :      in between save_stack = __builtin_stack_save (); and
    5121              :      __builtin_stack_restore (save_stack); calls.  */
    5122      4316706 :   if (id->src_cfun->calls_alloca && !gimple_call_noreturn_p (stmt))
    5123              :     /* Don't do this for VLA allocations though, just for user alloca
    5124              :        calls.  */
    5125         5255 :     for (struct cgraph_edge *e = id->src_node->callees; e; e = e->next_callee)
    5126         4851 :       if (gimple_maybe_alloca_call_p (e->call_stmt)
    5127         4851 :           && !gimple_call_alloca_for_var_p (e->call_stmt))
    5128              :         {
    5129           88 :           tree fn = builtin_decl_implicit (BUILT_IN_STACK_SAVE);
    5130           88 :           gcall *call = gimple_build_call (fn, 0);
    5131           88 :           save_stack = make_ssa_name (ptr_type_node);
    5132           88 :           gimple_call_set_lhs (call, save_stack);
    5133           88 :           gimple_stmt_iterator si = gsi_last_bb (bb);
    5134           88 :           gsi_insert_after (&si, call, GSI_NEW_STMT);
    5135           88 :           struct cgraph_node *dest = cgraph_node::get_create (fn);
    5136           88 :           id->dst_node->create_edge (dest, call, bb->count)->inline_failed
    5137           88 :             = CIF_BODY_NOT_AVAILABLE;
    5138           88 :           break;
    5139              :         }
    5140              : 
    5141      4316706 :   if (DECL_INITIAL (fn))
    5142              :     {
    5143      4316706 :       if (gimple_block (stmt))
    5144              :         {
    5145      4290004 :           tree *var;
    5146              : 
    5147      4290004 :           prepend_lexical_block (id->block,
    5148      4290004 :                                  remap_blocks (DECL_INITIAL (fn), id));
    5149      4290004 :           gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
    5150              :                                && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
    5151              :                                    == NULL_TREE));
    5152              :           /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
    5153              :              otherwise for DWARF DW_TAG_formal_parameter will not be children of
    5154              :              DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
    5155              :              under it.  The parameters can be then evaluated in the debugger,
    5156              :              but don't show in backtraces.  */
    5157      6224099 :           for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
    5158      1934095 :             if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
    5159              :               {
    5160       490272 :                 tree v = *var;
    5161       490272 :                 *var = TREE_CHAIN (v);
    5162       490272 :                 TREE_CHAIN (v) = BLOCK_VARS (id->block);
    5163       490272 :                 BLOCK_VARS (id->block) = v;
    5164              :               }
    5165              :             else
    5166      1443823 :               var = &TREE_CHAIN (*var);
    5167              :         }
    5168              :       else
    5169        26702 :         remap_blocks_to_null (DECL_INITIAL (fn), id);
    5170              :     }
    5171              : 
    5172              :   /* Return statements in the function body will be replaced by jumps
    5173              :      to the RET_LABEL.  */
    5174      4316706 :   gcc_assert (DECL_INITIAL (fn));
    5175      4316706 :   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
    5176              : 
    5177              :   /* Find the LHS to which the result of this call is assigned.  */
    5178      4316706 :   return_slot = NULL;
    5179      4316706 :   if (gimple_call_lhs (stmt))
    5180              :     {
    5181      2182270 :       modify_dest = gimple_call_lhs (stmt);
    5182              : 
    5183              :       /* The function which we are inlining might not return a value,
    5184              :          in which case we should issue a warning that the function
    5185              :          does not return a value.  In that case the optimizers will
    5186              :          see that the variable to which the value is assigned was not
    5187              :          initialized.  We do not want to issue a warning about that
    5188              :          uninitialized variable.  */
    5189      2182270 :       if (DECL_P (modify_dest))
    5190       385946 :         suppress_warning (modify_dest, OPT_Wuninitialized);
    5191              : 
    5192              :       /* If we have a return slot, we can assign it the result directly,
    5193              :          except in the case where it is a global variable that is only
    5194              :          written to because, the callee being permitted to read or take
    5195              :          the address of its DECL_RESULT, this could invalidate the flag
    5196              :          on the global variable; instead we preventively remove the store,
    5197              :          which would have happened later if the call was not inlined.  */
    5198      2182270 :       if (gimple_call_return_slot_opt_p (call_stmt))
    5199              :         {
    5200       128156 :           tree base = get_base_address (modify_dest);
    5201              : 
    5202       128156 :           if (VAR_P (base)
    5203       105425 :               && (TREE_STATIC (base) || DECL_EXTERNAL (base))
    5204       128215 :               && varpool_node::get (base)->writeonly)
    5205              :             return_slot = NULL;
    5206              :           else
    5207              :             return_slot = modify_dest;
    5208              : 
    5209              :           modify_dest = NULL;
    5210              :         }
    5211              :     }
    5212              :   else
    5213              :     modify_dest = NULL;
    5214              : 
    5215              :   /* If we are inlining a call to the C++ operator new, we don't want
    5216              :      to use type based alias analysis on the return value.  Otherwise
    5217              :      we may get confused if the compiler sees that the inlined new
    5218              :      function returns a pointer which was just deleted.  See bug
    5219              :      33407.  */
    5220      4316706 :   if (DECL_IS_OPERATOR_NEW_P (fn))
    5221              :     {
    5222        19200 :       return_slot = NULL;
    5223        19200 :       modify_dest = NULL;
    5224              :     }
    5225              : 
    5226              :   /* Declare the return variable for the function.  */
    5227      4316706 :   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
    5228              : 
    5229              :   /* Add local vars in this inlined callee to caller.  */
    5230      4316706 :   add_local_variables (id->src_cfun, cfun, id);
    5231              : 
    5232      4316706 :   if (dump_enabled_p ())
    5233              :     {
    5234          678 :       char buf[128];
    5235          678 :       snprintf (buf, sizeof(buf), "%4.2f",
    5236          678 :                 cg_edge->sreal_frequency ().to_double ());
    5237          678 :       dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
    5238          678 :                        call_stmt,
    5239              :                        "Inlining %C to %C with frequency %s\n",
    5240              :                        id->src_node, id->dst_node, buf);
    5241          678 :       if (dump_file && (dump_flags & TDF_DETAILS))
    5242              :         {
    5243          199 :           id->src_node->dump (dump_file);
    5244          199 :           id->dst_node->dump (dump_file);
    5245              :         }
    5246              :     }
    5247              : 
    5248              :   /* This is it.  Duplicate the callee body.  Assume callee is
    5249              :      pre-gimplified.  Note that we must not alter the caller
    5250              :      function in any way before this point, as this CALL_EXPR may be
    5251              :      a self-referential call; if we're calling ourselves, we need to
    5252              :      duplicate our body before altering anything.  */
    5253      4316706 :   copy_body (id, bb, return_block, NULL);
    5254              : 
    5255      4316706 :   reset_debug_bindings (id, stmt_gsi);
    5256              : 
    5257      4316706 :   if (flag_stack_reuse != SR_NONE)
    5258     11695033 :     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
    5259      7384618 :       if (!TREE_THIS_VOLATILE (p))
    5260              :         {
    5261              :           /* The value associated with P is a local temporary only if
    5262              :              there is no value associated with P in the debug map.  */
    5263      7384355 :           tree *varp = id->decl_map->get (p);
    5264      7384355 :           if (varp
    5265      7384355 :               && VAR_P (*varp)
    5266      7235261 :               && !is_gimple_reg (*varp)
    5267      7785451 :               && !(id->debug_map && id->debug_map->get (p)))
    5268              :             {
    5269       400863 :               tree clobber = build_clobber (TREE_TYPE (*varp),
    5270              :                                             CLOBBER_STORAGE_END);
    5271       400863 :               gimple *clobber_stmt;
    5272       400863 :               clobber_stmt = gimple_build_assign (*varp, clobber);
    5273       400863 :               gimple_set_location (clobber_stmt, gimple_location (stmt));
    5274       400863 :               gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
    5275              :             }
    5276              :         }
    5277              : 
    5278      4316706 :   if (save_stack)
    5279              :     {
    5280           88 :       tree fn = builtin_decl_implicit (BUILT_IN_STACK_RESTORE);
    5281           88 :       gcall *call = gimple_build_call (fn, 1, save_stack);
    5282           88 :       gsi_insert_before (&stmt_gsi, call, GSI_SAME_STMT);
    5283           88 :       struct cgraph_node *dest = cgraph_node::get_create (fn);
    5284           88 :       id->dst_node->create_edge (dest, call,
    5285              :                                  return_block->count)->inline_failed
    5286           88 :         = CIF_BODY_NOT_AVAILABLE;
    5287              :     }
    5288              : 
    5289              :   /* Reset the escaped solution.  */
    5290      4316706 :   if (cfun->gimple_df)
    5291              :     {
    5292      4316706 :       pt_solution_reset (&cfun->gimple_df->escaped);
    5293      4316706 :       pt_solution_reset (&cfun->gimple_df->escaped_return);
    5294              :     }
    5295              : 
    5296              :   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
    5297      4316706 :   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
    5298              :     {
    5299            0 :       size_t nargs = gimple_call_num_args (simtenter_stmt);
    5300            0 :       vec<tree> *vars = id->dst_simt_vars;
    5301            0 :       auto_vec<tree> newargs (nargs + vars->length ());
    5302            0 :       for (size_t i = 0; i < nargs; i++)
    5303            0 :         newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
    5304            0 :       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
    5305              :         {
    5306            0 :           tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
    5307            0 :           newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
    5308              :         }
    5309            0 :       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
    5310            0 :       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
    5311            0 :       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
    5312            0 :       gsi_replace (&gsi, g, false);
    5313            0 :     }
    5314      4316706 :   vec_free (id->dst_simt_vars);
    5315      4316706 :   id->dst_simt_vars = simtvars_save;
    5316              : 
    5317              :   /* Clean up.  */
    5318      4316706 :   if (id->debug_map)
    5319              :     {
    5320       144947 :       delete id->debug_map;
    5321       144947 :       id->debug_map = dst;
    5322              :     }
    5323      8633412 :   delete id->decl_map;
    5324      4316706 :   id->decl_map = st;
    5325              : 
    5326              :   /* Unlink the calls virtual operands before replacing it.  */
    5327      4316706 :   unlink_stmt_vdef (stmt);
    5328      4316706 :   if (gimple_vdef (stmt)
    5329      4316706 :       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    5330      2001260 :     release_ssa_name (gimple_vdef (stmt));
    5331              : 
    5332              :   /* If the inlined function returns a result that we care about,
    5333              :      substitute the GIMPLE_CALL with an assignment of the return
    5334              :      variable to the LHS of the call.  That is, if STMT was
    5335              :      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
    5336      4316706 :   if (use_retvar && gimple_call_lhs (stmt))
    5337              :     {
    5338      1860513 :       gimple *old_stmt = stmt;
    5339      1860513 :       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
    5340      1860513 :       gimple_set_location (stmt, gimple_location (old_stmt));
    5341      1860513 :       gsi_replace (&stmt_gsi, stmt, false);
    5342      1860513 :       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
    5343              :       /* Append a clobber for id->retvar if easily possible.  */
    5344      1860513 :       if (flag_stack_reuse != SR_NONE
    5345      1858380 :           && id->retvar
    5346      1858380 :           && VAR_P (id->retvar)
    5347      1858380 :           && id->retvar != return_slot
    5348      1858380 :           && id->retvar != modify_dest
    5349      1858380 :           && !TREE_THIS_VOLATILE (id->retvar)
    5350      1858354 :           && !is_gimple_reg (id->retvar)
    5351      1951435 :           && !stmt_ends_bb_p (stmt))
    5352              :         {
    5353        90922 :           tree clobber = build_clobber (TREE_TYPE (id->retvar),
    5354              :                                         CLOBBER_STORAGE_END);
    5355        90922 :           gimple *clobber_stmt;
    5356        90922 :           clobber_stmt = gimple_build_assign (id->retvar, clobber);
    5357        90922 :           gimple_set_location (clobber_stmt, gimple_location (old_stmt));
    5358        90922 :           gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
    5359              :         }
    5360              :     }
    5361              :   else
    5362              :     {
    5363              :       /* Handle the case of inlining a function with no return
    5364              :          statement, which causes the return value to become undefined.  */
    5365      2456193 :       if (gimple_call_lhs (stmt)
    5366      2456193 :           && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
    5367              :         {
    5368           21 :           tree name = gimple_call_lhs (stmt);
    5369           21 :           tree var = SSA_NAME_VAR (name);
    5370            6 :           tree def = var ? ssa_default_def (cfun, var) : NULL;
    5371              : 
    5372            6 :           if (def)
    5373              :             {
    5374              :               /* If the variable is used undefined, make this name
    5375              :                  undefined via a move.  */
    5376            0 :               stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
    5377            0 :               gsi_replace (&stmt_gsi, stmt, true);
    5378              :             }
    5379              :           else
    5380              :             {
    5381           21 :               if (!var)
    5382              :                 {
    5383           15 :                   var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
    5384           30 :                   SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
    5385              :                 }
    5386              :               /* Otherwise make this variable undefined.  */
    5387           21 :               gsi_remove (&stmt_gsi, true);
    5388           21 :               set_ssa_default_def (cfun, var, name);
    5389           21 :               SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
    5390              :             }
    5391              :         }
    5392              :       /* Replace with a clobber for id->retvar.  */
    5393      2456172 :       else if (flag_stack_reuse != SR_NONE
    5394      2452014 :                && id->retvar
    5395       446090 :                && VAR_P (id->retvar)
    5396       364796 :                && id->retvar != return_slot
    5397       317044 :                && id->retvar != modify_dest
    5398       124638 :                && !TREE_THIS_VOLATILE (id->retvar)
    5399      2580810 :                && !is_gimple_reg (id->retvar))
    5400              :         {
    5401        11622 :           tree clobber = build_clobber (TREE_TYPE (id->retvar));
    5402        11622 :           gimple *clobber_stmt;
    5403        11622 :           clobber_stmt = gimple_build_assign (id->retvar, clobber);
    5404        11622 :           gimple_set_location (clobber_stmt, gimple_location (stmt));
    5405        11622 :           gsi_replace (&stmt_gsi, clobber_stmt, false);
    5406        11622 :           maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
    5407              :         }
    5408              :       else
    5409      2444550 :         gsi_remove (&stmt_gsi, true);
    5410              :     }
    5411              : 
    5412      4316706 :   if (purge_dead_abnormal_edges)
    5413      1011279 :     bitmap_set_bit (to_purge, return_block->index);
    5414              : 
    5415              :   /* If the value of the new expression is ignored, that's OK.  We
    5416              :      don't warn about this for CALL_EXPRs, so we shouldn't warn about
    5417              :      the equivalent inlined version either.  */
    5418      4316706 :   if (is_gimple_assign (stmt))
    5419              :     {
    5420      1860513 :       gcc_assert (gimple_assign_single_p (stmt)
    5421              :                   || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
    5422      1860513 :       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    5423              :     }
    5424              : 
    5425      4316706 :   id->add_clobbers_to_eh_landing_pads = 0;
    5426              : 
    5427              :   /* Output the inlining info for this abstract function, since it has been
    5428              :      inlined.  If we don't do this now, we can lose the information about the
    5429              :      variables in the function when the blocks get blown away as soon as we
    5430              :      remove the cgraph node.  */
    5431      4316706 :   if (gimple_block (stmt))
    5432      4290004 :     (*debug_hooks->outlining_inline_function) (fn);
    5433              : 
    5434              :   /* Update callgraph if needed.  */
    5435      4316706 :   cg_edge->callee->remove ();
    5436              : 
    5437      4316706 :   id->block = NULL_TREE;
    5438      4316706 :   id->retvar = NULL_TREE;
    5439      4316706 :   successfully_inlined = true;
    5440              : 
    5441     14966958 :  egress:
    5442     14966958 :   input_location = saved_location;
    5443     14966958 :   return successfully_inlined;
    5444              : }
    5445              : 
    5446              : /* Expand call statements reachable from STMT_P.
    5447              :    We can only have CALL_EXPRs as the "toplevel" tree code or nested
    5448              :    in a MODIFY_EXPR.  */
    5449              : 
    5450              : static bool
    5451     30893942 : gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
    5452              :                             bitmap to_purge)
    5453              : {
    5454     30893942 :   gimple_stmt_iterator gsi;
    5455     30893942 :   bool inlined = false;
    5456              : 
    5457    224538453 :   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    5458              :     {
    5459    162750569 :       gimple *stmt = gsi_stmt (gsi);
    5460    162750569 :       gsi_prev (&gsi);
    5461              : 
    5462    162750569 :       if (is_gimple_call (stmt)
    5463    162750569 :           && !gimple_call_internal_p (stmt))
    5464     14966745 :         inlined |= expand_call_inline (bb, stmt, id, to_purge);
    5465              :     }
    5466              : 
    5467     30893942 :   return inlined;
    5468              : }
    5469              : 
    5470              : 
    5471              : /* Walk all basic blocks created after FIRST and try to fold every statement
    5472              :    in the STATEMENTS pointer set.  */
    5473              : 
    5474              : static void
    5475      1560610 : fold_marked_statements (int first, hash_set<gimple *> *statements)
    5476              : {
    5477      1560610 :   auto_bitmap to_purge;
    5478      1560610 :   auto_bitmap to_purge_abnormal;
    5479              : 
    5480      1560610 :   auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
    5481      1560610 :   auto_sbitmap visited (last_basic_block_for_fn (cfun));
    5482      1560610 :   bitmap_clear (visited);
    5483              : 
    5484      1560610 :   stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
    5485     37716844 :   while (!stack.is_empty ())
    5486              :     {
    5487              :       /* Look at the edge on the top of the stack.  */
    5488     36156234 :       edge e = stack.pop ();
    5489     36156234 :       basic_block dest = e->dest;
    5490              : 
    5491     44500764 :       if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
    5492     34618445 :           || bitmap_bit_p (visited, dest->index))
    5493      8344530 :         continue;
    5494              : 
    5495     27811704 :       bitmap_set_bit (visited, dest->index);
    5496              : 
    5497     27811704 :       if (dest->index >= first)
    5498     35944516 :         for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
    5499    121304803 :              !gsi_end_p (gsi); gsi_next (&gsi))
    5500              :           {
    5501    103332545 :             if (!statements->contains (gsi_stmt (gsi)))
    5502     22417399 :               continue;
    5503              : 
    5504     80915146 :             gimple *old_stmt = gsi_stmt (gsi);
    5505     80915146 :             bool can_make_abnormal_goto = false;
    5506     80915146 :             tree old_decl = NULL_TREE;
    5507              : 
    5508     80915146 :             if (is_gimple_call (old_stmt))
    5509              :               {
    5510      3741731 :                 old_decl = gimple_call_fndecl (old_stmt);
    5511      3741731 :                 if (stmt_can_make_abnormal_goto (old_stmt))
    5512              :                   can_make_abnormal_goto = true;
    5513              :               }
    5514              : 
    5515      3741731 :             if (old_decl && fndecl_built_in_p (old_decl))
    5516              :               {
    5517              :                 /* Folding builtins can create multiple instructions,
    5518              :                    we need to look at all of them.  */
    5519      1369984 :                 gimple_stmt_iterator i2 = gsi;
    5520      1369984 :                 gsi_prev (&i2);
    5521      1369984 :                 if (fold_stmt (&gsi))
    5522              :                   {
    5523        91537 :                     gimple *new_stmt;
    5524              :                     /* If a builtin at the end of a bb folded into nothing,
    5525              :                        the following loop won't work.  */
    5526        91537 :                     if (gsi_end_p (gsi))
    5527              :                       {
    5528            0 :                         cgraph_update_edges_for_call_stmt (old_stmt,
    5529              :                                                            old_decl, NULL);
    5530            0 :                         if (can_make_abnormal_goto)
    5531            0 :                           bitmap_set_bit (to_purge_abnormal, dest->index);
    5532     17972258 :                         break;
    5533              :                       }
    5534        91537 :                     if (gsi_end_p (i2))
    5535       124728 :                       i2 = gsi_start_bb (dest);
    5536              :                     else
    5537        29173 :                       gsi_next (&i2);
    5538          426 :                     while (1)
    5539              :                       {
    5540        91963 :                         new_stmt = gsi_stmt (i2);
    5541        91963 :                         update_stmt (new_stmt);
    5542        91963 :                         cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
    5543              :                                                            new_stmt);
    5544              : 
    5545        91963 :                         if (new_stmt == gsi_stmt (gsi))
    5546              :                           {
    5547              :                             /* It is okay to check only for the very last
    5548              :                                of these statements.  If it is a throwing
    5549              :                                statement nothing will change.  If it isn't
    5550              :                                this can remove EH edges.  If that weren't
    5551              :                                correct then because some intermediate stmts
    5552              :                                throw, but not the last one.  That would mean
    5553              :                                we'd have to split the block, which we can't
    5554              :                                here and we'd loose anyway.  And as builtins
    5555              :                                probably never throw, this all
    5556              :                                is mood anyway.  */
    5557        91537 :                             if (maybe_clean_or_replace_eh_stmt (old_stmt,
    5558              :                                                                 new_stmt))
    5559            7 :                               bitmap_set_bit (to_purge, dest->index);
    5560        91537 :                             if (can_make_abnormal_goto
    5561        91537 :                                 && !stmt_can_make_abnormal_goto (new_stmt))
    5562            0 :                               bitmap_set_bit (to_purge_abnormal, dest->index);
    5563              :                             break;
    5564              :                           }
    5565          426 :                         gsi_next (&i2);
    5566              :                       }
    5567              :                   }
    5568              :               }
    5569     79545162 :             else if (fold_stmt (&gsi))
    5570              :               {
    5571              :                 /* Re-read the statement from GSI as fold_stmt() may
    5572              :                    have changed it.  */
    5573      2999402 :                 gimple *new_stmt = gsi_stmt (gsi);
    5574      2999402 :                 update_stmt (new_stmt);
    5575              : 
    5576      2999402 :                 if (is_gimple_call (old_stmt)
    5577      2999402 :                     || is_gimple_call (new_stmt))
    5578         4307 :                   cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
    5579              :                                                      new_stmt);
    5580              : 
    5581      2999402 :                 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
    5582          164 :                   bitmap_set_bit (to_purge, dest->index);
    5583      2999402 :                 if (can_make_abnormal_goto
    5584      2999402 :                     && !stmt_can_make_abnormal_goto (new_stmt))
    5585            7 :                   bitmap_set_bit (to_purge_abnormal, dest->index);
    5586              :               }
    5587              :           }
    5588              : 
    5589     54082750 :       if (EDGE_COUNT (dest->succs) > 0)
    5590              :         {
    5591              :           /* Avoid warnings emitted from folding statements that
    5592              :              became unreachable because of inlined function parameter
    5593              :              propagation.  */
    5594     26271046 :           e = find_taken_edge (dest, NULL_TREE);
    5595     26271046 :           if (e)
    5596     18104215 :             stack.quick_push (e);
    5597              :           else
    5598              :             {
    5599      8166831 :               edge_iterator ei;
    5600     24658240 :               FOR_EACH_EDGE (e, ei, dest->succs)
    5601     16491409 :                 stack.safe_push (e);
    5602              :             }
    5603              :         }
    5604              :     }
    5605              : 
    5606      1560610 :   gimple_purge_all_dead_eh_edges (to_purge);
    5607      1560610 :   gimple_purge_all_dead_abnormal_call_edges (to_purge_abnormal);
    5608      1560610 : }
    5609              : 
    5610              : /* Expand calls to inline functions in the body of FN.  */
    5611              : 
    5612              : unsigned int
    5613      1948232 : optimize_inline_calls (tree fn)
    5614              : {
    5615      1948232 :   copy_body_data id;
    5616      1948232 :   basic_block bb;
    5617      1948232 :   int last = n_basic_blocks_for_fn (cfun);
    5618      1948232 :   bool inlined_p = false;
    5619              : 
    5620              :   /* Clear out ID.  */
    5621      1948232 :   memset (&id, 0, sizeof (id));
    5622              : 
    5623      1948232 :   id.src_node = id.dst_node = cgraph_node::get (fn);
    5624      1948232 :   gcc_assert (id.dst_node->definition);
    5625      1948232 :   id.dst_fn = fn;
    5626              :   /* Or any functions that aren't finished yet.  */
    5627      1948232 :   if (current_function_decl)
    5628      1948232 :     id.dst_fn = current_function_decl;
    5629              : 
    5630      1948232 :   id.copy_decl = copy_decl_maybe_to_var;
    5631      1948232 :   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
    5632      1948232 :   id.transform_new_cfg = false;
    5633      1948232 :   id.transform_return_to_modify = true;
    5634      1948232 :   id.transform_parameter = true;
    5635      1948232 :   id.statements_to_fold = new hash_set<gimple *>;
    5636              : 
    5637      1948232 :   push_gimplify_context ();
    5638              : 
    5639              :   /* We make no attempts to keep dominance info up-to-date.  */
    5640      1948232 :   free_dominance_info (CDI_DOMINATORS);
    5641      1948232 :   free_dominance_info (CDI_POST_DOMINATORS);
    5642              : 
    5643              :   /* Register specific gimple functions.  */
    5644      1948232 :   gimple_register_cfg_hooks ();
    5645              : 
    5646              :   /* Reach the trees by walking over the CFG, and note the
    5647              :      enclosing basic-blocks in the call edges.  */
    5648              :   /* We walk the blocks going forward, because inlined function bodies
    5649              :      will split id->current_basic_block, and the new blocks will
    5650              :      follow it; we'll trudge through them, processing their CALL_EXPRs
    5651              :      along the way.  */
    5652      1948232 :   auto_bitmap to_purge;
    5653     32842174 :   FOR_EACH_BB_FN (bb, cfun)
    5654     30893942 :     inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
    5655              : 
    5656      1948232 :   pop_gimplify_context (NULL);
    5657              : 
    5658      1948232 :   if (flag_checking)
    5659              :     {
    5660      1948214 :       struct cgraph_edge *e;
    5661              : 
    5662      1948214 :       id.dst_node->verify ();
    5663              : 
    5664              :       /* Double check that we inlined everything we are supposed to inline.  */
    5665     11691901 :       for (e = id.dst_node->callees; e; e = e->next_callee)
    5666      9743687 :         gcc_assert (e->inline_failed);
    5667              :     }
    5668              : 
    5669              :   /* If we didn't inline into the function there is nothing to do.  */
    5670      1948232 :   if (!inlined_p)
    5671              :     {
    5672      1239486 :       delete id.statements_to_fold;
    5673       619743 :       return 0;
    5674              :     }
    5675              : 
    5676              :   /* Fold queued statements.  */
    5677      1328489 :   update_max_bb_count ();
    5678      1328489 :   fold_marked_statements (last, id.statements_to_fold);
    5679      2656978 :   delete id.statements_to_fold;
    5680              : 
    5681              :   /* Finally purge EH and abnormal edges from the call stmts we inlined.
    5682              :      We need to do this after fold_marked_statements since that may walk
    5683              :      the SSA use-def chain.  */
    5684      1328489 :   unsigned i;
    5685      1328489 :   bitmap_iterator bi;
    5686      2339768 :   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
    5687              :     {
    5688      1011279 :       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
    5689      1011279 :       if (bb)
    5690              :         {
    5691      1011279 :           gimple_purge_dead_eh_edges (bb);
    5692      1011279 :           gimple_purge_dead_abnormal_call_edges (bb);
    5693              :         }
    5694              :     }
    5695              : 
    5696      1328489 :   gcc_assert (!id.debug_stmts.exists ());
    5697              : 
    5698              :   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    5699      1328489 :   number_blocks (fn);
    5700              : 
    5701      1328489 :   delete_unreachable_blocks_update_callgraph (id.dst_node, false);
    5702      1328489 :   id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
    5703              : 
    5704      1328489 :   if (flag_checking)
    5705      1328480 :     id.dst_node->verify ();
    5706              : 
    5707              :   /* It would be nice to check SSA/CFG/statement consistency here, but it is
    5708              :      not possible yet - the IPA passes might make various functions to not
    5709              :      throw and they don't care to proactively update local EH info.  This is
    5710              :      done later in fixup_cfg pass that also execute the verification.  */
    5711      1328489 :   return (TODO_update_ssa
    5712              :           | TODO_cleanup_cfg
    5713      1328489 :           | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
    5714      2656978 :           | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0));
    5715      1948232 : }
    5716              : 
    5717              : /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
    5718              : 
    5719              : tree
    5720   2175612319 : copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
    5721              : {
    5722   2175612319 :   enum tree_code code = TREE_CODE (*tp);
    5723   2175612319 :   enum tree_code_class cl = TREE_CODE_CLASS (code);
    5724              : 
    5725              :   /* We make copies of most nodes.  */
    5726   2175612319 :   if (IS_EXPR_CODE_CLASS (cl)
    5727              :       || code == TREE_LIST
    5728    302566878 :       || code == TREE_VEC
    5729    298067200 :       || code == TYPE_DECL
    5730    298067200 :       || code == OMP_CLAUSE)
    5731              :     {
    5732              :       /* Because the chain gets clobbered when we make a copy, we save it
    5733              :          here.  */
    5734   1877568176 :       tree chain = NULL_TREE, new_tree;
    5735              : 
    5736   1877568176 :       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    5737      4522735 :         chain = TREE_CHAIN (*tp);
    5738              : 
    5739              :       /* Copy the node.  */
    5740   1877568176 :       new_tree = copy_node (*tp);
    5741              : 
    5742   1877568176 :       *tp = new_tree;
    5743              : 
    5744              :       /* Now, restore the chain, if appropriate.  That will cause
    5745              :          walk_tree to walk into the chain as well.  */
    5746   1877568176 :       if (code == PARM_DECL
    5747   1877568176 :           || code == TREE_LIST
    5748   1875550174 :           || code == OMP_CLAUSE)
    5749      2041033 :         TREE_CHAIN (*tp) = chain;
    5750              : 
    5751              :       /* For now, we don't update BLOCKs when we make copies.  So, we
    5752              :          have to nullify all BIND_EXPRs.  */
    5753   1877568176 :       if (TREE_CODE (*tp) == BIND_EXPR)
    5754     23024235 :         BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    5755              :     }
    5756    298044143 :   else if (code == CONSTRUCTOR)
    5757              :     {
    5758              :       /* CONSTRUCTOR nodes need special handling because
    5759              :          we need to duplicate the vector of elements.  */
    5760     21355574 :       tree new_tree;
    5761              : 
    5762     21355574 :       new_tree = copy_node (*tp);
    5763     32673667 :       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
    5764     21355574 :       *tp = new_tree;
    5765              :     }
    5766    276688569 :   else if (code == STATEMENT_LIST)
    5767              :     /* We used to just abort on STATEMENT_LIST, but we can run into them
    5768              :        with statement-expressions (c++/40975).  */
    5769          198 :     copy_statement_list (tp);
    5770    276688371 :   else if (TREE_CODE_CLASS (code) == tcc_type)
    5771          278 :     *walk_subtrees = 0;
    5772    276688093 :   else if (TREE_CODE_CLASS (code) == tcc_declaration)
    5773    128166566 :     *walk_subtrees = 0;
    5774    148521527 :   else if (TREE_CODE_CLASS (code) == tcc_constant)
    5775            0 :     *walk_subtrees = 0;
    5776   2175612319 :   return NULL_TREE;
    5777              : }
    5778              : 
    5779              : /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
    5780              :    information indicating to what new SAVE_EXPR this one should be mapped,
    5781              :    use that one.  Otherwise, create a new node and enter it in ST.  FN is
    5782              :    the function into which the copy will be placed.  */
    5783              : 
    5784              : static void
    5785     16273940 : remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
    5786              : {
    5787     16273940 :   tree *n;
    5788     16273940 :   tree t;
    5789              : 
    5790              :   /* See if we already encountered this SAVE_EXPR.  */
    5791     16273940 :   n = st->get (*tp);
    5792              : 
    5793              :   /* If we didn't already remap this SAVE_EXPR, do so now.  */
    5794     16273940 :   if (!n)
    5795              :     {
    5796     15821394 :       t = copy_node (*tp);
    5797              : 
    5798              :       /* Remember this SAVE_EXPR.  */
    5799     15821394 :       st->put (*tp, t);
    5800              :       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
    5801     15821394 :       st->put (t, t);
    5802              :     }
    5803              :   else
    5804              :     {
    5805              :       /* We've already walked into this SAVE_EXPR; don't do it again.  */
    5806       452546 :       *walk_subtrees = 0;
    5807       452546 :       t = *n;
    5808              :     }
    5809              : 
    5810              :   /* Replace this SAVE_EXPR with the copy.  */
    5811     16273940 :   *tp = t;
    5812     16273940 : }
    5813              : 
    5814              : /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
    5815              :    label, copies the declaration and enters it in the splay_tree in DATA (which
    5816              :    is really a 'copy_body_data *'.  */
    5817              : 
    5818              : static tree
    5819      1527993 : mark_local_labels_stmt (gimple_stmt_iterator *gsip,
    5820              :                         bool *handled_ops_p ATTRIBUTE_UNUSED,
    5821              :                         struct walk_stmt_info *wi)
    5822              : {
    5823      1527993 :   copy_body_data *id = (copy_body_data *) wi->info;
    5824      1527993 :   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
    5825              : 
    5826        51626 :   if (stmt)
    5827              :     {
    5828        51626 :       tree decl = gimple_label_label (stmt);
    5829              : 
    5830              :       /* Copy the decl and remember the copy.  */
    5831        51626 :       insert_decl_map (id, decl, id->copy_decl (decl, id));
    5832              :     }
    5833              : 
    5834      1527993 :   return NULL_TREE;
    5835              : }
    5836              : 
    5837              : static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
    5838              :                                                   struct walk_stmt_info *wi);
    5839              : 
    5840              : /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
    5841              :    Using the splay_tree pointed to by ST (which is really a `splay_tree'),
    5842              :    remaps all local declarations to appropriate replacements in gimple
    5843              :    operands. */
    5844              : 
    5845              : static tree
    5846      3343087 : replace_locals_op (tree *tp, int *walk_subtrees, void *data)
    5847              : {
    5848      3343087 :   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
    5849      3343087 :   copy_body_data *id = (copy_body_data *) wi->info;
    5850      3343087 :   hash_map<tree, tree> *st = id->decl_map;
    5851      3343087 :   tree *n;
    5852      3343087 :   tree expr = *tp;
    5853              : 
    5854              :   /* For recursive invocations this is no longer the LHS itself.  */
    5855      3343087 :   bool is_lhs = wi->is_lhs;
    5856      3343087 :   wi->is_lhs = false;
    5857              : 
    5858      3343087 :   if (TREE_CODE (expr) == SSA_NAME)
    5859              :     {
    5860        24394 :       *tp = remap_ssa_name (*tp, id);
    5861        24394 :       *walk_subtrees = 0;
    5862        24394 :       if (is_lhs)
    5863        12197 :         SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
    5864              :     }
    5865              :   /* Only a local declaration (variable or label).  */
    5866      3318693 :   else if ((VAR_P (expr) && !TREE_STATIC (expr))
    5867      1941658 :            || TREE_CODE (expr) == LABEL_DECL)
    5868              :     {
    5869              :       /* Lookup the declaration.  */
    5870      1482343 :       n = st->get (expr);
    5871              : 
    5872              :       /* If it's there, remap it.  */
    5873      1482343 :       if (n)
    5874       103966 :         *tp = *n;
    5875      1482343 :       *walk_subtrees = 0;
    5876              :     }
    5877      1836350 :   else if (TREE_CODE (expr) == STATEMENT_LIST
    5878      1836350 :            || TREE_CODE (expr) == BIND_EXPR
    5879      1836350 :            || TREE_CODE (expr) == SAVE_EXPR)
    5880            0 :     gcc_unreachable ();
    5881      1836350 :   else if (TREE_CODE (expr) == TARGET_EXPR)
    5882              :     {
    5883              :       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
    5884              :          It's OK for this to happen if it was part of a subtree that
    5885              :          isn't immediately expanded, such as operand 2 of another
    5886              :          TARGET_EXPR.  */
    5887            0 :       if (!TREE_OPERAND (expr, 1))
    5888              :         {
    5889            0 :           TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
    5890            0 :           TREE_OPERAND (expr, 3) = NULL_TREE;
    5891              :         }
    5892              :     }
    5893      1836350 :   else if (TREE_CODE (expr) == OMP_CLAUSE)
    5894              :     {
    5895              :       /* Before the omplower pass completes, some OMP clauses can contain
    5896              :          sequences that are neither copied by gimple_seq_copy nor walked by
    5897              :          walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
    5898              :          in those situations, we have to copy and process them explicitly.  */
    5899              : 
    5900          552 :       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
    5901              :         {
    5902           14 :           gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
    5903           14 :           seq = duplicate_remap_omp_clause_seq (seq, wi);
    5904           14 :           OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
    5905              :         }
    5906          538 :       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
    5907              :         {
    5908           77 :           gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
    5909           77 :           seq = duplicate_remap_omp_clause_seq (seq, wi);
    5910           77 :           OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
    5911              :         }
    5912          461 :       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
    5913              :         {
    5914           99 :           gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
    5915           99 :           seq = duplicate_remap_omp_clause_seq (seq, wi);
    5916           99 :           OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
    5917           99 :           seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
    5918           99 :           seq = duplicate_remap_omp_clause_seq (seq, wi);
    5919           99 :           OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
    5920              :         }
    5921              :     }
    5922              : 
    5923              :   /* Keep iterating.  */
    5924      3343087 :   return NULL_TREE;
    5925              : }
    5926              : 
    5927              : 
    5928              : /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
    5929              :    Using the splay_tree pointed to by ST (which is really a `splay_tree'),
    5930              :    remaps all local declarations to appropriate replacements in gimple
    5931              :    statements. */
    5932              : 
    5933              : static tree
    5934      1527993 : replace_locals_stmt (gimple_stmt_iterator *gsip,
    5935              :                      bool *handled_ops_p ATTRIBUTE_UNUSED,
    5936              :                      struct walk_stmt_info *wi)
    5937              : {
    5938      1527993 :   copy_body_data *id = (copy_body_data *) wi->info;
    5939      1527993 :   gimple *gs = gsi_stmt (*gsip);
    5940              : 
    5941      1527993 :   if (gbind *stmt = dyn_cast <gbind *> (gs))
    5942              :     {
    5943          349 :       tree block = gimple_bind_block (stmt);
    5944              : 
    5945          349 :       if (block)
    5946              :         {
    5947          269 :           remap_block (&block, id);
    5948          269 :           gimple_bind_set_block (stmt, block);
    5949              :         }
    5950              : 
    5951              :       /* This will remap a lot of the same decls again, but this should be
    5952              :          harmless.  */
    5953          349 :       if (gimple_bind_vars (stmt))
    5954              :         {
    5955              :           tree old_var, decls = gimple_bind_vars (stmt);
    5956              : 
    5957          528 :           for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    5958          360 :             if (!can_be_nonlocal (old_var, id)
    5959          360 :                 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
    5960          360 :               remap_decl (old_var, id);
    5961              : 
    5962          168 :           gcc_checking_assert (!id->prevent_decl_creation_for_types);
    5963          168 :           id->prevent_decl_creation_for_types = true;
    5964          168 :           gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
    5965          168 :           id->prevent_decl_creation_for_types = false;
    5966              :         }
    5967              :     }
    5968              : 
    5969              :   /* Keep iterating.  */
    5970      1527993 :   return NULL_TREE;
    5971              : }
    5972              : 
    5973              : /* Create a copy of SEQ and remap all decls in it.  */
    5974              : 
    5975              : static gimple_seq
    5976          289 : duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
    5977              : {
    5978          289 :   if (!seq)
    5979              :     return NULL;
    5980              : 
    5981              :   /* If there are any labels in OMP sequences, they can be only referred to in
    5982              :      the sequence itself and therefore we can do both here.  */
    5983           60 :   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
    5984           60 :   gimple_seq copy = gimple_seq_copy (seq);
    5985           60 :   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
    5986           60 :   return copy;
    5987              : }
    5988              : 
    5989              : /* Copies everything in SEQ and replaces variables and labels local to
    5990              :    current_function_decl.  */
    5991              : 
    5992              : gimple_seq
    5993      1017167 : copy_gimple_seq_and_replace_locals (gimple_seq seq)
    5994              : {
    5995      1017167 :   copy_body_data id;
    5996      1017167 :   struct walk_stmt_info wi;
    5997      1017167 :   gimple_seq copy;
    5998              : 
    5999              :   /* There's nothing to do for NULL_TREE.  */
    6000      1017167 :   if (seq == NULL)
    6001              :     return seq;
    6002              : 
    6003              :   /* Set up ID.  */
    6004      1017151 :   memset (&id, 0, sizeof (id));
    6005      1017151 :   id.src_fn = current_function_decl;
    6006      1017151 :   id.dst_fn = current_function_decl;
    6007      1017151 :   id.src_cfun = cfun;
    6008      1017151 :   id.decl_map = new hash_map<tree, tree>;
    6009      1017151 :   id.debug_map = NULL;
    6010              : 
    6011      1017151 :   id.copy_decl = copy_decl_no_change;
    6012      1017151 :   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
    6013      1017151 :   id.transform_new_cfg = false;
    6014      1017151 :   id.transform_return_to_modify = false;
    6015      1017151 :   id.transform_parameter = false;
    6016              : 
    6017              :   /* Walk the tree once to find local labels.  */
    6018      1017151 :   memset (&wi, 0, sizeof (wi));
    6019      1017151 :   hash_set<tree> visited;
    6020      1017151 :   wi.info = &id;
    6021      1017151 :   wi.pset = &visited;
    6022      1017151 :   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
    6023              : 
    6024      1017151 :   copy = gimple_seq_copy (seq);
    6025              : 
    6026              :   /* Walk the copy, remapping decls.  */
    6027      1017151 :   memset (&wi, 0, sizeof (wi));
    6028      1017151 :   wi.info = &id;
    6029      1017151 :   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
    6030              : 
    6031              :   /* Clean up.  */
    6032      2034302 :   delete id.decl_map;
    6033      1017151 :   if (id.debug_map)
    6034            0 :     delete id.debug_map;
    6035      1017151 :   if (id.dependence_map)
    6036              :     {
    6037            0 :       delete id.dependence_map;
    6038            0 :       id.dependence_map = NULL;
    6039              :     }
    6040              : 
    6041      1017151 :   return copy;
    6042      1017151 : }
    6043              : 
    6044              : 
    6045              : /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
    6046              : 
    6047              : static tree
    6048            0 : debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
    6049              : {
    6050            0 :   if (*tp == data)
    6051              :     return (tree) data;
    6052              :   else
    6053            0 :     return NULL;
    6054              : }
    6055              : 
    6056              : DEBUG_FUNCTION bool
    6057            0 : debug_find_tree (tree top, tree search)
    6058              : {
    6059            0 :   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
    6060              : }
    6061              : 
    6062              : 
    6063              : /* Declare the variables created by the inliner.  Add all the variables in
    6064              :    VARS to BIND_EXPR.  */
    6065              : 
    6066              : static void
    6067      6574456 : declare_inline_vars (tree block, tree vars)
    6068              : {
    6069      6574456 :   tree t;
    6070     16173836 :   for (t = vars; t; t = DECL_CHAIN (t))
    6071              :     {
    6072      9599380 :       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
    6073      9599380 :       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
    6074      9599380 :       add_local_decl (cfun, t);
    6075              :     }
    6076              : 
    6077      6574456 :   if (block)
    6078      6495988 :     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
    6079      6574456 : }
    6080              : 
    6081              : /* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
    6082              :    but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
    6083              :    VAR_DECL translation.  */
    6084              : 
    6085              : tree
    6086    140118464 : copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
    6087              : {
    6088              :   /* Don't generate debug information for the copy if we wouldn't have
    6089              :      generated it for the copy either.  */
    6090    140118464 :   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
    6091    140118464 :   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
    6092              : 
    6093              :   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
    6094              :      declaration inspired this copy.  */
    6095    167697126 :   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
    6096              : 
    6097              :   /* The new variable/label has no RTL, yet.  */
    6098    140118464 :   if (HAS_RTL_P (copy)
    6099    140118464 :       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    6100    140113558 :     SET_DECL_RTL (copy, 0);
    6101              :   /* For vector typed decls make sure to update DECL_MODE according
    6102              :      to the new function context.  */
    6103    140118464 :   if (VECTOR_TYPE_P (TREE_TYPE (copy)))
    6104       169750 :     SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
    6105              : 
    6106              :   /* These args would always appear unused, if not for this.  */
    6107    140118464 :   TREE_USED (copy) = 1;
    6108              : 
    6109              :   /* Set the context for the new declaration.  */
    6110    140118464 :   if (!DECL_CONTEXT (decl))
    6111              :     /* Globals stay global.  */
    6112              :     ;
    6113    140118313 :   else if (DECL_CONTEXT (decl) != id->src_fn)
    6114              :     /* Things that weren't in the scope of the function we're inlining
    6115              :        from aren't in the scope we're inlining to, either.  */
    6116              :     ;
    6117    140116058 :   else if (TREE_STATIC (decl))
    6118              :     /* Function-scoped static variables should stay in the original
    6119              :        function.  */
    6120              :     ;
    6121              :   else
    6122              :     {
    6123              :       /* Ordinary automatic local variables are now in the scope of the
    6124              :          new function.  */
    6125    140112665 :       DECL_CONTEXT (copy) = id->dst_fn;
    6126    140112665 :       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
    6127              :         {
    6128            0 :           if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
    6129            0 :             DECL_ATTRIBUTES (copy)
    6130            0 :               = tree_cons (get_identifier ("omp simt private"), NULL,
    6131            0 :                            DECL_ATTRIBUTES (copy));
    6132            0 :           id->dst_simt_vars->safe_push (copy);
    6133              :         }
    6134              :     }
    6135              : 
    6136    140118464 :   return copy;
    6137              : }
    6138              : 
    6139              : /* Create a new VAR_DECL that is indentical in all respect to DECL except that
    6140              :    DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL.  The original
    6141              :    DECL must come from ID->src_fn and the copy will be part of ID->dst_fn.  */
    6142              : 
    6143              : tree
    6144      7585899 : copy_decl_to_var (tree decl, copy_body_data *id)
    6145              : {
    6146      7585899 :   tree copy, type;
    6147              : 
    6148      7585899 :   gcc_assert (TREE_CODE (decl) == PARM_DECL
    6149              :               || TREE_CODE (decl) == RESULT_DECL);
    6150              : 
    6151      7585899 :   type = TREE_TYPE (decl);
    6152              : 
    6153      7585899 :   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
    6154      7585899 :                      VAR_DECL, DECL_NAME (decl), type);
    6155      7585899 :   if (DECL_PT_UID_SET_P (decl))
    6156          289 :     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
    6157      7585899 :   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
    6158      7585899 :   TREE_READONLY (copy) = TREE_READONLY (decl);
    6159      7585899 :   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    6160      7585899 :   DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
    6161      7585899 :   DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
    6162              : 
    6163      7585899 :   return copy_decl_for_dup_finish (id, decl, copy);
    6164              : }
    6165              : 
    6166              : /* Like copy_decl_to_var, but create a return slot object instead of a
    6167              :    pointer variable for return by invisible reference.  */
    6168              : 
    6169              : static tree
    6170      2025629 : copy_result_decl_to_var (tree decl, copy_body_data *id)
    6171              : {
    6172      2025629 :   tree copy, type;
    6173              : 
    6174      2025629 :   gcc_assert (TREE_CODE (decl) == PARM_DECL
    6175              :               || TREE_CODE (decl) == RESULT_DECL);
    6176              : 
    6177      2025629 :   type = TREE_TYPE (decl);
    6178      2025629 :   if (DECL_BY_REFERENCE (decl))
    6179          109 :     type = TREE_TYPE (type);
    6180              : 
    6181      2025629 :   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
    6182      2025629 :                      VAR_DECL, DECL_NAME (decl), type);
    6183      2025629 :   if (DECL_PT_UID_SET_P (decl))
    6184            0 :     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
    6185      2025629 :   TREE_READONLY (copy) = TREE_READONLY (decl);
    6186      2025629 :   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    6187      2025629 :   if (!DECL_BY_REFERENCE (decl))
    6188              :     {
    6189      2025520 :       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
    6190      2025520 :       DECL_NOT_GIMPLE_REG_P (copy)
    6191      4051040 :         = (DECL_NOT_GIMPLE_REG_P (decl)
    6192              :            /* RESULT_DECLs are treated special by needs_to_live_in_memory,
    6193              :               mirror that to the created VAR_DECL.  */
    6194      2025520 :            || (TREE_CODE (decl) == RESULT_DECL
    6195      2025364 :                && aggregate_value_p (decl, id->src_fn)));
    6196              :     }
    6197              : 
    6198      2025629 :   return copy_decl_for_dup_finish (id, decl, copy);
    6199              : }
    6200              : 
    6201              : tree
    6202    130506766 : copy_decl_no_change (tree decl, copy_body_data *id)
    6203              : {
    6204    130506766 :   tree copy;
    6205              : 
    6206    130506766 :   copy = copy_node (decl);
    6207              : 
    6208              :   /* The COPY is not abstract; it will be generated in DST_FN.  */
    6209    130506766 :   DECL_ABSTRACT_P (copy) = false;
    6210    130506766 :   lang_hooks.dup_lang_specific_decl (copy);
    6211              : 
    6212              :   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
    6213              :      been taken; it's for internal bookkeeping in expand_goto_internal.  */
    6214    130506766 :   if (TREE_CODE (copy) == LABEL_DECL)
    6215              :     {
    6216      1798519 :       TREE_ADDRESSABLE (copy) = 0;
    6217      1798519 :       LABEL_DECL_UID (copy) = -1;
    6218              :     }
    6219              : 
    6220    130506766 :   return copy_decl_for_dup_finish (id, decl, copy);
    6221              : }
    6222              : 
    6223              : static tree
    6224     20164982 : copy_decl_maybe_to_var (tree decl, copy_body_data *id)
    6225              : {
    6226     20164982 :   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    6227        12148 :     return copy_decl_to_var (decl, id);
    6228              :   else
    6229     20152834 :     return copy_decl_no_change (decl, id);
    6230              : }
    6231              : 
    6232              : /* Return a copy of the function's argument tree without any modifications.  */
    6233              : 
    6234              : static tree
    6235        75239 : copy_arguments_nochange (tree orig_parm, copy_body_data * id)
    6236              : {
    6237        75239 :   tree arg, *parg;
    6238        75239 :   tree new_parm = NULL;
    6239              : 
    6240        75239 :   parg = &new_parm;
    6241       227498 :   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
    6242              :     {
    6243       152259 :       tree new_tree = remap_decl (arg, id);
    6244       152259 :       if (TREE_CODE (new_tree) != PARM_DECL)
    6245         4317 :         new_tree = id->copy_decl (arg, id);
    6246       152259 :       lang_hooks.dup_lang_specific_decl (new_tree);
    6247       152259 :       *parg = new_tree;
    6248       152259 :       parg = &DECL_CHAIN (new_tree);
    6249              :     }
    6250        75239 :   return new_parm;
    6251              : }
    6252              : 
    6253              : /* Return a copy of the function's static chain.  */
    6254              : static tree
    6255          975 : copy_static_chain (tree static_chain, copy_body_data * id)
    6256              : {
    6257          975 :   tree *chain_copy, *pvar;
    6258              : 
    6259          975 :   chain_copy = &static_chain;
    6260         1950 :   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    6261              :     {
    6262          975 :       tree new_tree = remap_decl (*pvar, id);
    6263          975 :       lang_hooks.dup_lang_specific_decl (new_tree);
    6264          975 :       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
    6265          975 :       *pvar = new_tree;
    6266              :     }
    6267          975 :   return static_chain;
    6268              : }
    6269              : 
    6270              : /* Return true if the function is allowed to be versioned.
    6271              :    This is a guard for the versioning functionality.  */
    6272              : 
    6273              : bool
    6274     13217835 : tree_versionable_function_p (tree fndecl)
    6275              : {
    6276     13217835 :   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
    6277     26165818 :           && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
    6278              : }
    6279              : 
    6280              : /* Update clone info after duplication.  */
    6281              : 
    6282              : static void
    6283       232121 : update_clone_info (copy_body_data * id)
    6284              : {
    6285       232121 :   struct cgraph_node *this_node = id->dst_node;
    6286       232121 :   if (!this_node->clones)
    6287              :     return;
    6288       597094 :   for (cgraph_node *node = this_node->clones; node != this_node;)
    6289              :     {
    6290              :       /* First update replace maps to match the new body.  */
    6291       520176 :       clone_info *info = clone_info::get (node);
    6292       520176 :       if (info && info->tree_map)
    6293              :         {
    6294              :           unsigned int i;
    6295            0 :           for (i = 0; i < vec_safe_length (info->tree_map); i++)
    6296              :             {
    6297            0 :               struct ipa_replace_map *replace_info;
    6298            0 :               replace_info = (*info->tree_map)[i];
    6299            0 :               walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
    6300              :             }
    6301              :         }
    6302              : 
    6303       520176 :       if (node->clones)
    6304              :         node = node->clones;
    6305       497945 :       else if (node->next_sibling_clone)
    6306              :         node = node->next_sibling_clone;
    6307              :       else
    6308              :         {
    6309       187877 :           while (node != id->dst_node && !node->next_sibling_clone)
    6310        99149 :             node = node->clone_of;
    6311        88728 :           if (node != id->dst_node)
    6312        11810 :             node = node->next_sibling_clone;
    6313              :         }
    6314              :     }
    6315              : }
    6316              : 
    6317              : /* Create a copy of a function's tree.
    6318              :    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
    6319              :    of the original function and the new copied function
    6320              :    respectively.  In case we want to replace a DECL
    6321              :    tree with another tree while duplicating the function's
    6322              :    body, TREE_MAP represents the mapping between these
    6323              :    trees. If UPDATE_CLONES is set, the call_stmt fields
    6324              :    of edges of clones of the function will be updated.
    6325              : 
    6326              :    If non-NULL PARAM_ADJUSTMENTS determines how function prototype (i.e. the
    6327              :    function parameters and return value) should be modified).
    6328              :    If non-NULL BLOCKS_TO_COPY determine what basic blocks to copy.
    6329              :    If non_NULL NEW_ENTRY determine new entry BB of the clone.
    6330              : */
    6331              : void
    6332       232121 : tree_function_versioning (tree old_decl, tree new_decl,
    6333              :                           vec<ipa_replace_map *, va_gc> *tree_map,
    6334              :                           ipa_param_adjustments *param_adjustments,
    6335              :                           bool update_clones, bitmap blocks_to_copy,
    6336              :                           basic_block new_entry)
    6337              : {
    6338       232121 :   struct cgraph_node *old_version_node;
    6339       232121 :   struct cgraph_node *new_version_node;
    6340       232121 :   copy_body_data id;
    6341       232121 :   tree p;
    6342       232121 :   unsigned i;
    6343       232121 :   struct ipa_replace_map *replace_info;
    6344       232121 :   basic_block old_entry_block, bb;
    6345       232121 :   auto_vec<gimple *, 10> init_stmts;
    6346       232121 :   tree vars = NULL_TREE;
    6347              : 
    6348              :   /* We can get called recursively from expand_call_inline via clone
    6349              :      materialization.  While expand_call_inline maintains input_location
    6350              :      we cannot tolerate it to leak into the materialized clone.  */
    6351       232121 :   location_t saved_location = input_location;
    6352       232121 :   input_location = UNKNOWN_LOCATION;
    6353              : 
    6354       232121 :   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
    6355              :               && TREE_CODE (new_decl) == FUNCTION_DECL);
    6356       232121 :   DECL_POSSIBLY_INLINED (old_decl) = 1;
    6357              : 
    6358       232121 :   old_version_node = cgraph_node::get (old_decl);
    6359       232121 :   gcc_checking_assert (old_version_node);
    6360       232121 :   new_version_node = cgraph_node::get (new_decl);
    6361       232121 :   gcc_checking_assert (new_version_node);
    6362              : 
    6363              :   /* Copy over debug args.  */
    6364       232121 :   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    6365              :     {
    6366         3314 :       vec<tree, va_gc> **new_debug_args, **old_debug_args;
    6367         3314 :       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
    6368         3314 :       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
    6369         3314 :       old_debug_args = decl_debug_args_lookup (old_decl);
    6370         3314 :       if (old_debug_args)
    6371              :         {
    6372         3314 :           new_debug_args = decl_debug_args_insert (new_decl);
    6373         6628 :           *new_debug_args = vec_safe_copy (*old_debug_args);
    6374              :         }
    6375              :     }
    6376              : 
    6377              :   /* Output the inlining info for this abstract function, since it has been
    6378              :      inlined.  If we don't do this now, we can lose the information about the
    6379              :      variables in the function when the blocks get blown away as soon as we
    6380              :      remove the cgraph node.  */
    6381       232121 :   (*debug_hooks->outlining_inline_function) (old_decl);
    6382              : 
    6383       232121 :   DECL_ARTIFICIAL (new_decl) = 1;
    6384       417546 :   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
    6385       417546 :   if (DECL_ORIGIN (old_decl) == old_decl)
    6386       208196 :     old_version_node->used_as_abstract_origin = true;
    6387       232121 :   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
    6388              : 
    6389              :   /* Prepare the data structures for the tree copy.  */
    6390       232121 :   memset (&id, 0, sizeof (id));
    6391              : 
    6392              :   /* Generate a new name for the new version. */
    6393       232121 :   id.statements_to_fold = new hash_set<gimple *>;
    6394              : 
    6395       232121 :   id.decl_map = new hash_map<tree, tree>;
    6396       232121 :   id.debug_map = NULL;
    6397       232121 :   id.src_fn = old_decl;
    6398       232121 :   id.dst_fn = new_decl;
    6399       232121 :   id.src_node = old_version_node;
    6400       232121 :   id.dst_node = new_version_node;
    6401       232121 :   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
    6402       232121 :   id.blocks_to_copy = blocks_to_copy;
    6403              : 
    6404       232121 :   id.copy_decl = copy_decl_no_change;
    6405       232121 :   id.transform_call_graph_edges
    6406       232121 :     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
    6407       232121 :   id.transform_new_cfg = true;
    6408       232121 :   id.transform_return_to_modify = false;
    6409       232121 :   id.transform_parameter = false;
    6410              : 
    6411       232121 :   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
    6412       232121 :   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
    6413       232121 :   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
    6414       232121 :   initialize_cfun (new_decl, old_decl,
    6415       232121 :                    new_entry ? new_entry->count : old_entry_block->count);
    6416       232121 :   new_version_node->has_omp_variant_constructs
    6417       232121 :     = old_version_node->has_omp_variant_constructs;
    6418       232121 :   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    6419       232121 :     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
    6420       232121 :       = id.src_cfun->gimple_df->ipa_pta;
    6421              : 
    6422              :   /* Copy the function's static chain.  */
    6423       232121 :   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
    6424       232121 :   if (p)
    6425         1950 :     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
    6426          975 :       = copy_static_chain (p, &id);
    6427              : 
    6428       232121 :   auto_vec<int, 16> new_param_indices;
    6429       232121 :   clone_info *info = clone_info::get (old_version_node);
    6430       238495 :   ipa_param_adjustments *old_param_adjustments
    6431       232121 :     = info ? info->param_adjustments : NULL;
    6432         6374 :   if (old_param_adjustments)
    6433         6291 :     old_param_adjustments->get_updated_indices (&new_param_indices);
    6434              : 
    6435              :   /* If there's a tree_map, prepare for substitution.  */
    6436       232121 :   if (tree_map)
    6437        40240 :     for (i = 0; i < tree_map->length (); i++)
    6438              :       {
    6439        25589 :         gimple *init;
    6440        25589 :         replace_info = (*tree_map)[i];
    6441              : 
    6442        25589 :         int p = replace_info->parm_num;
    6443        25589 :         if (old_param_adjustments)
    6444            0 :           p = new_param_indices[p];
    6445              : 
    6446        25589 :         tree parm;
    6447        75511 :         for (parm = DECL_ARGUMENTS (old_decl); p;
    6448        49922 :              parm = DECL_CHAIN (parm))
    6449        49922 :           p--;
    6450        25589 :         gcc_assert (parm);
    6451        25589 :         init = setup_one_parameter (&id, parm, replace_info->new_tree,
    6452              :                                     id.src_fn, NULL, &vars);
    6453        25589 :         if (init)
    6454         7797 :           init_stmts.safe_push (init);
    6455              :       }
    6456              : 
    6457       232121 :   ipa_param_body_adjustments *param_body_adjs = NULL;
    6458       232121 :   if (param_adjustments)
    6459              :     {
    6460       302002 :       param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
    6461              :                                                         new_decl, old_decl,
    6462       151001 :                                                         &id, &vars, tree_map);
    6463       151001 :       id.param_body_adjs = param_body_adjs;
    6464       151001 :       DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
    6465              :     }
    6466        81120 :   else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    6467       150478 :     DECL_ARGUMENTS (new_decl)
    6468        75239 :       = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
    6469              : 
    6470       232121 :   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
    6471       232121 :   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
    6472              : 
    6473       232121 :   declare_inline_vars (DECL_INITIAL (new_decl), vars);
    6474              : 
    6475       232121 :   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    6476              :     /* Add local vars.  */
    6477       134074 :     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
    6478              : 
    6479       232121 :   if (DECL_RESULT (old_decl) == NULL_TREE)
    6480              :     ;
    6481       151001 :   else if (param_adjustments && param_adjustments->m_skip_return
    6482       278982 :            && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    6483              :     {
    6484        40277 :       tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
    6485              :                                                    &id);
    6486        40277 :       declare_inline_vars (NULL, resdecl_repl);
    6487        40277 :       if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
    6488          109 :         resdecl_repl = build_fold_addr_expr (resdecl_repl);
    6489        40277 :       insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
    6490              : 
    6491        80554 :       DECL_RESULT (new_decl)
    6492        40277 :         = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
    6493              :                       RESULT_DECL, NULL_TREE, void_type_node);
    6494        40277 :       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
    6495        40277 :       DECL_IS_MALLOC (new_decl) = false;
    6496        40277 :       cfun->returns_struct = 0;
    6497        40277 :       cfun->returns_pcc_struct = 0;
    6498              :     }
    6499              :   else
    6500              :     {
    6501       191844 :       tree old_name;
    6502       191844 :       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
    6503       191844 :       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    6504       383688 :       if (gimple_in_ssa_p (id.src_cfun)
    6505       191844 :           && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
    6506         4475 :           && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
    6507              :         {
    6508         4456 :           tree new_name = make_ssa_name (DECL_RESULT (new_decl));
    6509         4456 :           insert_decl_map (&id, old_name, new_name);
    6510         4456 :           SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
    6511         4456 :           set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
    6512              :         }
    6513              :     }
    6514              : 
    6515              :   /* Set up the destination functions loop tree.  */
    6516       232121 :   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    6517              :     {
    6518       232121 :       cfun->curr_properties &= ~PROP_loops;
    6519       232121 :       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
    6520       232121 :       cfun->curr_properties |= PROP_loops;
    6521              :     }
    6522              : 
    6523              :   /* Copy the Function's body.  */
    6524       232121 :   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
    6525              :              new_entry);
    6526              : 
    6527              :   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    6528       232121 :   number_blocks (new_decl);
    6529              : 
    6530              :   /* We want to create the BB unconditionally, so that the addition of
    6531              :      debug stmts doesn't affect BB count, which may in the end cause
    6532              :      codegen differences.  */
    6533       232121 :   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
    6534       472039 :   while (init_stmts.length ())
    6535         7797 :     insert_init_stmt (&id, bb, init_stmts.pop ());
    6536       232121 :   if (param_body_adjs)
    6537       151001 :     param_body_adjs->append_init_stmts (bb);
    6538       232121 :   update_clone_info (&id);
    6539              : 
    6540              :   /* Remap the nonlocal_goto_save_area, if any.  */
    6541       232121 :   if (cfun->nonlocal_goto_save_area)
    6542              :     {
    6543            0 :       struct walk_stmt_info wi;
    6544              : 
    6545            0 :       memset (&wi, 0, sizeof (wi));
    6546            0 :       wi.info = &id;
    6547            0 :       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    6548              :     }
    6549              : 
    6550              :   /* Clean up.  */
    6551       464242 :   delete id.decl_map;
    6552       232121 :   if (id.debug_map)
    6553         1267 :     delete id.debug_map;
    6554       232121 :   free_dominance_info (CDI_DOMINATORS);
    6555       232121 :   free_dominance_info (CDI_POST_DOMINATORS);
    6556              : 
    6557       232121 :   update_max_bb_count ();
    6558       232121 :   fold_marked_statements (0, id.statements_to_fold);
    6559       464242 :   delete id.statements_to_fold;
    6560       232121 :   delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
    6561       232121 :   if (id.dst_node->definition)
    6562       227540 :     cgraph_edge::rebuild_references ();
    6563       232121 :   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    6564              :     {
    6565       232121 :       calculate_dominance_info (CDI_DOMINATORS);
    6566       232121 :       fix_loop_structure (NULL);
    6567              :     }
    6568       232121 :   update_ssa (TODO_update_ssa);
    6569              : 
    6570              :   /* After partial cloning we need to rescale frequencies, so they are
    6571              :      within proper range in the cloned function.  */
    6572       232121 :   if (new_entry)
    6573              :     {
    6574        46846 :       struct cgraph_edge *e;
    6575        46846 :       rebuild_frequencies ();
    6576              : 
    6577        46846 :       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
    6578       222033 :       for (e = new_version_node->callees; e; e = e->next_callee)
    6579              :         {
    6580       175187 :           basic_block bb = gimple_bb (e->call_stmt);
    6581       175187 :           e->count = bb->count;
    6582              :         }
    6583        51841 :       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
    6584              :         {
    6585         4995 :           basic_block bb = gimple_bb (e->call_stmt);
    6586         4995 :           e->count = bb->count;
    6587              :         }
    6588              :     }
    6589              : 
    6590       232121 :   if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
    6591              :     {
    6592       126377 :       vec<tree, va_gc> **debug_args = NULL;
    6593       126377 :       unsigned int len = 0;
    6594       126377 :       unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
    6595              : 
    6596       225480 :       for (i = 0; i < reset_len; i++)
    6597              :         {
    6598        99103 :           tree parm = param_body_adjs->m_reset_debug_decls[i];
    6599        99103 :           gcc_assert (is_gimple_reg (parm));
    6600        99103 :           tree ddecl;
    6601              : 
    6602        99103 :           if (debug_args == NULL)
    6603              :             {
    6604        74031 :               debug_args = decl_debug_args_insert (new_decl);
    6605        74031 :               len = vec_safe_length (*debug_args);
    6606              :             }
    6607        99103 :           ddecl = build_debug_expr_decl (TREE_TYPE (parm));
    6608              :           /* FIXME: Is setting the mode really necessary? */
    6609        99103 :           SET_DECL_MODE (ddecl, DECL_MODE (parm));
    6610        99103 :           vec_safe_push (*debug_args, DECL_ORIGIN (parm));
    6611        99103 :           vec_safe_push (*debug_args, ddecl);
    6612              :         }
    6613       126377 :       if (debug_args != NULL)
    6614              :         {
    6615              :           /* On the callee side, add
    6616              :              DEBUG D#Y s=> parm
    6617              :              DEBUG var => D#Y
    6618              :              stmts to the first bb where var is a VAR_DECL created for the
    6619              :              optimized away parameter in DECL_INITIAL block.  This hints
    6620              :              in the debug info that var (whole DECL_ORIGIN is the parm
    6621              :              PARM_DECL) is optimized away, but could be looked up at the
    6622              :              call site as value of D#X there.  */
    6623        74031 :           gimple_stmt_iterator cgsi
    6624        74031 :             = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
    6625        74031 :           gimple *def_temp;
    6626        74031 :           tree var = vars;
    6627        74031 :           i = vec_safe_length (*debug_args);
    6628        99103 :           do
    6629              :             {
    6630        99103 :               tree vexpr = NULL_TREE;
    6631        99103 :               i -= 2;
    6632        99103 :               while (var != NULL_TREE
    6633       132075 :                      && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
    6634        32972 :                 var = TREE_CHAIN (var);
    6635        99103 :               if (var == NULL_TREE)
    6636              :                 break;
    6637        99103 :               tree parm = (**debug_args)[i];
    6638        99103 :               if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
    6639       141892 :                 if (tree *d
    6640        70946 :                     = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
    6641        23936 :                   vexpr = *d;
    6642        99103 :               if (!vexpr)
    6643              :                 {
    6644        75167 :                   vexpr = build_debug_expr_decl (TREE_TYPE (parm));
    6645              :                   /* FIXME: Is setting the mode really necessary? */
    6646        75167 :                   SET_DECL_MODE (vexpr, DECL_MODE (parm));
    6647              :                 }
    6648        99103 :               def_temp = gimple_build_debug_bind (var, vexpr, NULL);
    6649        99103 :               gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
    6650        99103 :               def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
    6651        99103 :               gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
    6652              :             }
    6653        99103 :           while (i > len);
    6654              :         }
    6655              :     }
    6656       151001 :   delete param_body_adjs;
    6657       232121 :   free_dominance_info (CDI_DOMINATORS);
    6658       232121 :   free_dominance_info (CDI_POST_DOMINATORS);
    6659              : 
    6660       232121 :   gcc_assert (!id.debug_stmts.exists ());
    6661       232121 :   pop_cfun ();
    6662       232121 :   input_location = saved_location;
    6663       232121 :   return;
    6664       232121 : }
    6665              : 
    6666              : /* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
    6667              :    the callee and return the inlined body on success.  */
    6668              : 
    6669              : tree
    6670            0 : maybe_inline_call_in_expr (tree exp)
    6671              : {
    6672            0 :   tree fn = get_callee_fndecl (exp);
    6673              : 
    6674              :   /* We can only try to inline "const" functions.  */
    6675            0 :   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    6676              :     {
    6677            0 :       call_expr_arg_iterator iter;
    6678            0 :       copy_body_data id;
    6679            0 :       tree param, arg, t;
    6680            0 :       hash_map<tree, tree> decl_map;
    6681              : 
    6682              :       /* Remap the parameters.  */
    6683            0 :       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
    6684            0 :            param;
    6685            0 :            param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
    6686            0 :         decl_map.put (param, arg);
    6687              : 
    6688            0 :       memset (&id, 0, sizeof (id));
    6689            0 :       id.src_fn = fn;
    6690            0 :       id.dst_fn = current_function_decl;
    6691            0 :       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
    6692            0 :       id.decl_map = &decl_map;
    6693              : 
    6694            0 :       id.copy_decl = copy_decl_no_change;
    6695            0 :       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
    6696            0 :       id.transform_new_cfg = false;
    6697            0 :       id.transform_return_to_modify = true;
    6698            0 :       id.transform_parameter = true;
    6699              : 
    6700              :       /* Make sure not to unshare trees behind the front-end's back
    6701              :          since front-end specific mechanisms may rely on sharing.  */
    6702            0 :       id.regimplify = false;
    6703            0 :       id.do_not_unshare = true;
    6704              : 
    6705              :       /* We're not inside any EH region.  */
    6706            0 :       id.eh_lp_nr = 0;
    6707              : 
    6708            0 :       t = copy_tree_body (&id);
    6709              : 
    6710              :       /* We can only return something suitable for use in a GENERIC
    6711              :          expression tree.  */
    6712            0 :       if (TREE_CODE (t) == MODIFY_EXPR)
    6713            0 :         return TREE_OPERAND (t, 1);
    6714            0 :     }
    6715              : 
    6716              :    return NULL_TREE;
    6717              : }
    6718              : 
    6719              : /* Duplicate a type, fields and all.  */
    6720              : 
    6721              : tree
    6722           63 : build_duplicate_type (tree type)
    6723              : {
    6724           63 :   struct copy_body_data id;
    6725              : 
    6726           63 :   memset (&id, 0, sizeof (id));
    6727           63 :   id.src_fn = current_function_decl;
    6728           63 :   id.dst_fn = current_function_decl;
    6729           63 :   id.src_cfun = cfun;
    6730           63 :   id.decl_map = new hash_map<tree, tree>;
    6731           63 :   id.debug_map = NULL;
    6732           63 :   id.copy_decl = copy_decl_no_change;
    6733              : 
    6734           63 :   type = remap_type_1 (type, &id);
    6735              : 
    6736          126 :   delete id.decl_map;
    6737           63 :   if (id.debug_map)
    6738            0 :     delete id.debug_map;
    6739              : 
    6740           63 :   TYPE_CANONICAL (type) = type;
    6741              : 
    6742           63 :   return type;
    6743              : }
    6744              : 
    6745              : /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
    6746              :    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
    6747              :    evaluation.  */
    6748              : 
    6749              : tree
    6750     33066939 : copy_fn (tree fn, tree& parms, tree& result)
    6751              : {
    6752     33066939 :   copy_body_data id;
    6753     33066939 :   tree param;
    6754     33066939 :   hash_map<tree, tree> decl_map;
    6755              : 
    6756     33066939 :   tree *p = &parms;
    6757     33066939 :   *p = NULL_TREE;
    6758              : 
    6759     33066939 :   memset (&id, 0, sizeof (id));
    6760     33066939 :   id.src_fn = fn;
    6761     33066939 :   id.dst_fn = current_function_decl;
    6762     33066939 :   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
    6763     33066939 :   id.decl_map = &decl_map;
    6764              : 
    6765    140291609 :   id.copy_decl = [] (tree decl, copy_body_data *id)
    6766              :     {
    6767    107224670 :       if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
    6768              :         /* Don't make copies of local types or injected enumerators,
    6769              :            the C++ constexpr evaluator doesn't need them and they
    6770              :            confuse modules streaming.  */
    6771              :         return decl;
    6772    104861608 :       return copy_decl_no_change (decl, id);
    6773              :     };
    6774     33066939 :   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
    6775     33066939 :   id.transform_new_cfg = false;
    6776     33066939 :   id.transform_return_to_modify = false;
    6777     33066939 :   id.transform_parameter = true;
    6778              : 
    6779              :   /* Make sure not to unshare trees behind the front-end's back
    6780              :      since front-end specific mechanisms may rely on sharing.  */
    6781     33066939 :   id.regimplify = false;
    6782     33066939 :   id.do_not_unshare = true;
    6783     33066939 :   id.do_not_fold = true;
    6784              : 
    6785              :   /* We're not inside any EH region.  */
    6786     33066939 :   id.eh_lp_nr = 0;
    6787              : 
    6788              :   /* Remap the parameters and result and return them to the caller.  */
    6789     33066939 :   for (param = DECL_ARGUMENTS (fn);
    6790     80413339 :        param;
    6791     47346400 :        param = DECL_CHAIN (param))
    6792              :     {
    6793     47346400 :       *p = remap_decl (param, &id);
    6794     47346400 :       p = &DECL_CHAIN (*p);
    6795              :     }
    6796              : 
    6797     33066939 :   if (DECL_RESULT (fn))
    6798     33066939 :     result = remap_decl (DECL_RESULT (fn), &id);
    6799              :   else
    6800            0 :     result = NULL_TREE;
    6801              : 
    6802     33066939 :   return copy_tree_body (&id);
    6803     33066939 : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.