LCOV - code coverage report
Current view: top level - gcc - function.h (source / functions) Coverage Total Hit
Test: gcc.info Lines: 100.0 % 11 11
Test Date: 2026-02-28 14:20:25 Functions: 100.0 % 1 1
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Structure for saving state for a nested function.
       2              :    Copyright (C) 1989-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #ifndef GCC_FUNCTION_H
      21              : #define GCC_FUNCTION_H
      22              : 
      23              : 
      24              : /* Stack of pending (incomplete) sequences saved by `start_sequence'.
      25              :    Each element describes one pending sequence.
      26              :    The main insn-chain is saved in the last element of the chain,
      27              :    unless the chain is empty.  */
      28              : 
struct GTY(()) sequence_stack {
  /* First and last insns in the chain of the saved sequence.  */
  rtx_insn *first;
  rtx_insn *last;
  /* Next (outer) pending sequence on the stack, or NULL at the bottom.  */
  struct sequence_stack *next;
};
      35              : 
/* Per-function state for RTL emission (insn chain, register and label
   numbering).  */
struct GTY(()) emit_status {
  /* Grow the regno-indexed vectors (see regno_pointer_align_length below)
     so that they can accommodate the current register count.
     NOTE(review): declaration only — presumably resizes regno_pointer_align
     and x_regno_reg_rtx; confirm against the definition in emit-rtl.cc.  */
  void ensure_regno_capacity ();

  /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function.
     After rtl generation, it is 1 plus the largest register number used.  */
  int x_reg_rtx_no;

  /* Lowest label number in current function.  */
  int x_first_label_num;

  /* seq.first and seq.last are the ends of the doubly-linked chain of
     rtl for the current function.  Both are reset to null at the
     start of rtl generation for the function.

     start_sequence saves both of these on seq.next and then starts
     a new, nested sequence of insns.

     seq.next is a stack of pending (incomplete) sequences saved by
     start_sequence.  Each element describes one pending sequence.
     The main insn-chain is the last element of the chain.  */
  struct sequence_stack seq;

  /* INSN_UID for next insn emitted.
     Reset to 1 for each function compiled.  */
  int x_cur_insn_uid;

  /* INSN_UID for next debug insn emitted.  Only used if
     --param min-nondebug-insn-uid=<value> is given with nonzero value.  */
  int x_cur_debug_insn_uid;

  /* The length of the regno_pointer_align, regno_decl, and x_regno_reg_rtx
     vectors.  Since these vectors are needed during the expansion phase when
     the total number of registers in the function is not yet known, the
     vectors are copied and made bigger when necessary.  */
  int regno_pointer_align_length;

  /* Indexed by pseudo register number, if nonzero gives the known alignment
     for that pseudo (if REG_POINTER is set in x_regno_reg_rtx).
     Allocated in parallel with x_regno_reg_rtx.  */
  unsigned char * GTY((skip)) regno_pointer_align;
};
      77              : 
      78              : 
      79              : /* Indexed by register number, gives an rtx for that register (and only
      80              :    that register).  For pseudo registers, it is the unique rtx for
      81              :    that pseudo.  For hard registers, it is an rtx of the mode specified
      82              :    by reg_raw_mode.
      83              : 
      84              :    FIXME: We could put it into emit_status struct, but gengtype is not
      85              :    able to deal with length attribute nested in top level structures.  */
      86              : 
      87              : extern GTY ((length ("crtl->emit.x_reg_rtx_no"))) rtx * regno_reg_rtx;
      88              : 
      89              : /* For backward compatibility... eventually these should all go away.  */
      90              : #define reg_rtx_no (crtl->emit.x_reg_rtx_no)
      91              : 
      92              : #define REGNO_POINTER_ALIGN(REGNO) (crtl->emit.regno_pointer_align[REGNO])
      93              : 
/* Per-function state used during RTL expansion of expressions.  */
struct GTY(()) expr_status {
  /* Number of units that we should eventually pop off the stack.
     These are the arguments to function calls that have already returned.  */
  poly_int64 x_pending_stack_adjust;

  /* Under some ABIs, it is the caller's responsibility to pop arguments
     pushed for function calls.  A naive implementation would simply pop
     the arguments immediately after each call.  However, if several
     function calls are made in a row, it is typically cheaper to pop
     all the arguments after all of the calls are complete since a
     single pop instruction can be used.  Therefore, GCC attempts to
     defer popping the arguments until absolutely necessary.  (For
     example, at the end of a conditional, the arguments must be popped,
     since code outside the conditional won't know whether or not the
     arguments need to be popped.)

     When INHIBIT_DEFER_POP is nonzero, however, the compiler does not
     attempt to defer pops.  Instead, the stack is popped immediately
     after each call.  Rather than setting this variable directly, use
     NO_DEFER_POP and OK_DEFER_POP.  */
  int x_inhibit_defer_pop;

  /* If PREFERRED_STACK_BOUNDARY and PUSH_ROUNDING are defined, the stack
     boundary can be momentarily unaligned while pushing the arguments.
     Record the delta since last aligned boundary here in order to get
     stack alignment in the nested function calls working right.  */
  poly_int64 x_stack_pointer_delta;

  /* Nonzero means __builtin_saveregs has already been done in this function.
     The value is the pseudoreg containing the value __builtin_saveregs
     returned.  */
  rtx x_saveregs_value;

  /* Similarly for __builtin_apply_args.  */
  rtx x_apply_args_value;

  /* List of labels that must never be deleted.  */
  vec<rtx_insn *, va_gc> *x_forced_labels;
};
     133              : 
     134              : typedef struct call_site_record_d *call_site_record;
     135              : 
/* RTL representation of exception handling.  */
struct GTY(()) rtl_eh {
  /* State for expanding __builtin_eh_return: the stack adjustment, the
     handler address, and the label to branch to.  NOTE(review): inferred
     from the "ehr_" prefix and calls_eh_return machinery — confirm
     against except.cc.  */
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx_code_label *ehr_label;

  /* State for setjmp/longjmp exception handling: presumably the SJLJ
     function-context object and the insn after which the exit code is
     placed — confirm against except.cc.  */
  rtx sjlj_fc;
  rtx_insn *sjlj_exit_after;

  /* Encoded exception action records for this function.  */
  vec<uchar, va_gc> *action_record_data;

  /* Call-site records; two vectors, presumably one per text partition
     (normal and cold section) — confirm against except.cc.  */
  vec<call_site_record, va_gc> *call_site_record_v[2];
};
     149              : 
     150              : #define pending_stack_adjust (crtl->expr.x_pending_stack_adjust)
     151              : #define inhibit_defer_pop (crtl->expr.x_inhibit_defer_pop)
     152              : #define saveregs_value (crtl->expr.x_saveregs_value)
     153              : #define apply_args_value (crtl->expr.x_apply_args_value)
     154              : #define forced_labels (crtl->expr.x_forced_labels)
     155              : #define stack_pointer_delta (crtl->expr.x_stack_pointer_delta)
     156              : 
     157              : struct gimple_df;
     158              : struct call_site_record_d;
     159              : struct dw_fde_node;
     160              : class range_query;
     161              : 
/* Per-function state for assembly output of constants (varasm).  */
struct GTY(()) varasm_status {
  /* If we're using a per-function constant pool, this is it.  */
  struct rtx_constant_pool *pool;

  /* Number of tree-constants deferred during the expansion of this
     function.  */
  unsigned int deferred_constants;
};
     170              : 
     171              : 
     172              : /* Data for function partitioning.  */
struct GTY(()) function_subsections {
  /* Assembly labels for the hot and cold text sections, to
     be used by debugger functions for determining the size of text
     sections.  */

  /* Labels at the start of each section.  */
  const char *hot_section_label;
  const char *cold_section_label;
  /* Labels at the end of each section.  */
  const char *hot_section_end_label;
  const char *cold_section_end_label;
};
     183              : 
     184              : /* Describe an empty area of space in the stack frame.  These can be chained
     185              :    into a list; this is used to keep track of space wasted for alignment
     186              :    reasons.  */
class GTY(()) frame_space
{
public:
  /* Next free area in the chain, or NULL.  */
  class frame_space *next;

  /* Frame offset at which the free area starts.  */
  poly_int64 start;
  /* Size of the free area.  */
  poly_int64 length;
};
     195              : 
     196              : /* Describe emitted calls for -fcallgraph-info.  */
struct GTY(()) callinfo_callee
{
  /* Source location of the call.  */
  location_t location;
  /* Declaration of the callee (presumably a FUNCTION_DECL — confirm
     against record_final_call's callers).  */
  tree decl;
};
     202              : 
     203              : /* Describe dynamic allocation for -fcallgraph-info=da.  */
struct GTY(()) callinfo_dalloc
{
  /* Source location of the dynamic allocation.  */
  location_t location;
  /* Name identifying what was allocated; NOTE(review): presumably derived
     from the DECL_OR_EXP passed to record_dynamic_alloc — confirm.  */
  char const *name;
};
     209              : 
/* Stack-usage (-fstack-usage) and callgraph (-fcallgraph-info) data
   collected for a single function.  */
class GTY(()) stack_usage
{
public:
  /* # of bytes of static stack space allocated by the function.  */
  HOST_WIDE_INT static_stack_size;

  /* # of bytes of dynamic stack space allocated by the function.  This is
     meaningful only if has_unbounded_dynamic_stack_size is zero.  */
  HOST_WIDE_INT dynamic_stack_size;

  /* Upper bound on the number of bytes pushed onto the stack after the
     prologue.  If !ACCUMULATE_OUTGOING_ARGS, it contains the outgoing
     arguments.  */
  poly_int64 pushed_stack_size;

  /* Nonzero if the amount of stack space allocated dynamically cannot
     be bounded at compile-time.  */
  unsigned int has_unbounded_dynamic_stack_size : 1;

  /* Functions called within the function, if callgraph is enabled.  */
  vec<callinfo_callee, va_gc> *callees;

  /* Dynamic allocations encountered within the function, if callgraph
     da is enabled.  */
  vec<callinfo_dalloc, va_gc> *dallocs;
};
     236              : 
     237              : #define current_function_static_stack_size (cfun->su->static_stack_size)
     238              : #define current_function_dynamic_stack_size (cfun->su->dynamic_stack_size)
     239              : #define current_function_pushed_stack_size (cfun->su->pushed_stack_size)
     240              : #define current_function_has_unbounded_dynamic_stack_size \
     241              :   (cfun->su->has_unbounded_dynamic_stack_size)
     242              : #define current_function_allocates_dynamic_stack_space    \
     243              :   (current_function_dynamic_stack_size != 0               \
     244              :    || current_function_has_unbounded_dynamic_stack_size)
     245              : 
     246              : /* This structure can save all the important global and static variables
     247              :    describing the status of the current function.  */
     248              : 
struct GTY(()) function {
  /* Exception handling status for this function.  */
  struct eh_status *eh;

  /* The control flow graph for this function.  */
  struct control_flow_graph *cfg;

  /* GIMPLE body for this function.  */
  gimple_seq gimple_body;

  /* SSA and dataflow information.  */
  struct gimple_df *gimple_df;

  /* The loops in this function.  */
  struct loops *x_current_loops;

  /* Filled by the GIMPLE and RTL FEs, pass to start compilation with.  */
  char *pass_startwith;

  /* The stack usage of this function.  */
  class stack_usage *su;

  /* Value histograms attached to particular statements.  */
  htab_t GTY((skip)) value_histograms;

  /* Annotated gconds so that basic conditions in the same expression map to
     the same uid.  This is used for condition coverage.  */
  hash_map <gcond*, unsigned> *GTY((skip)) cond_uids;

  /* Per-function copyid allocator for hierarchical discriminators.
     Tracks the next available copyid for each location to ensure uniqueness
     across code duplication passes (unrolling, vectorization, etc.).  */
  struct copyid_allocator *GTY((skip)) copyid_alloc;

  /* For function.cc.  */

  /* Points to the FUNCTION_DECL of this function.  */
  tree decl;

  /* A PARM_DECL that should contain the static chain for this function.
     It will be initialized at the beginning of the function.  */
  tree static_chain_decl;

  /* An expression that contains the non-local goto save area.  The first
     word is the saved frame pointer and the second is the saved stack
     pointer.  */
  tree nonlocal_goto_save_area;

  /* Vector of function local variables, functions, types and constants.  */
  vec<tree, va_gc> *local_decls;

  /* For md files.  */

  /* tm.h can use this to store whatever it likes.  */
  struct machine_function * GTY ((maybe_undef)) machine;

  /* Language-specific code can use this to store whatever it likes.  */
  struct language_function * language;

  /* Used types hash table.  */
  hash_set<tree> *GTY (()) used_types_hash;

  /* Dwarf2 Frame Description Entry, containing the Call Frame Instructions
     used for unwinding.  Only set when either dwarf2 unwinding or dwarf2
     debugging is enabled.  */
  struct dw_fde_node *fde;

  /* Range query mechanism for functions.  The default is to pick up
     global ranges.  If a pass wants on-demand ranges OTOH, it must
     call enable/disable_ranger().  The pointer is never null.  It
     should be queried by calling get_range_query().  */
  range_query * GTY ((skip)) x_range_query;

  /* Last statement uid.  */
  int last_stmt_uid;

  /* Debug marker counter.  Count begin stmt markers.  We don't have
     to keep it exact, it's more of a rough estimate to enable us to
     decide whether they are too many to copy during inlining, or when
     expanding to RTL.  */
  int debug_marker_count;

  /* Function sequence number for profiling, debugging, etc.  */
  int funcdef_no;

  /* Line number of the start of the function for debugging purposes.  */
  location_t function_start_locus;

  /* Line number of the end of the function.  */
  location_t function_end_locus;

  /* Properties used by the pass manager.  */
  unsigned int curr_properties;

  /* Different from normal TODO_flags which are handled right at the
     beginning or the end of one pass execution, the pending_TODOs
     are passed down in the pipeline until one of its consumers can
     perform the requested action.  Consumers should then clear the
     flags for the actions that they have taken.  */
  unsigned int pending_TODOs;

  /* Non-null if the function does something that would prevent it from
     being copied; this applies to both versioning and inlining.  Set to
     a string describing the reason for failure.  */
  const char * GTY((skip)) cannot_be_copied_reason;

  /* Last assigned dependence info clique.  */
  unsigned short last_clique;

  /* Collected bit flags.  */

  /* Number of units of general registers that need saving in stdarg
     function.  What unit is depends on the backend, either it is number
     of bytes, or it can be number of registers.  */
  unsigned int va_list_gpr_size : 8;

  /* Number of units of floating point registers that need saving in stdarg
     function.  */
  unsigned int va_list_fpr_size : 8;

  /* Nonzero if function being compiled can call setjmp.  */
  unsigned int calls_setjmp : 1;

  /* Nonzero if function being compiled can call alloca,
     either as a subroutine or builtin.  */
  unsigned int calls_alloca : 1;

  /* Nonzero if function being compiled can call __builtin_eh_return.  */
  unsigned int calls_eh_return : 1;

  /* Nonzero if function being compiled receives nonlocal gotos
     from nested functions.  */
  unsigned int has_nonlocal_label : 1;

  /* Nonzero if function being compiled has a forced label
     placed into static storage.  */
  unsigned int has_forced_label_in_static : 1;

  /* Nonzero if we've set cannot_be_copied_reason.  I.e. if
     (cannot_be_copied_set && !cannot_be_copied_reason), the function
     can in fact be copied.  */
  unsigned int cannot_be_copied_set : 1;

  /* Nonzero if current function uses stdarg.h or equivalent.  */
  unsigned int stdarg : 1;

  unsigned int after_inlining : 1;
  unsigned int always_inline_functions_inlined : 1;

  /* Nonzero if function being compiled can throw synchronous non-call
     exceptions.  */
  unsigned int can_throw_non_call_exceptions : 1;

  /* Nonzero if instructions that may throw exceptions but don't otherwise
     contribute to the execution of the program can be deleted.  */
  unsigned int can_delete_dead_exceptions : 1;

  /* Fields below this point are not set for abstract functions; see
     allocate_struct_function.  */

  /* Nonzero if function being compiled needs to be given an address
     where the value should be stored.  */
  unsigned int returns_struct : 1;

  /* Nonzero if function being compiled needs to
     return the address of where it has put a structure value.  */
  unsigned int returns_pcc_struct : 1;

  /* Nonzero if this function has local DECL_HARD_REGISTER variables.
     In this case code motion has to be done more carefully.  */
  unsigned int has_local_explicit_reg_vars : 1;

  /* Nonzero if the current function is a thunk, i.e., a lightweight
     function (implemented by the output_mi_thunk hook) that just
     adjusts one of its arguments and forwards to another
     function.  */
  unsigned int is_thunk : 1;

  /* Nonzero if the current function contains any loops with
     loop->force_vectorize set.  */
  unsigned int has_force_vectorize_loops : 1;

  /* Nonzero if the current function contains any loops with
     nonzero value in loop->simduid.  */
  unsigned int has_simduid_loops : 1;

  /* Nonzero when the tail call has been identified.  */
  unsigned int tail_call_marked : 1;

  /* Has musttail marked calls.  */
  unsigned int has_musttail : 1;

  /* Nonzero if the current function contains a #pragma GCC unroll.  */
  unsigned int has_unroll : 1;

  /* Set when the function was compiled with generation of debug
     (begin stmt, inline entry, ...) markers enabled.  */
  unsigned int debug_nonbind_markers : 1;

  /* Set if this is a coroutine-related function.  */
  unsigned int coroutine_component : 1;

  /* Set if there are any OMP_TARGET regions in the function.  */
  unsigned int has_omp_target : 1;

  /* Set for artificial function created for [[assume (cond)]].
     These should be GIMPLE optimized, but not expanded to RTL.  */
  unsigned int assume_function : 1;

  /* Nonzero if reload will have to split basic blocks.  */
  unsigned int split_basic_blocks_after_reload : 1;
};
     460              : 
     461              : /* Add the decl D to the local_decls list of FUN.  */
     462              : 
     463              : void add_local_decl (struct function *fun, tree d);
     464              : 
     465              : #define FOR_EACH_LOCAL_DECL(FUN, I, D)          \
     466              :   FOR_EACH_VEC_SAFE_ELT_REVERSE ((FUN)->local_decls, I, D)
     467              : 
     468              : /* Record a final call to CALLEE at LOCATION.  */
     469              : void record_final_call (tree callee, location_t location);
     470              : 
     471              : /* Record a dynamic allocation made for DECL_OR_EXP.  */
     472              : void record_dynamic_alloc (tree decl_or_exp);
     473              : 
     474              : /* If va_list_[gf]pr_size is set to this, it means we don't know how
     475              :    many units need to be saved.  */
     476              : #define VA_LIST_MAX_GPR_SIZE    255
     477              : #define VA_LIST_MAX_FPR_SIZE    255
     478              : 
     479              : /* The function currently being compiled.  */
     480              : extern GTY(()) struct function *cfun;
     481              : 
     482              : /* In order to ensure that cfun is not set directly, we redefine it so
     483              :    that it is not an lvalue.  Rather than assign to cfun, use
     484              :    push_cfun or set_cfun.  */
     485              : #define cfun (cfun + 0)
     486              : 
     487              : /* Nonzero if we've already converted virtual regs to hard regs.  */
     488              : extern int virtuals_instantiated;
     489              : 
     490              : /* Nonzero if at least one trampoline has been created.  */
     491              : extern int trampolines_created;
     492              : 
/* Entry of types_used_by_vars_hash: pairs a referenced type with the
   global variable whose initializer references it.  */
struct GTY((for_user)) types_used_by_vars_entry {
  /* A type referenced by the initializer of VAR_DECL.  */
  tree type;
  /* The global variable whose initializer references TYPE.  */
  tree var_decl;
};
     497              : 
/* Hash and equality functions for types_used_by_vars_entry, used by
   types_used_by_vars_hash below.  */
struct used_type_hasher : ggc_ptr_hash<types_used_by_vars_entry>
{
  static hashval_t hash (types_used_by_vars_entry *);
  static bool equal (types_used_by_vars_entry *, types_used_by_vars_entry *);
};
     503              : 
/* Hash table recording the relationship between a global variable
   and the types it references in its initializer.  The key of the
   entry is a referenced type, and the value is the DECL of the global
   variable.  used_type_hasher::hash and used_type_hasher::equal above
   are the hash and equality functions to use for this hash table.  */
     509              : extern GTY(()) hash_table<used_type_hasher> *types_used_by_vars_hash;
     510              : 
     511              : void types_used_by_var_decl_insert (tree type, tree var_decl);
     512              : 
     513              : /* During parsing of a global variable, this vector contains the types
     514              :    referenced by the global variable.  */
     515              : extern GTY(()) vec<tree, va_gc> *types_used_by_cur_var_decl;
     516              : 
     517              : 
     518              : /* Return the loop tree of FN.  */
     519              : 
     520              : inline struct loops *
     521   7040492022 : loops_for_fn (struct function *fn)
     522              : {
     523   6918170262 :   return fn->x_current_loops;
     524              : }
     525              : 
     526              : /* Set the loop tree of FN to LOOPS.  */
     527              : 
     528              : inline void
     529     11000855 : set_loops_for_fn (struct function *fn, struct loops *loops)
     530              : {
     531     11000855 :   gcc_checking_assert (fn->x_current_loops == NULL || loops == NULL);
     532     11000855 :   fn->x_current_loops = loops;
     533       146956 : }
     534              : 
     535              : /* Get a new unique dependence clique or zero if none is left.  */
     536              : 
     537              : inline unsigned short
     538      1026397 : get_new_clique (function *fn)
     539              : {
     540      1026397 :   unsigned short clique = fn->last_clique + 1;
     541      1026397 :   if (clique != 0)
     542      1026397 :     fn->last_clique = clique;
     543      1026397 :   return clique;
     544              : }
     545              : 
     546              : /* For backward compatibility... eventually these should all go away.  */
     547              : #define current_function_funcdef_no (cfun->funcdef_no)
     548              : 
     549              : #define current_loops (cfun->x_current_loops)
     550              : #define dom_computed (cfun->cfg->x_dom_computed)
     551              : #define n_bbs_in_dom_tree (cfun->cfg->x_n_bbs_in_dom_tree)
     552              : #define VALUE_HISTOGRAMS(fun) (fun)->value_histograms
     553              : 
     554              : /* A pointer to a function to create target specific, per-function
     555              :    data structures.  */
     556              : extern struct machine_function * (*init_machine_status) (void);
     557              : 
/* Structure to record the size of a sequence of arguments
   as the sum of a tree-expression and a constant.  This structure is
   also used to store offsets from the stack, which might be negative,
   so the variable part must be ssizetype, not sizetype.  */

struct args_size
{
  /* Compile-time-constant part of the value.  */
  poly_int64 constant;
  /* Variable part, an ssizetype tree, or zero if the value is
     entirely constant (see the var == 0 tests in ARGS_SIZE_TREE and
     ADD_PARM_SIZE below).  */
  tree var;
};
     568              : 
/* Package up various arg related fields of struct args for
   locate_and_pad_parm.  All sizes/offsets are args_size values, i.e.
   a constant plus an optional variable part.  */
struct locate_and_pad_arg_data
{
  /* Size of this argument on the stack, rounded up for any padding it
     gets.  If REG_PARM_STACK_SPACE is defined, then register parms are
     counted here, otherwise they aren't.  */
  struct args_size size;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Offset to the start of the stack slot.  Different from OFFSET
     if this arg pads downward.  */
  struct args_size slot_offset;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
  /* Which way we should pad this arg.  */
  pad_direction where_pad;
  /* slot_offset is at least this aligned.  */
  unsigned int boundary;
};
     590              : 
/* Add the value of the tree INC to the `struct args_size' TO.
   A compile-time-constant INC is folded into TO.constant; any other
   value is accumulated into the ssizetype expression TO.var.  */

#define ADD_PARM_SIZE(TO, INC)                                  \
do {                                                            \
  tree inc = (INC);                                             \
  if (tree_fits_shwi_p (inc))                                   \
    (TO).constant += tree_to_shwi (inc);                        \
  else if ((TO).var == 0)                                       \
    (TO).var = fold_convert (ssizetype, inc);                   \
  else                                                          \
    (TO).var = size_binop (PLUS_EXPR, (TO).var,                 \
                           fold_convert (ssizetype, inc));      \
} while (0)
     604              : 
/* Subtract the value of the tree DEC from the `struct args_size' TO.
   Mirror image of ADD_PARM_SIZE: a compile-time-constant DEC adjusts
   TO.constant, anything else is folded into the ssizetype expression
   TO.var (negated if no variable part existed yet).  */
#define SUB_PARM_SIZE(TO, DEC)                                  \
do {                                                            \
  tree dec = (DEC);                                             \
  if (tree_fits_shwi_p (dec))                                   \
    (TO).constant -= tree_to_shwi (dec);                        \
  else if ((TO).var == 0)                                       \
    (TO).var = size_binop (MINUS_EXPR, ssize_int (0),           \
                           fold_convert (ssizetype, dec));      \
  else                                                          \
    (TO).var = size_binop (MINUS_EXPR, (TO).var,                \
                           fold_convert (ssizetype, dec));      \
} while (0)
     617              : 
/* Convert the implicit sum in a `struct args_size' into a tree
   of type ssizetype.  With no variable part (var == 0) the result is
   simply the constant as an ssize_int.  */
#define ARGS_SIZE_TREE(SIZE)                                    \
((SIZE).var == 0 ? ssize_int ((SIZE).constant)                  \
 : size_binop (PLUS_EXPR, fold_convert (ssizetype, (SIZE).var), \
               ssize_int ((SIZE).constant)))
     624              : 
/* Convert the implicit sum in a `struct args_size' into an rtx.
   A purely-constant size is rendered directly with gen_int_mode in
   Pmode; otherwise the ssizetype tree built by ARGS_SIZE_TREE is
   expanded to RTL.  */
#define ARGS_SIZE_RTX(SIZE)                                     \
((SIZE).var == 0 ? gen_int_mode ((SIZE).constant, Pmode)        \
 : expand_normal (ARGS_SIZE_TREE (SIZE)))
     629              : 
/* Bit flags, presumably for the final int argument of
   assign_stack_local_1 above — NOTE(review): confirm against the
   definition in function.cc.  */
#define ASLK_REDUCE_ALIGN 1
#define ASLK_RECORD_PAD 2
     632              : 
/* If pointers to member functions use the least significant bit to
   indicate whether a function is virtual, ensure a pointer
   to this function will have that bit clear.  Raising the boundary to
   at least 2 * BITS_PER_UNIT makes code addresses even, keeping bit 0
   free for the vbit.  */
#define MINIMUM_METHOD_BOUNDARY \
  ((TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)       \
   ? MAX (FUNCTION_BOUNDARY, 2 * BITS_PER_UNIT) : FUNCTION_BOUNDARY)
     639              : 
/* Classification of how a function probes its stack frame for
   stack-clash protection; consumed by dump_stack_clash_frame_info
   below.  */
enum stack_clash_probes {
  NO_PROBE_NO_FRAME,        /* No probe; the function has no frame.  */
  NO_PROBE_SMALL_FRAME,     /* No probe; the frame is small enough.  */
  PROBE_INLINE,             /* Probes emitted as straight-line code.  */
  PROBE_LOOP                /* Probes emitted inside a loop.  */
};
     646              : 
     647              : extern void dump_stack_clash_frame_info (enum stack_clash_probes, bool);
     648              : 
     649              : 
     650              : extern void push_function_context (void);
     651              : extern void pop_function_context (void);
     652              : 
     653              : /* Save and restore status information for a nested function.  */
     654              : extern void free_after_parsing (struct function *);
     655              : extern void free_after_compilation (struct function *);
     656              : 
     657              : /* Return size needed for stack frame based on slots so far allocated.
     658              :    This size counts from zero.  It is not rounded to STACK_BOUNDARY;
     659              :    the caller may have to do that.  */
     660              : extern poly_int64 get_frame_size (void);
     661              : 
     662              : /* Issue an error message and return TRUE if frame OFFSET overflows in
     663              :    the signed target pointer arithmetics for function FUNC.  Otherwise
     664              :    return FALSE.  */
     665              : extern bool frame_offset_overflow (poly_int64, tree);
     666              : 
     667              : extern unsigned int spill_slot_alignment (machine_mode);
     668              : 
     669              : extern rtx assign_stack_local_1 (machine_mode, poly_int64, int, int);
     670              : extern rtx assign_stack_local (machine_mode, poly_int64, int);
     671              : extern rtx assign_stack_temp_for_type (machine_mode, poly_int64, tree);
     672              : extern rtx assign_stack_temp (machine_mode, poly_int64);
     673              : extern rtx assign_temp (tree, int, int);
     674              : extern void update_temp_slot_address (rtx, rtx);
     675              : extern void preserve_temp_slots (rtx);
     676              : extern void free_temp_slots (void);
     677              : extern void push_temp_slots (void);
     678              : extern void pop_temp_slots (void);
     679              : extern void init_temp_slots (void);
     680              : extern rtx get_hard_reg_initial_reg (rtx);
     681              : extern rtx get_hard_reg_initial_val (machine_mode, unsigned int);
     682              : extern rtx has_hard_reg_initial_val (machine_mode, unsigned int);
     683              : 
     684              : /* Called from gimple_expand_cfg.  */
     685              : extern void emit_initial_value_sets (void);
     686              : 
     687              : extern bool initial_value_entry (int i, rtx *, rtx *);
     688              : extern void instantiate_decl_rtl (rtx x);
     689              : extern bool aggregate_value_p (const_tree, const_tree);
     690              : extern bool use_register_for_decl (const_tree);
     691              : extern gimple_seq gimplify_parameters (gimple_seq *);
     692              : extern void locate_and_pad_parm (machine_mode, tree, int, int, int,
     693              :                                  tree, struct args_size *,
     694              :                                  struct locate_and_pad_arg_data *);
     695              : extern void generate_setjmp_warnings (void);
     696              : 
     697              : /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
     698              :    and create duplicate blocks.  */
     699              : extern void reorder_blocks (void);
     700              : extern void clear_block_marks (tree);
     701              : extern tree blocks_nreverse (tree);
     702              : extern tree block_chainon (tree, tree);
     703              : 
     704              : /* Set BLOCK_NUMBER for all the blocks in FN.  */
     705              : extern void number_blocks (tree);
     706              : 
     707              : /* cfun shouldn't be set directly; use one of these functions instead.  */
     708              : extern void set_cfun (struct function *new_cfun, bool force = false);
     709              : extern void push_cfun (struct function *new_cfun);
     710              : extern void pop_cfun (void);
     711              : extern void push_function_decl (tree, bool = false);
     712              : extern void pop_function_decl (void);
     713              : 
     714              : extern int get_next_funcdef_no (void);
     715              : extern int get_last_funcdef_no (void);
     716              : extern void allocate_struct_function (tree, bool);
     717              : extern void push_struct_function (tree fndecl, bool = false);
     718              : extern void push_dummy_function (bool);
     719              : extern void pop_dummy_function (void);
     720              : extern void init_dummy_function_start (void);
     721              : extern void init_function_start (tree);
     722              : extern void stack_protect_epilogue (void);
     723              : extern void expand_function_start (tree);
     724              : extern void expand_dummy_function_end (void);
     725              : 
     726              : extern void thread_prologue_and_epilogue_insns (void);
     727              : extern void diddle_return_value (void (*)(rtx, void*), void*);
     728              : extern void clobber_return_register (void);
     729              : extern void expand_function_end (void);
     730              : extern rtx get_arg_pointer_save_area (void);
     731              : extern void maybe_copy_prologue_epilogue_insn (rtx, rtx);
     732              : extern bool prologue_contains (const rtx_insn *);
     733              : extern bool epilogue_contains (const rtx_insn *);
     734              : extern bool prologue_epilogue_contains (const rtx_insn *);
     735              : extern void record_prologue_seq (rtx_insn *);
     736              : extern void record_epilogue_seq (rtx_insn *);
     737              : extern void emit_return_into_block (bool simple_p, basic_block bb);
     738              : extern void set_return_jump_label (rtx_insn *);
     739              : extern bool active_insn_between (rtx_insn *head, rtx_insn *tail);
     740              : extern vec<edge> convert_jumps_to_returns (basic_block last_bb, bool simple_p,
     741              :                                            vec<edge> unconverted);
     742              : extern basic_block emit_return_for_exit (edge exit_fallthru_edge,
     743              :                                          bool simple_p);
     744              : extern void reposition_prologue_and_epilogue_notes (void);
     745              : extern poly_int64 get_stack_dynamic_offset ();
     746              : 
     747              : /* Returns the name of the current function.  */
     748              : extern const char *fndecl_name (tree);
     749              : extern const char *function_name (const function *);
     750              : extern const char *current_function_name (void);
     751              : 
     752              : extern void used_types_insert (tree);
     753              : 
     754              : extern bool currently_expanding_function_start;
     755              : 
     756              : #endif  /* GCC_FUNCTION_H */
        

Generated by: LCOV version 2.4-beta

The LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite is run with the built compiler.