LCOV - code coverage report
Current view: top level - gcc - emit-rtl.h (source / functions) Coverage Total Hit
Test: gcc.info Lines: 88.5 % 26 23
Test Date: 2026-02-28 14:20:25 Functions: 75.0 % 4 3
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Exported functions from emit-rtl.cc
       2              :    Copyright (C) 2004-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #ifndef GCC_EMIT_RTL_H
      21              : #define GCC_EMIT_RTL_H
      22              : 
      23              : class temp_slot;
      24              : typedef class temp_slot *temp_slot_p;
      25              : class predefined_function_abi;
      26              : namespace rtl_ssa { class function_info; }
      27              : 
/* Information maintained about the RTL representation of incoming
   arguments.  */
struct GTY(()) incoming_args {
  /* Number of bytes of args popped by function being compiled on its return.
     Zero if no bytes are to be popped.
     May affect compilation of return insn or of function epilogue.  */
  poly_int64 pops_args;

  /* If function's args have a fixed size, this is that size, in bytes.
     Otherwise, it is -1.
     May affect compilation of return insn or of function epilogue.  */
  poly_int64 size;

  /* # bytes the prologue should push and pretend that the caller pushed them.
     The prologue must do this, but only if parms can be passed in
     registers.  */
  int pretend_args_size;

  /* This is the offset from the arg pointer to the place where the first
     anonymous arg can be found, if there is one.  */
  rtx arg_offset_rtx;

  /* Quantities of various kinds of registers
     used for the current function's args.  */
  CUMULATIVE_ARGS info;

  /* The arg pointer hard register, or the pseudo into which it was copied.  */
  rtx internal_arg_pointer;
};
      56              : 

/* Data structures maintained for the currently processed function in
   RTL form.  */
struct GTY(()) rtl_data {
  /* Initialize the stack-alignment fields below; implemented out of
     line (presumably in emit-rtl.cc, per this header's purpose —
     TODO confirm).  */
  void init_stack_alignment ();

  struct expr_status expr;
  struct emit_status emit;
  struct varasm_status varasm;
  struct incoming_args args;
  struct function_subsections subsections;
  struct rtl_eh eh;

  /* The ABI of the function, i.e. the interface it presents to its callers.
     This is the ABI that should be queried to see which registers the
     function needs to save before it uses them.

     Other functions (including those called by this function) might use
     different ABIs.  */
  const predefined_function_abi *GTY((skip)) abi;

  /* The function's rtl-ssa form, if it has been built.  Not traversed
     by the garbage collector (GTY((skip))).  */
  rtl_ssa::function_info *GTY((skip)) ssa;

  /* For function.cc  */

  /* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
     defined, the needed space is pushed by the prologue.  */
  poly_int64 outgoing_args_size;

  /* If nonzero, an RTL expression for the location at which the current
     function returns its result.  If the current function returns its
     result in a register, current_function_return_rtx will always be
     the hard register containing the result.  */
  rtx return_rtx;

  /* Vector of initial-value pairs.  Each pair consists of a pseudo
     register of appropriate mode that stores the initial value of a hard
     register REGNO, and that hard register itself.  */
  /* ??? This could be a VEC but there is currently no way to define an
         opaque VEC type.  */
  struct initial_value_struct *hard_reg_initial_vals;

  /* A variable living at the top of the frame that holds a known value.
     Used for detecting stack clobbers.  */
  tree stack_protect_guard;

  /* The __stack_chk_guard variable or expression holding the stack
     protector canary value.  */
  tree stack_protect_guard_decl;

  /* List (chain of INSN_LIST) of labels heading the current handlers for
     nonlocal gotos.  */
  rtx_insn_list *x_nonlocal_goto_handler_labels;

  /* Label that will go on function epilogue.
     Jumping to this label serves as a "return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx_code_label *x_return_label;

  /* Label that will go on the end of function epilogue.
     Jumping to this label serves as a "naked return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx_code_label *x_naked_return_label;

  /* List (chain of EXPR_LISTs) of all stack slots in this function.
     Made for the sake of unshare_all_rtl.  */
  vec<rtx, va_gc> *x_stack_slot_list;

  /* List of empty areas in the stack frame.  */
  class frame_space *frame_space_list;

  /* Place after which to insert the tail_recursion_label if we need one.  */
  rtx_note *x_stack_check_probe_note;

  /* Location at which to save the argument pointer if it will need to be
     referenced.  There are two cases where this is done: if nonlocal gotos
     exist, or if vars stored at an offset from the argument pointer will be
     needed by inner routines.  */
  rtx x_arg_pointer_save_area;

  /* Dynamic Realign Argument Pointer used for realigning stack.  */
  rtx drap_reg;

  /* Offset to end of allocated area of stack frame.
     If stack grows down, this is the address of the last stack slot allocated.
     If stack grows up, this is the address for the next slot.  */
  poly_int64 x_frame_offset;

  /* The function's FUNCTION_BEG note.  */
  rtx_insn *x_function_beg_insn;

  /* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
  rtx_insn *x_parm_birth_insn;

  /* List of all used temporaries allocated, by level.  */
  vec<temp_slot_p, va_gc> *x_used_temp_slots;

  /* List of available temp slots.  */
  class temp_slot *x_avail_temp_slots;

  /* Current nesting level for temporaries.  */
  int x_temp_slot_level;

  /* The largest alignment needed on the stack, including requirement
     for outgoing stack alignment.  */
  unsigned int stack_alignment_needed;

  /* Preferred alignment of the end of stack frame, which is preferred
     to call other functions.  */
  unsigned int preferred_stack_boundary;

  /* The minimum alignment of parameter stack.  */
  unsigned int parm_stack_boundary;

  /* The largest alignment of slot allocated on the stack.  */
  unsigned int max_used_stack_slot_alignment;

  /* The stack alignment estimated before reload, with consideration of
     following factors:
     1. Alignment of local stack variables (max_used_stack_slot_alignment)
     2. Alignment requirement to call other functions
        (preferred_stack_boundary)
     3. Alignment of non-local stack variables but might be spilled in
        local stack.  */
  unsigned int stack_alignment_estimated;

  /* How many NOP insns to place at each function entry by default.  */
  unsigned short patch_area_size;

  /* How far the real asm entry point is into this area.  */
  unsigned short patch_area_entry;

  /* For reorg.  */

  /* Nonzero if function being compiled called builtin_return_addr or
     builtin_frame_address with nonzero count.  */
  bool accesses_prior_frames;

  /* Nonzero if the function calls __builtin_eh_return.  */
  bool calls_eh_return;

  /* Nonzero if function saves all registers, e.g. if it has a nonlocal
     label that can reach the exit block via non-exceptional paths. */
  bool saves_all_registers;

  /* Nonzero if function being compiled has nonlocal gotos to parent
     function.  */
  bool has_nonlocal_goto;

  /* Nonzero if function being compiled has an asm statement.  */
  bool has_asm_statement;

  /* This bit is used by the exception handling logic.  It is set if all
     calls (if any) are sibling calls.  Such functions do not have to
     have EH tables generated, as they cannot throw.  A call to such a
     function, however, should be treated as throwing if any of its callees
     can throw.  */
  bool all_throwers_are_sibcalls;

  /* Nonzero if stack limit checking should be enabled in the current
     function.  */
  bool limit_stack;

  /* Nonzero if profiling code should be generated.  */
  bool profile;

  /* Nonzero if the current function uses the constant pool.  */
  bool uses_const_pool;

  /* Nonzero if the current function uses pic_offset_table_rtx.  */
  bool uses_pic_offset_table;

  /* Nonzero if the current function needs an lsda for exception handling.  */
  bool uses_eh_lsda;

  /* Set when the tail call has been produced.  */
  bool tail_call_emit;

  /* Nonzero if code to initialize arg_pointer_save_area has been emitted.  */
  bool arg_pointer_save_area_init;

  /* Nonzero if current function must be given a frame pointer.
     Set in reload1.cc or lra-eliminations.cc if anything is allocated
     on the stack there.  */
  bool frame_pointer_needed;

  /* When set, expand should optimize for speed.  */
  bool maybe_hot_insn_p;

  /* Nonzero if function stack realignment is needed.  This flag may be
     set twice: before and after reload.  It is set before reload wrt
     stack alignment estimation before reload.  It will be changed after
     reload if by then criteria of stack realignment is different.
     The value set after reload is the accurate one and is finalized.  */
  bool stack_realign_needed;

  /* Nonzero if function stack realignment is tried.  This flag is set
     only once before reload.  It affects register elimination.  This
     is used to generate DWARF debug info for stack variables.  */
  bool stack_realign_tried;

  /* Nonzero if function being compiled needs dynamic realigned
     argument pointer (drap) if stack needs realigning.  */
  bool need_drap;

  /* Nonzero if function stack realignment estimation is done, namely
     stack_realign_needed flag has been set before reload wrt estimated
     stack alignment info.  */
  bool stack_realign_processed;

  /* Nonzero if function stack realignment has been finalized, namely
     stack_realign_needed flag has been set and finalized after reload.  */
  bool stack_realign_finalized;

  /* True if dbr_schedule has already been called for this function.  */
  bool dbr_scheduled_p;

  /* True if current function cannot throw.  Unlike
     TREE_NOTHROW (current_function_decl) it is set even for overwritable
     function where currently compiled version of it is nothrow.  */
  bool nothrow;

  /* True if we performed shrink-wrapping for the current function.  */
  bool shrink_wrapped;

  /* True if we performed shrink-wrapping for separate components for
     the current function.  */
  bool shrink_wrapped_separate;

  /* Nonzero if function being compiled doesn't modify the stack pointer
     (ignoring the prologue and epilogue).  This is only valid after
     pass_stack_ptr_mod has run.  */
  bool sp_is_unchanging;

  /* True if the stack pointer is clobbered by asm statement.  */
  bool sp_is_clobbered_by_asm;

  /* Nonzero if function being compiled doesn't contain any calls
     (ignoring the prologue and epilogue).  This is set prior to
     register allocation in IRA and is valid for the remaining
     compiler passes.  */
  bool is_leaf;

  /* Nonzero if the function being compiled is a leaf function which only
     uses leaf registers.  This is valid after reload (specifically after
     sched2) and is useful only if the port defines LEAF_REGISTERS.  */
  bool uses_only_leaf_regs;

  /* Nonzero if the function being compiled has undergone hot/cold partitioning
     (under flag_reorder_blocks_and_partition) and has at least one cold
     block.  */
  bool has_bb_partition;

  /* Nonzero if the function being compiled has completed the bb reordering
     pass.  */
  bool bb_reorder_complete;

  /* Like regs_ever_live, but 1 if a reg is set or clobbered from an
     asm.  Unlike regs_ever_live, elements of this array corresponding
     to eliminable regs (like the frame pointer) are set if an asm
     sets them.  */
  HARD_REG_SET asm_clobbers;

  /* All hard registers that need to be zeroed at the return of the routine.  */
  HARD_REG_SET must_be_zero_on_return;

  /* The highest address seen during shorten_branches.  */
  int max_insn_address;
};
     325              : 
/* Short-hand accessors for the fields of the current function's
   rtl_data (accessed through CRTL below).  The x_ prefix on the
   underlying fields marks them as reachable only via these macros.  */
#define return_label (crtl->x_return_label)
#define naked_return_label (crtl->x_naked_return_label)
#define stack_slot_list (crtl->x_stack_slot_list)
#define function_beg_insn (crtl->x_function_beg_insn)
#define parm_birth_insn (crtl->x_parm_birth_insn)
#define frame_offset (crtl->x_frame_offset)
#define stack_check_probe_note (crtl->x_stack_check_probe_note)
#define arg_pointer_save_area (crtl->x_arg_pointer_save_area)
#define used_temp_slots (crtl->x_used_temp_slots)
#define avail_temp_slots (crtl->x_avail_temp_slots)
#define temp_slot_level (crtl->x_temp_slot_level)
#define nonlocal_goto_handler_labels (crtl->x_nonlocal_goto_handler_labels)
#define frame_pointer_needed (crtl->frame_pointer_needed)
/* True if stack realignment is handled via the frame pointer
   (respectively via a dynamic realign argument pointer).  */
#define stack_realign_fp (crtl->stack_realign_needed && !crtl->need_drap)
#define stack_realign_drap (crtl->stack_realign_needed && crtl->need_drap)

/* The single, statically allocated rtl_data instance for the function
   being compiled.  */
extern GTY(()) struct rtl_data x_rtl;

/* Accessor to RTL datastructures.  We keep them statically allocated now since
   we never keep multiple functions.  For threaded compiler we might however
   want to do differently.  */
#define crtl (&x_rtl)
     348              : 
/* Return whether two MEM_ATTRs are equal.  */
bool mem_attrs_eq_p (const class mem_attrs *, const class mem_attrs *);

/* Set the alias set of MEM to SET.  */
extern void set_mem_alias_set (rtx, alias_set_type);

/* Set the alignment of MEM to ALIGN bits.  */
extern void set_mem_align (rtx, unsigned int);

/* Set the address space of MEM to ADDRSPACE.  */
extern void set_mem_addr_space (rtx, addr_space_t);

/* Set the expr for MEM to EXPR.  */
extern void set_mem_expr (rtx, tree);

/* Set the offset for MEM to OFFSET.  */
extern void set_mem_offset (rtx, poly_int64);

/* Clear the offset recorded for MEM.  */
extern void clear_mem_offset (rtx);

/* Set the size for MEM to SIZE.  */
extern void set_mem_size (rtx, poly_int64);

/* Clear the size recorded for MEM.  */
extern void clear_mem_size (rtx);

/* Set the attributes for MEM appropriate for a spill slot.  */
extern void set_mem_attrs_for_spill (rtx);
/* Return the decl used for spill slots; presumably created on first
   use — TODO confirm against emit-rtl.cc.  */
extern tree get_spill_slot_decl (bool);

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */
extern rtx replace_equiv_address (rtx, rtx, bool = false);

/* Likewise, but the reference is not required to be valid.  */
extern rtx replace_equiv_address_nv (rtx, rtx, bool = false);

/* Miscellaneous RTL construction/copying helpers exported from
   emit-rtl.cc.  */
extern rtx gen_blockage (void);
extern rtvec gen_rtvec (int, ...);
extern rtx copy_insn_1 (rtx);
extern rtx copy_insn (rtx);
extern rtx_insn *copy_delay_slot_insn (rtx_insn *);
extern rtx gen_int_mode (poly_int64, machine_mode);
extern rtx_insn *emit_copy_of_insn_after (rtx_insn *, rtx_insn *);
extern void set_reg_attrs_from_value (rtx, rtx);
extern void set_reg_attrs_for_parm (rtx, rtx);
extern void set_reg_attrs_for_decl_rtl (tree t, rtx x);
extern void adjust_reg_mode (rtx, machine_mode);
extern bool mem_expr_equal_p (const_tree, const_tree);
extern rtx gen_int_shift_amount (machine_mode, poly_int64);

/* Return whether a memory barrier is needed for the given memory model
   and pre/post flag — NOTE(review): semantics inferred from the name;
   confirm in emit-rtl.cc.  */
extern bool need_atomic_barrier_p (enum memmodel, bool);
     404              : 
     405              : /* Return the current sequence.  */
     406              : 
     407              : inline struct sequence_stack *
     408          952 : get_current_sequence (void)
     409              : {
     410            0 :   return &crtl->emit.seq;
     411              : }
     412              : 
     413              : /* Return the outermost sequence.  */
     414              : 
     415              : inline struct sequence_stack *
     416          952 : get_topmost_sequence (void)
     417              : {
     418          952 :   struct sequence_stack *seq, *top;
     419              : 
     420          952 :   seq = get_current_sequence ();
     421         1904 :   do
     422              :     {
     423         1904 :       top = seq;
     424         1904 :       seq = seq->next;
     425         1904 :     } while (seq);
     426          952 :   return top;
     427              : }
     428              : 
     429              : /* Return the first insn of the current sequence or current function.  */
     430              : 
     431              : inline rtx_insn *
     432   1200518578 : get_insns (void)
     433              : {
     434   1014014501 :   return get_current_sequence ()->first;
     435              : }
     436              : 
     437              : /* Specify a new insn as the first in the chain.  */
     438              : 
     439              : inline void
     440    549602232 : set_first_insn (rtx_insn *insn)
     441              : {
     442    683298837 :   gcc_checking_assert (!insn || !PREV_INSN (insn));
     443      6722004 :   get_current_sequence ()->first = insn;
     444      5178834 : }
     445              : 
     446              : /* Return the last insn emitted in current sequence or current function.  */
     447              : 
     448              : inline rtx_insn *
     449   1282077488 : get_last_insn (void)
     450              : {
     451   1097061464 :   return get_current_sequence ()->last;
     452              : }
     453              : 
     454              : /* Specify a new insn as the last in the chain.  */
     455              : 
     456              : inline void
     457    791088570 : set_last_insn (rtx_insn *insn)
     458              : {
     459   1166419554 :   gcc_checking_assert (!insn || !NEXT_INSN (insn));
     460      5661947 :   get_current_sequence ()->last = insn;
     461    604454960 : }
     462              : 
     463              : /* Return a number larger than any instruction's uid in this function.  */
     464              : 
     465              : inline int
     466    728181390 : get_max_uid (void)
     467              : {
     468    726525938 :   return crtl->emit.x_cur_insn_uid;
     469              : }
     470              : 
/* Predicates and generators for constant/duplicated vector rtxes.  */
extern bool valid_for_const_vector_p (machine_mode, rtx);
extern rtx gen_const_vec_duplicate (machine_mode, rtx);
extern rtx gen_vec_duplicate (machine_mode, rtx);

/* Generators for vector series (BASE, BASE+STEP, ...) rtxes.  */
extern rtx gen_const_vec_series (machine_mode, rtx, rtx);
extern rtx gen_vec_series (machine_mode, rtx, rtx);

extern void set_decl_incoming_rtl (tree, rtx, bool);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  */
extern rtx change_address (rtx, machine_mode, rtx);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  */
#define adjust_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 0, 0)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 0, 0)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  Assume that it's
   for a bitfield and conservatively drop the underlying object if we
   cannot be sure to stay within its bounds.  */
#define adjust_bitfield_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, 0)

/* As for adjust_bitfield_address, but specify that the width of
   BLKmode accesses is SIZE bytes.  */
#define adjust_bitfield_address_size(MEMREF, MODE, OFFSET, SIZE) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, SIZE)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_bitfield_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 1, 0)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   increased by OFFSET bytes from MEMREF.  */
#define adjust_automodify_address(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)

/* Workers for the macros above; see the macro comments for the meaning
   of the extra flag arguments.  */
extern rtx adjust_address_1 (rtx, machine_mode, poly_int64, int, int,
                             int, poly_int64);
extern rtx adjust_automodify_address_1 (rtx, machine_mode, rtx,
                                        poly_int64, int);
/* Class wrapping emit_autoinc which allows derived classes to control
   how reload pseudos are created.  */
struct address_reload_context
{
  /* Can be overriden by derived classes.  By default a fresh
     Pmode pseudo is allocated for each reload.  */
  virtual rtx get_reload_reg () const { return gen_reg_rtx (Pmode); }

  /* Emit insns to reload VALUE into a new register.  VALUE is an
     auto-increment or auto-decrement RTX whose operand is a register or
     memory location; so reloading involves incrementing that location.

     AMOUNT is the number to increment or decrement by (always
     positive and ignored for POST_MODIFY/PRE_MODIFY).

     Return a pseudo containing the result.  */
  rtx emit_autoinc (rtx value, poly_int64 amount);
};
     543              : 
/* Return a memory reference like MEM, but with the address reloaded into a
   pseudo register.  */
extern rtx force_reload_address (rtx mem);

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */
extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);

/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */
extern void set_mem_attributes (rtx, tree, int);

/* Similar, except that BITPOS has not yet been applied to REF, so if
   we alter MEM_OFFSET according to T then we should subtract BITPOS
   expecting that it'll be added back in later.  */
extern void set_mem_attributes_minus_bitpos (rtx, tree, int, poly_int64);

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */
extern int get_mem_align_offset (rtx, unsigned int);

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and adjusted by OFFSET.  */
extern rtx widen_memory_access (rtx, machine_mode, poly_int64);

/* Bump the recorded maximum label number if X's number exceeds it —
   NOTE(review): behavior inferred from the name; confirm in
   emit-rtl.cc.  */
extern void maybe_set_max_label_num (rtx_code_label *x);

#endif /* GCC_EMIT_RTL_H */
        

Generated by: LCOV version 2.4-beta

The LCOV profile was generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite was run with the built compiler.