Line data Source code
1 : /* Register Transfer Language (RTL) definitions for GCC
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #ifndef GCC_RTL_H
21 : #define GCC_RTL_H
22 :
23 : /* This file is occasionally included by generator files which expect
24 : machmode.h and other files to exist and would not normally have been
25 : included by coretypes.h. */
26 : #ifdef GENERATOR_FILE
27 : #include "real.h"
28 : #include "fixed-value.h"
29 : #include "statistics.h"
30 : #include "vec.h"
31 : #include "hash-table.h"
32 : #include "hash-set.h"
33 : #include "input.h"
34 : #include "is-a.h"
35 : #endif /* GENERATOR_FILE */
36 :
37 : #include "hard-reg-set.h"
38 :
39 : class predefined_function_abi;
40 :
41 : /* Value used by some passes to "recognize" noop moves as valid
42 : instructions. */
43 : #define NOOP_MOVE_INSN_CODE INT_MAX
44 :
/* Register Transfer Language EXPRESSIONS CODES */

#define RTX_CODE	enum rtx_code
enum rtx_code : unsigned {

/* Expand rtl.def's DEF_RTL_EXPR entries into enumerators, one per code.  */
#define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS)   ENUM ,
#include "rtl.def"		/* rtl expressions are documented here */
#undef DEF_RTL_EXPR

  LAST_AND_UNUSED_RTX_CODE};	/* A convenient way to get a value for
				   NUM_RTX_CODE.
				   Assumes default enum value assignment.  */

/* The cast here, saves many elsewhere.  */
#define NUM_RTX_CODE ((int) LAST_AND_UNUSED_RTX_CODE)

/* Similar, but since generator files get more entries... */
#ifdef GENERATOR_FILE
# define NON_GENERATOR_NUM_RTX_CODE ((int) MATCH_OPERAND)
#endif

/* Width in bits of the `code' bitfield in rtx_def below; must be wide
   enough to hold every enum rtx_code value.  */
#define RTX_CODE_BITSIZE 8
67 :
/* Register Transfer Language EXPRESSIONS CODE CLASSES */

enum rtx_class {
  /* We check bit 0-1 of some rtx class codes in the predicates below.  */

  /* Bit 0 = comparison if 0, arithmetic is 1
     Bit 1 = 1 if commutative.  */
  RTX_COMPARE,		/* 0 */
  RTX_COMM_COMPARE,
  RTX_BIN_ARITH,
  RTX_COMM_ARITH,

  /* Must follow the four preceding values.  */
  RTX_UNARY,		/* 4 */

  RTX_EXTRA,
  RTX_MATCH,
  RTX_INSN,

  /* Bit 0 = 1 if constant.  */
  RTX_OBJ,		/* 8 */
  RTX_CONST_OBJ,

  RTX_TERNARY,
  RTX_BITFIELD_OPS,
  RTX_AUTOINC
};

/* Category tests built on the bit layout documented above: a class CLS
   belongs to a category when (CLS & CATEGORY_MASK) == CATEGORY_RESULT.  */
#define RTX_OBJ_MASK (~1)
#define RTX_OBJ_RESULT (RTX_OBJ & RTX_OBJ_MASK)
#define RTX_COMPARE_MASK (~1)
#define RTX_COMPARE_RESULT (RTX_COMPARE & RTX_COMPARE_MASK)
#define RTX_ARITHMETIC_MASK (~1)
#define RTX_ARITHMETIC_RESULT (RTX_COMM_ARITH & RTX_ARITHMETIC_MASK)
#define RTX_BINARY_MASK (~3)
#define RTX_BINARY_RESULT (RTX_COMPARE & RTX_BINARY_MASK)
#define RTX_COMMUTATIVE_MASK (~2)
#define RTX_COMMUTATIVE_RESULT (RTX_COMM_COMPARE & RTX_COMMUTATIVE_MASK)
#define RTX_NON_COMMUTATIVE_RESULT (RTX_COMPARE & RTX_COMMUTATIVE_MASK)
107 :
/* Per-code property tables, each indexed by an rtx code.  */

/* Number of operands of each code.  */
extern const unsigned char rtx_length[NUM_RTX_CODE];
#define GET_RTX_LENGTH(CODE)		(rtx_length[(int) (CODE)])

/* Printable name of each code.  */
extern const char * const rtx_name[NUM_RTX_CODE];
#define GET_RTX_NAME(CODE)		(rtx_name[(int) (CODE)])

/* Format string describing the operand types of each code.  */
extern const char * const rtx_format[NUM_RTX_CODE];
#define GET_RTX_FORMAT(CODE)		(rtx_format[(int) (CODE)])

/* Class (see enum rtx_class above) of each code.  */
extern const enum rtx_class rtx_class[NUM_RTX_CODE];
#define GET_RTX_CLASS(CODE)		(rtx_class[(int) (CODE)])

/* True if CODE is part of the insn chain (i.e. has INSN_UID, PREV_INSN
   and NEXT_INSN fields).  */
#define INSN_CHAIN_CODE_P(CODE) IN_RANGE (CODE, DEBUG_INSN, NOTE)

extern const unsigned char rtx_code_size[NUM_RTX_CODE];
extern const unsigned char rtx_next[NUM_RTX_CODE];
126 :
/* The flags and bitfields of an ADDR_DIFF_VEC.  BASE is the base label
   relative to which the offsets are calculated, as explained in rtl.def.
   Stored inside an rtx operand via rtunion::rt_addr_diff_vec_flags.  */
struct addr_diff_vec_flags
{
  /* Set at the start of shorten_branches - ONLY WHEN OPTIMIZING - : */
  unsigned min_align: 8;
  /* Flags: */
  unsigned base_after_vec: 1; /* BASE is after the ADDR_DIFF_VEC.  */
  unsigned min_after_vec: 1;  /* minimum address target label is
				 after the ADDR_DIFF_VEC.  */
  unsigned max_after_vec: 1;  /* maximum address target label is
				 after the ADDR_DIFF_VEC.  */
  unsigned min_after_base: 1; /* minimum address target label is
				 after BASE.  */
  unsigned max_after_base: 1; /* maximum address target label is
				 after BASE.  */
  /* Set by the actual branch shortening process - ONLY WHEN OPTIMIZING - : */
  unsigned offset_unsigned: 1; /* offsets have to be treated as unsigned.  */
  unsigned : 2;
  unsigned scale : 8;
};
148 :
/* Structure used to describe the attributes of a MEM.  These are hashed
   so MEMs that have the same attributes share a data structure.  This means
   they cannot be modified in place.  */
class GTY(()) mem_attrs
{
public:
  mem_attrs ();

  /* The expression that the MEM accesses, or null if not known.
     This expression might be larger than the memory reference itself.
     (In other words, the MEM might access only part of the object.)  */
  tree expr;

  /* The offset of the memory reference from the start of EXPR.
     Only valid if OFFSET_KNOWN_P.  */
  poly_int64 offset;

  /* The size of the memory reference in bytes.  Only valid if
     SIZE_KNOWN_P.  */
  poly_int64 size;

  /* The alias set of the memory reference.  */
  alias_set_type alias;

  /* The alignment of the reference in bits.  Always a multiple of
     BITS_PER_UNIT.  Note that EXPR may have a stricter alignment
     than the memory reference itself.  */
  unsigned int align;

  /* The address space that the memory reference uses.  */
  unsigned char addrspace;

  /* True if OFFSET is known.  */
  bool offset_known_p;

  /* True if SIZE is known.  */
  bool size_known_p;
};
187 :
/* Structure used to describe the attributes of a REG in similar way as
   mem_attrs does for MEM above.  Note that the OFFSET field is calculated
   in the same way as for mem_attrs, rather than in the same way as a
   SUBREG_BYTE.  For example, if a big-endian target stores a byte
   object in the low part of a 4-byte register, the OFFSET field
   will be -3 rather than 0.  */

class GTY((for_user)) reg_attrs {
public:
  tree decl;			/* decl corresponding to REG.  */
  poly_int64 offset;		/* Offset from start of DECL.  */
};
200 :
/* Common union for an element of an rtx.  Which member is active for a
   given operand is determined by the rtx's code and the operand's
   position, per the format strings defined in rtl.def.  */

union rtunion
{
  int rt_int;
  unsigned int rt_uint;
  location_t rt_loc;
  poly_uint16 rt_subreg;
  const char *rt_str;
  rtx rt_rtx;
  rtvec rt_rtvec;
  machine_mode rt_type;
  addr_diff_vec_flags rt_addr_diff_vec_flags;
  struct cselib_val *rt_cselib;
  tree rt_tree;
  basic_block rt_bb;
  mem_attrs *rt_mem;
  class constant_descriptor_rtx *rt_constant;
  struct dw_cfi_node *rt_cfi;
};
221 :
/* Describes the properties of a REG.  Used as the operand storage of a
   REG rtx (see union u in rtx_def below).  */
struct GTY(()) reg_info {
  /* The value of REGNO.  */
  unsigned int regno;

  /* The value of REG_NREGS.  */
  unsigned int nregs : 8;
  unsigned int unused : 24;	/* For future expansion.  */

  /* The value of REG_ATTRS.  */
  reg_attrs *attrs;
};
234 :
/* This structure remembers the position of a SYMBOL_REF within an
   object_block structure.  A SYMBOL_REF only provides this information
   if SYMBOL_REF_HAS_BLOCK_INFO_P is true.  */
struct GTY(()) block_symbol {
  /* The usual SYMBOL_REF fields.  */
  rtunion GTY ((skip)) fld[2];

  /* The block that contains this object.  */
  struct object_block *block;

  /* The offset of this object from the start of its block.  It is negative
     if the symbol has not yet been assigned an offset.  */
  HOST_WIDE_INT offset;
};
249 :
/* Describes a group of objects that are to be placed together in such
   a way that their relative positions are known.  */
struct GTY((for_user)) object_block {
  /* The section in which these objects should be placed.  */
  section *sect;

  /* The alignment of the first object, measured in bits.  */
  unsigned int alignment;

  /* The total size of the objects, measured in bytes.  */
  HOST_WIDE_INT size;

  /* The SYMBOL_REFs for each object.  The vector is sorted in
     order of increasing offset and the following conditions will
     hold for each element X:

	 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
	 !SYMBOL_REF_ANCHOR_P (X)
	 SYMBOL_REF_BLOCK (X) == [address of this structure]
	 SYMBOL_REF_BLOCK_OFFSET (X) >= 0.  */
  vec<rtx, va_gc> *objects;

  /* All the anchor SYMBOL_REFs used to address these objects, sorted
     in order of increasing offset, and then increasing TLS model.
     The following conditions will hold for each element X in this vector:

	 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
	 SYMBOL_REF_ANCHOR_P (X)
	 SYMBOL_REF_BLOCK (X) == [address of this structure]
	 SYMBOL_REF_BLOCK_OFFSET (X) >= 0.  */
  vec<rtx, va_gc> *anchors;
};
282 :
/* Trailing array of HOST_WIDE_INTs; the real element count is recorded
   in the containing rtx's u2.num_elem (see CWI_GET_NUM_ELEM below) and
   the storage is over-allocated past [1], hence "variable_size".  */
struct GTY((variable_size)) hwivec_def {
  HOST_WIDE_INT elem[1];
};

/* Number of elements of the HWIVEC if RTX is a CONST_WIDE_INT.  */
#define CWI_GET_NUM_ELEM(RTX) \
  ((int)RTL_FLAG_CHECK1("CWI_GET_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem)
#define CWI_PUT_NUM_ELEM(RTX, NUM) \
  (RTL_FLAG_CHECK1("CWI_PUT_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem = (NUM))

/* Payload of a CONST_POLY_INT: NUM_POLY_INT_COEFFS wide-int
   coefficients stored as trailing data.  */
struct GTY((variable_size)) const_poly_int_def {
  trailing_wide_ints<NUM_POLY_INT_COEFFS> coeffs;
};
296 :
/* RTL expression ("rtx").  */

/* The GTY "desc" and "tag" options below are a kludge: we need a desc
   field for gengtype to recognize that inheritance is occurring,
   so that all subclasses are redirected to the traversal hook for the
   base class.
   However, all of the fields are in the base class, and special-casing
   is at work.  Hence we use desc and tag of 0, generating a switch
   statement of the form:
     switch (0)
       {
       case 0: // all the work happens here
      }
   in order to work with the existing special-casing in gengtype.  */

struct GTY((desc("0"), tag("0"),
	    chain_next ("RTX_NEXT (&%h)"),
	    chain_prev ("RTX_PREV (&%h)"))) rtx_def {
  /* The kind of value the expression has.  */
  ENUM_BITFIELD(machine_mode) mode : MACHINE_MODE_BITSIZE;

  /* The kind of expression this is.  */
  ENUM_BITFIELD(rtx_code) code: RTX_CODE_BITSIZE;

  /* The meaning of each of the following flag bits depends on the code
     of the containing rtx, as listed in the comments.  */

  /* 1 in a MEM if we should keep the alias set for this mem unchanged
     when we access a component.
     1 in a JUMP_INSN if it is a crossing jump.
     1 in a CALL_INSN if it is a sibling call.
     1 in a SET that is for a return.
     In a CODE_LABEL, part of the two-bit alternate entry field.
     1 in a CONCAT is VAL_EXPR_IS_COPIED in var-tracking.cc.
     1 in a VALUE is SP_BASED_VALUE_P in cselib.cc.
     1 in a SUBREG generated by LRA for reload insns.
     1 in a REG if this is a static chain register.
     Dumped as "/j" in RTL dumps.  */
  unsigned int jump : 1;
  /* In a CODE_LABEL, part of the two-bit alternate entry field.
     1 in a MEM if it cannot trap.
     1 in a CALL_INSN logically equivalent to
       ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P.
     1 in a VALUE is SP_DERIVED_VALUE_P in cselib.cc.
     Dumped as "/c" in RTL dumps.  */
  unsigned int call : 1;
  /* 1 in a REG, MEM, or CONCAT if the value is set at most once, anywhere.
     1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
     1 in a SYMBOL_REF if it addresses something in the per-function
     constants pool.
     1 in a CALL_INSN logically equivalent to ECF_CONST and TREE_READONLY.
     1 in a NOTE, or EXPR_LIST for a const call.
     1 in a JUMP_INSN of an annulling branch.
     1 in a CONCAT is VAL_EXPR_IS_CLOBBERED in var-tracking.cc.
     1 in a preserved VALUE is PRESERVED_VALUE_P in cselib.cc.
     1 in a clobber temporarily created for LRA.
     Dumped as "/u" in RTL dumps.  */
  unsigned int unchanging : 1;
  /* 1 in a MEM or ASM_OPERANDS expression if the memory reference is volatile.
     1 in an INSN, CALL_INSN, JUMP_INSN, CODE_LABEL, BARRIER, or NOTE
     if it has been deleted.
     1 in a REG expression if corresponds to a variable declared by the user,
     0 for an internally generated temporary.
     1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
     1 in a LABEL_REF, REG_LABEL_TARGET or REG_LABEL_OPERAND note for a
     non-local label.
     In a SYMBOL_REF, this flag is used for machine-specific purposes.
     In a PREFETCH, this flag indicates that it should be considered a
     scheduling barrier.
     1 in a CONCAT is VAL_NEEDS_RESOLUTION in var-tracking.cc.
     Dumped as "/v" in RTL dumps.  */
  unsigned int volatil : 1;
  /* 1 in a REG if the register is used only in exit code of a loop.
     1 in a SUBREG expression if was generated from a variable with a
     promoted mode.
     1 in a CODE_LABEL if the label is used for nonlocal gotos
     and must not be deleted even if its count is zero.
     1 in an INSN, JUMP_INSN or CALL_INSN if this insn must be scheduled
     together with the preceding insn.  Valid only within sched.
     1 in an INSN, JUMP_INSN, or CALL_INSN if insn is in a delay slot and
     from the target of a branch.  Valid from reorg until end of compilation;
     cleared before used.

     The name of the field is historical.  It used to be used in MEMs
     to record whether the MEM accessed part of a structure.
     Dumped as "/s" in RTL dumps.  */
  unsigned int in_struct : 1;
  /* At the end of RTL generation, 1 if this rtx is used.  This is used for
     copying shared structure.  See `unshare_all_rtl'.
     In a REG, this is not needed for that purpose, and used instead
     in `leaf_renumber_regs_insn'.
     1 in a SYMBOL_REF, means that emit_library_call
     has used it as the function.
     1 in a CONCAT is VAL_HOLDS_TRACK_EXPR in var-tracking.cc.
     1 in a VALUE or DEBUG_EXPR is VALUE_RECURSED_INTO in var-tracking.cc.  */
  unsigned int used : 1;
  /* 1 in an INSN or a SET if this rtx is related to the call frame,
     either changing how we compute the frame address or saving and
     restoring registers in the prologue and epilogue.
     1 in a REG or MEM if it is a pointer.
     1 in a SYMBOL_REF if it addresses something in the per-function
     constant string pool.
     1 in a VALUE is VALUE_CHANGED in var-tracking.cc.
     Dumped as "/f" in RTL dumps.  */
  unsigned frame_related : 1;
  /* 1 in a REG or PARALLEL that is the current function's return value.
     1 in a SYMBOL_REF for a weak symbol.
     1 in a CALL_INSN logically equivalent to ECF_PURE and DECL_PURE_P.
     1 in a CONCAT is VAL_EXPR_HAS_REVERSE in var-tracking.cc.
     1 in a VALUE or DEBUG_EXPR is NO_LOC_P in var-tracking.cc.
     Dumped as "/i" in RTL dumps.  */
  unsigned return_val : 1;

  union {
    /* The final union field is aligned to 64 bits on LP64 hosts,
       giving a 32-bit gap after the fields above.  We optimize the
       layout for that case and use the gap for extra code-specific
       information.  */

    /* The ORIGINAL_REGNO of a REG.  */
    unsigned int original_regno;

    /* The INSN_UID of an RTX_INSN-class code.  */
    int insn_uid;

    /* The SYMBOL_REF_FLAGS of a SYMBOL_REF.  */
    unsigned int symbol_ref_flags;

    /* The PAT_VAR_LOCATION_STATUS of a VAR_LOCATION.  */
    enum var_init_status var_location_status;

    /* In a CONST_WIDE_INT (aka hwivec_def), this is the number of
       HOST_WIDE_INTs in the hwivec_def.  */
    unsigned int num_elem;

    /* Information about a CONST_VECTOR.  */
    struct
    {
      /* The value of CONST_VECTOR_NPATTERNS.  */
      unsigned int npatterns : 16;

      /* The value of CONST_VECTOR_NELTS_PER_PATTERN.  */
      unsigned int nelts_per_pattern : 8;

      /* For future expansion.  */
      unsigned int unused : 8;
    } const_vector;
  } GTY ((skip)) u2;

  /* The first element of the operands of this rtx.
     The number of operands and their types are controlled
     by the `code' field, according to rtl.def.  */
  union u {
    rtunion fld[1];
    HOST_WIDE_INT hwint[1];
    struct reg_info reg;
    struct block_symbol block_sym;
    struct real_value rv;
    struct fixed_value fv;
    struct hwivec_def hwiv;
    struct const_poly_int_def cpi;
  } GTY ((special ("rtx_def"), desc ("GET_CODE (&%0)"))) u;
};
457 :
/* A node for constructing singly-linked lists of rtx.  */

struct GTY(()) rtx_expr_list : public rtx_def
{
private:
  /* No extra fields, but adds invariant: (GET_CODE (X) == EXPR_LIST).
     The subclass exists only so the C++ type system can carry that
     invariant; see the is_a_helper specialization below.  */

public:
  /* Get next in list.  */
  rtx_expr_list *next () const;

  /* Get at the underlying rtx.  */
  rtx element () const;
};
472 :
473 : template <>
474 : template <>
475 : inline bool
476 916056879 : is_a_helper <rtx_expr_list *>::test (rtx rt)
477 : {
478 916056879 : return rt->code == EXPR_LIST;
479 : }
480 :
struct GTY(()) rtx_insn_list : public rtx_def
{
private:
  /* No extra fields, but adds invariant: (GET_CODE (X) == INSN_LIST).

     This is an instance of:

       DEF_RTL_EXPR(INSN_LIST, "insn_list", "ue", RTX_EXTRA)

     i.e. a node for constructing singly-linked lists of rtx_insn *, where
     the list is "external" to the insn (as opposed to the doubly-linked
     list embedded within rtx_insn itself).  */

public:
  /* Get next in list.  */
  rtx_insn_list *next () const;

  /* Get at the underlying instruction.  */
  rtx_insn *insn () const;

};
502 :
503 : template <>
504 : template <>
505 : inline bool
506 2220888369 : is_a_helper <rtx_insn_list *>::test (rtx rt)
507 : {
508 2220888369 : return rt->code == INSN_LIST;
509 : }
510 :
/* A node with invariant GET_CODE (X) == SEQUENCE i.e. a vector of rtx,
   typically (but not always) of rtx_insn *, used in the late passes.  */

struct GTY(()) rtx_sequence : public rtx_def
{
private:
  /* No extra fields, but adds invariant: (GET_CODE (X) == SEQUENCE).  */

public:
  /* Get number of elements in sequence.  */
  int len () const;

  /* Get i-th element of the sequence.  */
  rtx element (int index) const;

  /* Get i-th element of the sequence, with a checked cast to
     rtx_insn *.  */
  rtx_insn *insn (int index) const;
};
530 :
531 : template <>
532 : template <>
533 : inline bool
534 810627179 : is_a_helper <rtx_sequence *>::test (rtx rt)
535 : {
536 810627179 : return rt->code == SEQUENCE;
537 : }
538 :
539 : template <>
540 : template <>
541 : inline bool
542 0 : is_a_helper <const rtx_sequence *>::test (const_rtx rt)
543 : {
544 0 : return rt->code == SEQUENCE;
545 : }
546 :
struct GTY(()) rtx_insn : public rtx_def
{
public:
  /* No extra fields, but adds the invariant:

     (INSN_P (X)
      || NOTE_P (X)
      || JUMP_TABLE_DATA_P (X)
      || BARRIER_P (X)
      || LABEL_P (X))

     i.e. that we must be able to use the following:
      INSN_UID ()
      NEXT_INSN ()
      PREV_INSN ()
    i.e. we have an rtx that has an INSN_UID field and can be part of
    a linked list of insns.
  */

  /* Returns true if this insn has been deleted.  The "deleted" state
     is stored in the volatil flag bit of rtx_def (see "/v" above).  */

  bool deleted () const { return volatil; }

  /* Mark this insn as deleted.  */

  void set_deleted () { volatil = true; }

  /* Mark this insn as not deleted.  */

  void set_undeleted () { volatil = false; }
};
578 :
579 : /* Subclasses of rtx_insn. */
580 :
struct GTY(()) rtx_debug_insn : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       DEBUG_INSN_P (X) aka (GET_CODE (X) == DEBUG_INSN)
     i.e. an annotation for tracking variable assignments.
     The invariant is enforced by the matching is_a_helper below.

     This is an instance of:
       DEF_RTL_EXPR(DEBUG_INSN, "debug_insn", "uuBeLie", RTX_INSN)
     from rtl.def.  */
};
591 :
struct GTY(()) rtx_nonjump_insn : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       NONJUMP_INSN_P (X) aka (GET_CODE (X) == INSN)
     i.e. an instruction that cannot jump.
     The invariant is enforced by the matching is_a_helper below.

     This is an instance of:
       DEF_RTL_EXPR(INSN, "insn", "uuBeLie", RTX_INSN)
     from rtl.def.  */
};
602 :
struct GTY(()) rtx_jump_insn : public rtx_insn
{
public:
  /* No extra fields, but adds the invariant:
       JUMP_P (X) aka (GET_CODE (X) == JUMP_INSN)
     i.e. an instruction that can possibly jump.

     This is an instance of:
       DEF_RTL_EXPR(JUMP_INSN, "jump_insn", "uuBeLie0", RTX_INSN)
     from rtl.def.  */

  /* Returns jump target of this instruction.  The returned value is not
     necessarily a code label: it may also be a RETURN or SIMPLE_RETURN
     expression.  Also, when the code label is marked "deleted", it is
     replaced by a NOTE.  In some cases the value is NULL_RTX.  */

  inline rtx jump_label () const;

  /* Returns jump target cast to rtx_code_label *.  */

  inline rtx_code_label *jump_target () const;

  /* Set jump target.  */

  inline void set_jump_target (rtx_code_label *);
};
629 :
struct GTY(()) rtx_call_insn : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       CALL_P (X) aka (GET_CODE (X) == CALL_INSN)
     i.e. an instruction that can possibly call a subroutine
     but which will not change which instruction comes next
     in the current function.
     The invariant is enforced by the matching is_a_helpers below.

     This is an instance of:
       DEF_RTL_EXPR(CALL_INSN, "call_insn", "uuBeLiee", RTX_INSN)
     from rtl.def.  */
};
642 :
struct GTY(()) rtx_jump_table_data : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       JUMP_TABLE_DATA_P (X) aka (GET_CODE (INSN) == JUMP_TABLE_DATA)
     i.e. a data for a jump table, considered an instruction for
     historical reasons.

     This is an instance of:
       DEF_RTL_EXPR(JUMP_TABLE_DATA, "jump_table_data", "uuBe0000", RTX_INSN)
     from rtl.def.  */

  /* This can be either:

     (a) a table of absolute jumps, in which case PATTERN (this) is an
         ADDR_VEC with arg 0 a vector of labels, or

     (b) a table of relative jumps (e.g. for -fPIC), in which case
         PATTERN (this) is an ADDR_DIFF_VEC, with arg 0 a LABEL_REF and
         arg 1 the vector of labels.

     This method gets the underlying vec.  */

  inline rtvec get_labels () const;
  inline scalar_int_mode get_data_mode () const;
};
668 :
struct GTY(()) rtx_barrier : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       BARRIER_P (X) aka (GET_CODE (X) == BARRIER)
     i.e. a marker that indicates that control will not flow through.

     This is an instance of:
       DEF_RTL_EXPR(BARRIER, "barrier", "uu00000", RTX_EXTRA)
     from rtl.def.  */
};
679 :
struct GTY(()) rtx_code_label : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       LABEL_P (X) aka (GET_CODE (X) == CODE_LABEL)
     i.e. a label in the assembler.

     This is an instance of:
       DEF_RTL_EXPR(CODE_LABEL, "code_label", "uuB00is", RTX_EXTRA)
     from rtl.def.  */
};
690 :
struct GTY(()) rtx_note : public rtx_insn
{
  /* No extra fields, but adds the invariant:
       NOTE_P(X) aka (GET_CODE (X) == NOTE)
     i.e. a note about the corresponding source code.

     This is an instance of:
       DEF_RTL_EXPR(NOTE, "note", "uuB0ni", RTX_EXTRA)
     from rtl.def.  */
};
701 :
/* The size in bytes of an rtx header (code, mode and flags).  */
#define RTX_HDR_SIZE offsetof (struct rtx_def, u)

/* The size in bytes of an rtx with code CODE.  */
#define RTX_CODE_SIZE(CODE) rtx_code_size[CODE]

#define NULL_RTX (rtx) 0

/* The "next" and "previous" RTX, relative to this one.  */

/* rtx_next[] gives the byte offset within the rtx of the operand that
   chains to the next rtx, or 0 when the code has no such operand.  */
#define RTX_NEXT(X) (rtx_next[GET_CODE (X)] == 0 ? NULL			\
		     : *(rtx *)(((char *)X) + rtx_next[GET_CODE (X)]))

/* FIXME: the "NEXT_INSN (PREV_INSN (X)) == X" condition shouldn't be needed.
 */
#define RTX_PREV(X) ((INSN_P (X)			\
		      || NOTE_P (X)			\
		      || JUMP_TABLE_DATA_P (X)		\
		      || BARRIER_P (X)			\
		      || LABEL_P (X))			\
		     && PREV_INSN (as_a <rtx_insn *> (X)) != NULL	\
		     && NEXT_INSN (PREV_INSN (as_a <rtx_insn *> (X))) == X \
		     ? PREV_INSN (as_a <rtx_insn *> (X)) : NULL)

/* Define macros to access the `code' field of the rtx.  */

#define GET_CODE(RTX)	    ((enum rtx_code) (RTX)->code)
#define PUT_CODE(RTX, CODE) ((RTX)->code = (CODE))

#define GET_MODE(RTX)	    ((machine_mode) (RTX)->mode)
#define PUT_MODE_RAW(RTX, MODE) ((RTX)->mode = (MODE))
733 :
/* RTL vector.  These appear inside RTX's when there is a need
   for a variable number of things.  The principal use is inside
   PARALLEL expressions.  */

struct GTY(()) rtvec_def {
  int num_elem;		/* number of elements */
  rtx GTY ((length ("%h.num_elem"))) elem[1];	/* trailing array; the real
						   length is num_elem.  */
};

#define NULL_RTVEC (rtvec) 0

#define GET_NUM_ELEM(RTVEC)		((RTVEC)->num_elem)
#define PUT_NUM_ELEM(RTVEC, NUM)	((RTVEC)->num_elem = (NUM))

/* Predicate yielding nonzero iff X is an rtx for a register.  */
#define REG_P(X) (GET_CODE (X) == REG)

/* Predicate yielding nonzero iff X is an rtx for a memory location.  */
#define MEM_P(X) (GET_CODE (X) == MEM)
753 :
/* The CASE_CONST_* macros expand to runs of switch-case labels covering
   groups of constant rtx codes.  There are two variants: on
   TARGET_SUPPORTS_WIDE_INT targets, large integers use CONST_WIDE_INT;
   otherwise they are carried by integer-valued CONST_DOUBLEs.  */

#if TARGET_SUPPORTS_WIDE_INT

/* Match CONST_*s that can represent compile-time constant integers.  */
#define CASE_CONST_SCALAR_INT \
   case CONST_INT: \
   case CONST_WIDE_INT

/* Match CONST_*s for which pointer equality corresponds to value
   equality.  */
#define CASE_CONST_UNIQUE \
   case CONST_INT: \
   case CONST_WIDE_INT: \
   case CONST_POLY_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED

/* Match all CONST_* rtxes.  */
#define CASE_CONST_ANY \
   case CONST_INT: \
   case CONST_WIDE_INT: \
   case CONST_POLY_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED: \
   case CONST_VECTOR

#else

/* Match CONST_*s that can represent compile-time constant integers.  */
#define CASE_CONST_SCALAR_INT \
   case CONST_INT: \
   case CONST_DOUBLE

/* Match CONST_*s for which pointer equality corresponds to value
   equality.  */
#define CASE_CONST_UNIQUE \
   case CONST_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED

/* Match all CONST_* rtxes.  */
#define CASE_CONST_ANY \
   case CONST_INT: \
   case CONST_DOUBLE: \
   case CONST_FIXED: \
   case CONST_VECTOR
#endif
800 :
/* Predicate yielding nonzero iff X is an rtx for a constant integer.  */
#define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)

/* Predicate yielding nonzero iff X is an rtx for a constant integer.  */
#define CONST_WIDE_INT_P(X) (GET_CODE (X) == CONST_WIDE_INT)

/* Predicate yielding nonzero iff X is an rtx for a polynomial constant
   integer.  */
#define CONST_POLY_INT_P(X) \
  (NUM_POLY_INT_COEFFS > 1 && GET_CODE (X) == CONST_POLY_INT)

/* Predicate yielding nonzero iff X is an rtx for a constant fixed-point.  */
#define CONST_FIXED_P(X) (GET_CODE (X) == CONST_FIXED)

/* Predicate yielding true iff X is an rtx for a double-int
   or floating point constant.  */
#define CONST_DOUBLE_P(X) (GET_CODE (X) == CONST_DOUBLE)

/* Predicate yielding true iff X is an rtx for a double-int.  */
#define CONST_DOUBLE_AS_INT_P(X) \
  (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == VOIDmode)

/* Predicate yielding true iff X is an rtx for an integer const.  */
#if TARGET_SUPPORTS_WIDE_INT
#define CONST_SCALAR_INT_P(X) \
  (CONST_INT_P (X) || CONST_WIDE_INT_P (X))
#else
#define CONST_SCALAR_INT_P(X) \
  (CONST_INT_P (X) || CONST_DOUBLE_AS_INT_P (X))
#endif

/* Predicate yielding true iff X is an rtx for a floating-point constant;
   a CONST_DOUBLE with VOIDmode holds an integer instead (see above).  */
#define CONST_DOUBLE_AS_FLOAT_P(X) \
  (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) != VOIDmode)

/* Predicate yielding nonzero iff X is an rtx for a constant vector.  */
#define CONST_VECTOR_P(X) (GET_CODE (X) == CONST_VECTOR)

/* Predicate yielding nonzero iff X is a label insn.  */
#define LABEL_P(X) (GET_CODE (X) == CODE_LABEL)

/* Predicate yielding nonzero iff X is a jump insn.  */
#define JUMP_P(X) (GET_CODE (X) == JUMP_INSN)

/* Predicate yielding nonzero iff X is a call insn.  */
#define CALL_P(X) (GET_CODE (X) == CALL_INSN)

/* 1 if RTX is a call_insn for a fake call.
   CALL_INSN use "used" flag to indicate it's a fake call.  */
#define FAKE_CALL_P(RTX) \
  (RTL_FLAG_CHECK1 ("FAKE_CALL_P", (RTX), CALL_INSN)->used)

/* Predicate yielding nonzero iff X is an insn that cannot jump.  */
#define NONJUMP_INSN_P(X) (GET_CODE (X) == INSN)

/* Predicate yielding nonzero iff X is a debug note/insn.  */
#define DEBUG_INSN_P(X) (GET_CODE (X) == DEBUG_INSN)

/* Predicate yielding nonzero iff X is an insn that is not a debug insn.  */
#define NONDEBUG_INSN_P(X) (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))

/* Nonzero if DEBUG_MARKER_INSN_P may possibly hold.  */
#define MAY_HAVE_DEBUG_MARKER_INSNS debug_nonbind_markers_p
/* Nonzero if DEBUG_BIND_INSN_P may possibly hold.  */
#define MAY_HAVE_DEBUG_BIND_INSNS flag_var_tracking_assignments
/* Nonzero if DEBUG_INSN_P may possibly hold.  */
#define MAY_HAVE_DEBUG_INSNS \
  (MAY_HAVE_DEBUG_MARKER_INSNS || MAY_HAVE_DEBUG_BIND_INSNS)

/* Predicate yielding nonzero iff X is a real insn.  */
#define INSN_P(X) (NONDEBUG_INSN_P (X) || DEBUG_INSN_P (X))

/* Predicate yielding nonzero iff X is a note insn.  */
#define NOTE_P(X) (GET_CODE (X) == NOTE)

/* Predicate yielding nonzero iff X is a barrier insn.  */
#define BARRIER_P(X) (GET_CODE (X) == BARRIER)

/* Predicate yielding nonzero iff X is a data for a jump table.  */
#define JUMP_TABLE_DATA_P(INSN) (GET_CODE (INSN) == JUMP_TABLE_DATA)

/* Predicate yielding nonzero iff RTX is a subreg.  */
#define SUBREG_P(RTX) (GET_CODE (RTX) == SUBREG)

/* Predicate yielding true iff RTX is a symbol ref.  */
#define SYMBOL_REF_P(RTX) (GET_CODE (RTX) == SYMBOL_REF)
887 :
888 : template <>
889 : template <>
890 : inline bool
891 >17737*10^7 : is_a_helper <rtx_insn *>::test (rtx rt)
892 : {
893 >17737*10^7 : return (INSN_P (rt)
894 >17737*10^7 : || NOTE_P (rt)
895 22561767928 : || JUMP_TABLE_DATA_P (rt)
896 22559178394 : || BARRIER_P (rt)
897 >19136*10^7 : || LABEL_P (rt));
898 : }
899 :
900 : template <>
901 : template <>
902 : inline bool
903 103774634 : is_a_helper <const rtx_insn *>::test (const_rtx rt)
904 : {
905 103774634 : return (INSN_P (rt)
906 103774634 : || NOTE_P (rt)
907 283 : || JUMP_TABLE_DATA_P (rt)
908 283 : || BARRIER_P (rt)
909 103774909 : || LABEL_P (rt));
910 : }
911 :
912 : template <>
913 : template <>
914 : inline bool
915 49805323 : is_a_helper <rtx_debug_insn *>::test (rtx rt)
916 : {
917 49805323 : return DEBUG_INSN_P (rt);
918 : }
919 :
920 : template <>
921 : template <>
922 : inline bool
923 : is_a_helper <rtx_nonjump_insn *>::test (rtx rt)
924 : {
925 : return NONJUMP_INSN_P (rt);
926 : }
927 :
928 : template <>
929 : template <>
930 : inline bool
931 17958825 : is_a_helper <rtx_jump_insn *>::test (rtx rt)
932 : {
933 17958825 : return JUMP_P (rt);
934 : }
935 :
936 : template <>
937 : template <>
938 : inline bool
939 49203044 : is_a_helper <rtx_jump_insn *>::test (rtx_insn *insn)
940 : {
941 49203044 : return JUMP_P (insn);
942 : }
943 :
944 : template <>
945 : template <>
946 : inline bool
947 6254213 : is_a_helper <rtx_call_insn *>::test (rtx rt)
948 : {
949 6254213 : return CALL_P (rt);
950 : }
951 :
952 : template <>
953 : template <>
954 : inline bool
955 737530248 : is_a_helper <rtx_call_insn *>::test (rtx_insn *insn)
956 : {
957 737530248 : return CALL_P (insn);
958 : }
959 :
960 : template <>
961 : template <>
962 : inline bool
963 31655982 : is_a_helper <const rtx_call_insn *>::test (const rtx_insn *insn)
964 : {
965 31655982 : return CALL_P (insn);
966 : }
967 :
968 : template <>
969 : template <>
970 : inline bool
971 6316 : is_a_helper <rtx_jump_table_data *>::test (rtx rt)
972 : {
973 6316 : return JUMP_TABLE_DATA_P (rt);
974 : }
975 :
976 : template <>
977 : template <>
978 : inline bool
979 140219156 : is_a_helper <rtx_jump_table_data *>::test (rtx_insn *insn)
980 : {
981 140219156 : return JUMP_TABLE_DATA_P (insn);
982 : }
983 :
984 : template <>
985 : template <>
986 : inline bool
987 11071197 : is_a_helper <rtx_barrier *>::test (rtx rt)
988 : {
989 11071197 : return BARRIER_P (rt);
990 : }
991 :
/* An rtx can be viewed as an rtx_code_label iff it satisfies LABEL_P.  */
template <>
template <>
inline bool
is_a_helper <rtx_code_label *>::test (rtx rt)
{
  return LABEL_P (rt);
}

/* Overload for values already known to be insns.  */
template <>
template <>
inline bool
is_a_helper <rtx_code_label *>::test (rtx_insn *insn)
{
  return LABEL_P (insn);
}
1007 :
1008 : template <>
1009 : template <>
1010 : inline bool
1011 162983199 : is_a_helper <rtx_note *>::test (rtx rt)
1012 : {
1013 162983199 : return NOTE_P (rt);
1014 : }
1015 :
1016 : template <>
1017 : template <>
1018 : inline bool
1019 73830602 : is_a_helper <rtx_note *>::test (rtx_insn *insn)
1020 : {
1021 73830602 : return NOTE_P (insn);
1022 : }
1023 :
/* Predicate yielding nonzero iff X is a return or simple_return.  */
#define ANY_RETURN_P(X) \
  (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN)

/* 1 if X is a unary operator.  */

#define UNARY_P(X)   \
  (GET_RTX_CLASS (GET_CODE (X)) == RTX_UNARY)

/* 1 if X is a binary operator.  */

#define BINARY_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_BINARY_MASK) == RTX_BINARY_RESULT)

/* 1 if X is an arithmetic operator.  */

#define ARITHMETIC_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_ARITHMETIC_MASK)		\
   == RTX_ARITHMETIC_RESULT)

/* 1 if X is a commutative arithmetic operator.  */

#define COMMUTATIVE_ARITH_P(X)   \
  (GET_RTX_CLASS (GET_CODE (X)) == RTX_COMM_ARITH)

/* 1 if X is a commutative arithmetic operator or a comparison operator.
   These two are sometimes selected together because it is possible to
   swap the two operands.  */

#define SWAPPABLE_OPERANDS_P(X)   \
  ((1 << GET_RTX_CLASS (GET_CODE (X)))					\
   & ((1 << RTX_COMM_ARITH) | (1 << RTX_COMM_COMPARE)			\
      | (1 << RTX_COMPARE)))

/* 1 if X is a non-commutative operator.  */

#define NON_COMMUTATIVE_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK)		\
   == RTX_NON_COMMUTATIVE_RESULT)

/* 1 if X is a commutative operator on integers.  */

#define COMMUTATIVE_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK)		\
   == RTX_COMMUTATIVE_RESULT)

/* 1 if X is a relational operator.  */

#define COMPARISON_P(X)   \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMPARE_MASK) == RTX_COMPARE_RESULT)

/* 1 if X is an rtx in class RTX_CONST_OBJ, i.e. a constant.  */

#define CONSTANT_P(X)   \
  (GET_RTX_CLASS (GET_CODE (X)) == RTX_CONST_OBJ)

/* 1 if X is a LABEL_REF.  */
#define LABEL_REF_P(X)  \
  (GET_CODE (X) == LABEL_REF)

/* 1 if X can be used to represent an object.  */
#define OBJECT_P(X)  \
  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_OBJ_MASK) == RTX_OBJ_RESULT)
1087 :
/* General accessor macros for accessing the fields of an rtx.  */

#if defined ENABLE_RTL_CHECKING && (GCC_VERSION >= 2007)
/* The bit with a star outside the statement expr and an & inside is
   so that N can be evaluated only once.  */
#define RTL_CHECK1(RTX, N, C1) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))			\
       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     if (GET_RTX_FORMAT (_code)[_n] != C1)				\
       rtl_check_failed_type1 (_rtx, _n, C1, __FILE__, __LINE__,	\
			       __FUNCTION__);				\
     &_rtx->u.fld[_n]; }))

/* Like RTL_CHECK1, but accept either of the format codes C1 and C2.  */
#define RTL_CHECK2(RTX, N, C1, C2) __extension__			\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))			\
       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     if (GET_RTX_FORMAT (_code)[_n] != C1				\
	 && GET_RTX_FORMAT (_code)[_n] != C2)				\
       rtl_check_failed_type2 (_rtx, _n, C1, C2, __FILE__, __LINE__,	\
			       __FUNCTION__);				\
     &_rtx->u.fld[_n]; }))

/* Check only that RTX has code C; the operand format is not checked.  */
#define RTL_CHECKC1(RTX, N, C) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     if (GET_CODE (_rtx) != (C))					\
       rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__,		\
			       __FUNCTION__);				\
     &_rtx->u.fld[_n]; }))

/* Like RTL_CHECKC1, but accept either of the codes C1 and C2.  */
#define RTL_CHECKC2(RTX, N, C1, C2) __extension__			\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_code != (C1) && _code != (C2))				\
       rtl_check_failed_code2 (_rtx, (C1), (C2), __FILE__, __LINE__,	\
			       __FUNCTION__);				\
     &_rtx->u.fld[_n]; }))

/* Like RTL_CHECKC1, but accept any of the codes C1, C2 and C3.  */
#define RTL_CHECKC3(RTX, N, C1, C2, C3) __extension__			\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_code != (C1) && _code != (C2) && _code != (C3))		\
       rtl_check_failed_code3 (_rtx, (C1), (C2), (C3), __FILE__,	\
			       __LINE__, __FUNCTION__);			\
     &_rtx->u.fld[_n]; }))

/* Bounds-checked access to element I of an rtvec.  */
#define RTVEC_ELT(RTVEC, I) __extension__				\
(*({ __typeof (RTVEC) const _rtvec = (RTVEC); const int _i = (I);	\
     if (_i < 0 || _i >= GET_NUM_ELEM (_rtvec))				\
       rtvec_check_failed_bounds (_rtvec, _i, __FILE__, __LINE__,	\
				  __FUNCTION__);			\
     &_rtvec->elem[_i]; }))

/* Bounds- and format-checked ('w') access to hwint operand N.  */
#define XWINT(RTX, N) __extension__					\
(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);		\
     const enum rtx_code _code = GET_CODE (_rtx);			\
     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))			\
       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     if (GET_RTX_FORMAT (_code)[_n] != 'w')				\
       rtl_check_failed_type1 (_rtx, _n, 'w', __FILE__, __LINE__,	\
			       __FUNCTION__);				\
     &_rtx->u.hwint[_n]; }))

/* Bounds-checked access to element I of the hwi vector of RTX.  */
#define CWI_ELT(RTX, I) __extension__					\
(*({ __typeof (RTX) const _cwi = (RTX);					\
     int _max = CWI_GET_NUM_ELEM (_cwi);				\
     const int _i = (I);						\
     if (_i < 0 || _i >= _max)						\
       cwi_check_failed_bounds (_cwi, _i, __FILE__, __LINE__,		\
				__FUNCTION__);				\
     &_cwi->u.hwiv.elem[_i]; }))
1165 :
/* Access hwint element N of RTX, checking that RTX has code C.  */
#define XCWINT(RTX, N, C) __extension__					\
(*({ __typeof (RTX) const _rtx = (RTX);					\
     if (GET_CODE (_rtx) != (C))					\
       rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__,		\
			       __FUNCTION__);				\
     &_rtx->u.hwint[N]; }))

/* Like XCWINT, but also check that the mode of RTX is M.  */
#define XCMWINT(RTX, N, C, M) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX);					\
     if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) != (M))		\
       rtl_check_failed_code_mode (_rtx, (C), (M), false, __FILE__,	\
				   __LINE__, __FUNCTION__);		\
     &_rtx->u.hwint[N]; }))

/* Like XCMWINT, but check that the mode of RTX is *not* M.  This was
   previously only defined in the non-checking branch below, so users
   of XCNMWINT failed to compile with ENABLE_RTL_CHECKING.  */
#define XCNMWINT(RTX, N, C, M) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX);					\
     if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))		\
       rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__,	\
				   __LINE__, __FUNCTION__);		\
     &_rtx->u.hwint[N]; }))

/* Check code C and that the mode is not M, then yield &u.rv.  */
#define XCNMPRV(RTX, C, M) __extension__				\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))		\
     rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__,	\
				 __LINE__, __FUNCTION__);		\
   &_rtx->u.rv; })

/* Check code C and that the mode is not M, then yield &u.fv.  */
#define XCNMPFV(RTX, C, M) __extension__				\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))		\
     rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__,	\
				 __LINE__, __FUNCTION__);		\
   &_rtx->u.fv; })
1193 :
/* Check that RTX is a REG before yielding its reg fields.  */
#define REG_CHECK(RTX) __extension__					\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != REG)						\
     rtl_check_failed_code1 (_rtx, REG, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   &_rtx->u.reg; })

/* Check that the symbol carries block-symbol information
   (SYMBOL_FLAG_HAS_BLOCK_INFO) before yielding it.  */
#define BLOCK_SYMBOL_CHECK(RTX) __extension__				\
({ __typeof (RTX) const _symbol = (RTX);				\
   const unsigned int flags = SYMBOL_REF_FLAGS (_symbol);		\
   if ((flags & SYMBOL_FLAG_HAS_BLOCK_INFO) == 0)			\
     rtl_check_failed_block_symbol (__FILE__, __LINE__,			\
				    __FUNCTION__);			\
   &_symbol->u.block_sym; })

/* Check that RTX has code C, then yield its hwi vector.  */
#define HWIVEC_CHECK(RTX,C) __extension__				\
({ __typeof (RTX) const _symbol = (RTX);				\
   RTL_CHECKC1 (_symbol, 0, C);						\
   &_symbol->u.hwiv; })
1213 :
/* Out-of-line routines invoked by the checking macros above; each
   diagnoses the failed check and does not return.  */
extern void rtl_check_failed_bounds (const_rtx, int, const char *, int,
				     const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtl_check_failed_type1 (const_rtx, int, int, const char *, int,
				    const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtl_check_failed_type2 (const_rtx, int, int, int, const char *,
				    int, const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtl_check_failed_code1 (const_rtx, enum rtx_code, const char *,
				    int, const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtl_check_failed_code2 (const_rtx, enum rtx_code, enum rtx_code,
				    const char *, int, const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtl_check_failed_code3 (const_rtx, enum rtx_code, enum rtx_code,
				    enum rtx_code, const char *, int,
				    const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtl_check_failed_code_mode (const_rtx, enum rtx_code, machine_mode,
					bool, const char *, int, const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtl_check_failed_block_symbol (const char *, int, const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void cwi_check_failed_bounds (const_rtx, int, const char *, int,
				     const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern void rtvec_check_failed_bounds (const_rtvec, int, const char *, int,
				       const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1244 :
#else   /* not ENABLE_RTL_CHECKING */

/* With rtl checking disabled, the accessors compile to unchecked
   field references.  */
#define RTL_CHECK1(RTX, N, C1)      ((RTX)->u.fld[N])
#define RTL_CHECK2(RTX, N, C1, C2)  ((RTX)->u.fld[N])
#define RTL_CHECKC1(RTX, N, C)	    ((RTX)->u.fld[N])
#define RTL_CHECKC2(RTX, N, C1, C2) ((RTX)->u.fld[N])
#define RTL_CHECKC3(RTX, N, C1, C2, C3) ((RTX)->u.fld[N])
#define RTVEC_ELT(RTVEC, I)	    ((RTVEC)->elem[I])
#define XWINT(RTX, N)		    ((RTX)->u.hwint[N])
#define CWI_ELT(RTX, I)		    ((RTX)->u.hwiv.elem[I])
#define XCWINT(RTX, N, C)	    ((RTX)->u.hwint[N])
#define XCMWINT(RTX, N, C, M)	    ((RTX)->u.hwint[N])
#define XCNMWINT(RTX, N, C, M)	    ((RTX)->u.hwint[N])
#define XCNMPRV(RTX, C, M)	    (&(RTX)->u.rv)
#define XCNMPFV(RTX, C, M)	    (&(RTX)->u.fv)
#define REG_CHECK(RTX)		    (&(RTX)->u.reg)
#define BLOCK_SYMBOL_CHECK(RTX)	    (&(RTX)->u.block_sym)
#define HWIVEC_CHECK(RTX,C)	    (&(RTX)->u.hwiv)

#endif
1265 :
/* General accessor macros for accessing the flags of an rtx.  */

/* Access an individual rtx flag, with no checking of any kind.  */
#define RTX_FLAG(RTX, FLAG) ((RTX)->FLAG)

#if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION >= 2007)
/* Evaluate to RTX itself after verifying that its code is C1.  */
#define RTL_FLAG_CHECK1(NAME, RTX, C1) __extension__			\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1)						\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

/* Like RTL_FLAG_CHECK1, but allow either of the listed codes.  */
#define RTL_FLAG_CHECK2(NAME, RTX, C1, C2) __extension__		\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2)			\
     rtl_check_failed_flag  (NAME,_rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

/* Like RTL_FLAG_CHECK1, but allow any of the listed codes.  */
#define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3) __extension__		\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2			\
       && GET_CODE (_rtx) != C3)					\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4) __extension__	\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE(_rtx) != C4)		\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5) __extension__	\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4		\
       && GET_CODE (_rtx) != C5)					\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)		\
  __extension__								\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4		\
       && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6)		\
     rtl_check_failed_flag  (NAME,_rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

#define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)		\
  __extension__								\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2			\
       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4		\
       && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6		\
       && GET_CODE (_rtx) != C7)					\
     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
			     __FUNCTION__);				\
   _rtx; })

/* Check that RTX has one of the codes that may appear on the insn
   chain.  */
#define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX) 				\
  __extension__								\
({ __typeof (RTX) const _rtx = (RTX);					\
   if (!INSN_CHAIN_CODE_P (GET_CODE (_rtx)))				\
     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,		\
			    __FUNCTION__);				\
   _rtx; })

extern void rtl_check_failed_flag (const char *, const_rtx, const char *,
				   int, const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD
     ;

#else	/* not ENABLE_RTL_FLAG_CHECKING */

#define RTL_FLAG_CHECK1(NAME, RTX, C1)					(RTX)
#define RTL_FLAG_CHECK2(NAME, RTX, C1, C2)				(RTX)
#define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3)				(RTX)
#define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4)			(RTX)
#define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5)			(RTX)
#define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)		(RTX)
#define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)		(RTX)
#define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX)				(RTX)
#endif
1356 :
/* Typed operand accessors, checked against the operand's rtl format
   code.  */
#define XINT(RTX, N)	(RTL_CHECK2 (RTX, N, 'i', 'n').rt_int)
#define XUINT(RTX, N)	(RTL_CHECK2 (RTX, N, 'i', 'n').rt_uint)
#define XLOC(RTX, N)	(RTL_CHECK1 (RTX, N, 'L').rt_loc)
#define XSTR(RTX, N)	(RTL_CHECK2 (RTX, N, 's', 'S').rt_str)
#define XEXP(RTX, N)	(RTL_CHECK2 (RTX, N, 'e', 'u').rt_rtx)
#define XVEC(RTX, N)	(RTL_CHECK2 (RTX, N, 'E', 'V').rt_rtvec)
#define XMODE(RTX, N)	(RTL_CHECK1 (RTX, N, 'M').rt_type)
#define XTREE(RTX, N)	(RTL_CHECK1 (RTX, N, 't').rt_tree)
#define XBBDEF(RTX, N)	(RTL_CHECK1 (RTX, N, 'B').rt_bb)
#define XTMPL(RTX, N)	(RTL_CHECK1 (RTX, N, 'T').rt_str)
#define XCFI(RTX, N)	(RTL_CHECK1 (RTX, N, 'C').rt_cfi)

#define XVECEXP(RTX, N, M)	RTVEC_ELT (XVEC (RTX, N), M)
#define XVECLEN(RTX, N)		GET_NUM_ELEM (XVEC (RTX, N))

/* These are like XINT, etc. except that they expect a '0' field instead
   of the normal type code.  */

#define X0INT(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_int)
#define X0UINT(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_uint)
#define X0LOC(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_loc)
#define X0STR(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_str)
#define X0EXP(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_rtx)
#define X0VEC(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_rtvec)
#define X0MODE(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_type)
#define X0TREE(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_tree)
#define X0BBDEF(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_bb)
#define X0ADVFLAGS(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_addr_diff_vec_flags)
#define X0CSELIB(RTX, N)   (RTL_CHECK1 (RTX, N, '0').rt_cselib)
#define X0MEMATTR(RTX, N)  (RTL_CHECKC1 (RTX, N, MEM).rt_mem)
#define X0CONSTANT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_constant)

/* Access a '0' field with any type.  */
#define X0ANY(RTX, N)	   RTL_CHECK1 (RTX, N, '0')

/* Like XINT, etc., but check the rtx code C rather than the operand
   format.  */
#define XCINT(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_int)
#define XCUINT(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_uint)
#define XCLOC(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_loc)
#define XCSUBREG(RTX, N, C)   (RTL_CHECKC1 (RTX, N, C).rt_subreg)
#define XCSTR(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_str)
#define XCEXP(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_rtx)
#define XCVEC(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_rtvec)
#define XCMODE(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_type)
#define XCTREE(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_tree)
#define XCBBDEF(RTX, N, C)    (RTL_CHECKC1 (RTX, N, C).rt_bb)
#define XCCFI(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_cfi)
#define XCCSELIB(RTX, N, C)   (RTL_CHECKC1 (RTX, N, C).rt_cselib)

#define XCVECEXP(RTX, N, M, C)	RTVEC_ELT (XCVEC (RTX, N, C), M)
#define XCVECLEN(RTX, N, C)	GET_NUM_ELEM (XCVEC (RTX, N, C))

#define XC2EXP(RTX, N, C1, C2)      (RTL_CHECKC2 (RTX, N, C1, C2).rt_rtx)
#define XC3EXP(RTX, N, C1, C2, C3)  (RTL_CHECKC3 (RTX, N, C1, C2, C3).rt_rtx)
1410 :
1411 :
1412 : /* Methods of rtx_expr_list. */
1413 :
1414 136450353 : inline rtx_expr_list *rtx_expr_list::next () const
1415 : {
1416 136450353 : rtx tmp = XEXP (this, 1);
1417 136450353 : return safe_as_a <rtx_expr_list *> (tmp);
1418 : }
1419 :
/* The payload of this list node, stored in operand 0.  */
inline rtx rtx_expr_list::element () const
{
  return XEXP (this, 0);
}
1424 :
1425 : /* Methods of rtx_insn_list. */
1426 :
1427 1644744230 : inline rtx_insn_list *rtx_insn_list::next () const
1428 : {
1429 1644744230 : rtx tmp = XEXP (this, 1);
1430 1644744230 : return safe_as_a <rtx_insn_list *> (tmp);
1431 : }
1432 :
1433 1068400189 : inline rtx_insn *rtx_insn_list::insn () const
1434 : {
1435 1068400189 : rtx tmp = XEXP (this, 0);
1436 1068400189 : return safe_as_a <rtx_insn *> (tmp);
1437 : }
1438 :
1439 : /* Methods of rtx_sequence. */
1440 :
/* Number of elements in the sequence's vector (operand 0).  */
inline int rtx_sequence::len () const
{
  return XVECLEN (this, 0);
}

/* Element INDEX of the sequence, as a plain rtx.  */
inline rtx rtx_sequence::element (int index) const
{
  return XVECEXP (this, 0, index);
}

/* Element INDEX of the sequence, asserted to be an insn.  */
inline rtx_insn *rtx_sequence::insn (int index) const
{
  return as_a <rtx_insn *> (XVECEXP (this, 0, index));
}
1455 :
1456 : /* ACCESS MACROS for particular fields of insns. */
1457 :
1458 : /* Holds a unique number for each insn.
1459 : These are not necessarily sequentially increasing. */
/* Read-only accessor; INSN must have an insn-chain code.  */
inline int INSN_UID (const_rtx insn)
{
  return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID",
				    (insn))->u2.insn_uid;
}
/* Writable accessor returning a reference to the uid field.  */
inline int& INSN_UID (rtx insn)
{
  return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID",
				    (insn))->u2.insn_uid;
}
1470 :
1471 : /* Chain insns together in sequence. */
1472 :
1473 : /* For now these are split in two: an rvalue form:
1474 : PREV_INSN/NEXT_INSN
1475 : and an lvalue form:
1476 : SET_NEXT_INSN/SET_PREV_INSN. */
1477 :
1478 44274357543 : inline rtx_insn *PREV_INSN (const rtx_insn *insn)
1479 : {
1480 44274357543 : rtx prev = XEXP (insn, 0);
1481 44242288320 : return safe_as_a <rtx_insn *> (prev);
1482 : }
1483 :
/* Writable reference to the previous-insn link (operand 0).  */
inline rtx& SET_PREV_INSN (rtx_insn *insn)
{
  return XEXP (insn, 0);
}
1488 :
1489 >13444*10^7 : inline rtx_insn *NEXT_INSN (const rtx_insn *insn)
1490 : {
1491 >13444*10^7 : rtx next = XEXP (insn, 1);
1492 >13377*10^7 : return safe_as_a <rtx_insn *> (next);
1493 : }
1494 :
/* Writable reference to the next-insn link (operand 1).  */
inline rtx& SET_NEXT_INSN (rtx_insn *insn)
{
  return XEXP (insn, 1);
}

/* Basic block containing INSN (operand 2).  */
inline basic_block BLOCK_FOR_INSN (const_rtx insn)
{
  return XBBDEF (insn, 2);
}

/* Writable reference to INSN's containing basic block.  */
inline basic_block& BLOCK_FOR_INSN (rtx insn)
{
  return XBBDEF (insn, 2);
}
1509 :
/* Associate INSN with basic block BB.  */
inline void set_block_for_insn (rtx_insn *insn, basic_block bb)
{
  BLOCK_FOR_INSN (insn) = bb;
}
1514 :
/* The body of an insn (operand 3).  */
inline rtx PATTERN (const_rtx insn)
{
  return XEXP (insn, 3);
}

/* Writable reference to the insn body.  */
inline rtx& PATTERN (rtx insn)
{
  return XEXP (insn, 3);
}

/* Source location of INSN (operand 4).  */
inline location_t INSN_LOCATION (const rtx_insn *insn)
{
  return XLOC (insn, 4);
}

/* Writable reference to INSN's source location.  */
inline location_t& INSN_LOCATION (rtx_insn *insn)
{
  return XLOC (insn, 4);
}

/* True if INSN carries a known source location.  */
inline bool INSN_HAS_LOCATION (const rtx_insn *insn)
{
  return LOCATION_LOCUS (INSN_LOCATION (insn)) != UNKNOWN_LOCATION;
}
1540 :
/* LOCATION of an RTX if relevant; UNKNOWN_LOCATION for non-insns.  */
#define RTL_LOCATION(X) (INSN_P (X) ? \
			 INSN_LOCATION (as_a <rtx_insn *> (X)) \
			 : UNKNOWN_LOCATION)

/* Code number of instruction, from when it was recognized.
   -1 means this instruction has not been recognized yet.  */
#define INSN_CODE(INSN) XINT (INSN, 5)
1549 :
1550 29264 : inline rtvec rtx_jump_table_data::get_labels () const
1551 : {
1552 29264 : rtx pat = PATTERN (this);
1553 29264 : if (GET_CODE (pat) == ADDR_VEC)
1554 23766 : return XVEC (pat, 0);
1555 : else
1556 5498 : return XVEC (pat, 1); /* presumably an ADDR_DIFF_VEC */
1557 : }
1558 :
/* Return the mode of the data in the table, which is always a scalar
   integer.  */

inline scalar_int_mode
rtx_jump_table_data::get_data_mode () const
{
  return as_a <scalar_int_mode> (GET_MODE (PATTERN (this)));
}

/* If LABEL is followed by a jump table, return the table, otherwise
   return null.  */

inline rtx_jump_table_data *
jump_table_for_label (const rtx_code_label *label)
{
  /* safe_dyn_cast yields null when the next insn is not jump table
     data.  */
  return safe_dyn_cast <rtx_jump_table_data *> (NEXT_INSN (label));
}
1576 :
/* 1 if the frame_related flag of RTX is set; valid on insns, debug
   insns, calls, jumps, barriers and SETs.  */
#define RTX_FRAME_RELATED_P(RTX)					\
  (RTL_FLAG_CHECK6 ("RTX_FRAME_RELATED_P", (RTX), DEBUG_INSN, INSN,	\
		    CALL_INSN, JUMP_INSN, BARRIER, SET)->frame_related)

/* 1 if JUMP RTX is a crossing jump.  */
#define CROSSING_JUMP_P(RTX) \
  (RTL_FLAG_CHECK1 ("CROSSING_JUMP_P", (RTX), JUMP_INSN)->jump)

/* 1 if RTX is a call to a const function.  Built from ECF_CONST and
   TREE_READONLY.  */
#define RTL_CONST_CALL_P(RTX)					\
  (RTL_FLAG_CHECK1 ("RTL_CONST_CALL_P", (RTX), CALL_INSN)->unchanging)

/* 1 if RTX is a call to a pure function.  Built from ECF_PURE and
   DECL_PURE_P.  */
#define RTL_PURE_CALL_P(RTX)					\
  (RTL_FLAG_CHECK1 ("RTL_PURE_CALL_P", (RTX), CALL_INSN)->return_val)

/* 1 if RTX is a call to a const or pure function.  */
#define RTL_CONST_OR_PURE_CALL_P(RTX) \
  (RTL_CONST_CALL_P (RTX) || RTL_PURE_CALL_P (RTX))

/* 1 if RTX is a call to a looping const or pure function.  Built from
   ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P.  */
#define RTL_LOOPING_CONST_OR_PURE_CALL_P(RTX) \
  (RTL_FLAG_CHECK1 ("CONST_OR_PURE_CALL_P", (RTX), CALL_INSN)->call)

/* 1 if RTX is a call_insn for a sibling call.  */
#define SIBLING_CALL_P(RTX)					\
  (RTL_FLAG_CHECK1 ("SIBLING_CALL_P", (RTX), CALL_INSN)->jump)

/* 1 if RTX is a jump_insn, call_insn, or insn that is an annulling branch.  */
#define INSN_ANNULLED_BRANCH_P(RTX)					\
  (RTL_FLAG_CHECK1 ("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging)

/* 1 if RTX is an insn in a delay slot and is from the target of the branch.
   If the branch insn has INSN_ANNULLED_BRANCH_P set, this insn should only be
   executed if the branch is taken.  For annulled branches with this bit
   clear, the insn should be executed only if the branch is not taken.  */
#define INSN_FROM_TARGET_P(RTX)						\
  (RTL_FLAG_CHECK3 ("INSN_FROM_TARGET_P", (RTX), INSN, JUMP_INSN,	\
		    CALL_INSN)->in_struct)

/* In an ADDR_DIFF_VEC, the flags for RTX for use by branch shortening.
   See the comments for ADDR_DIFF_VEC in rtl.def.  */
#define ADDR_DIFF_VEC_FLAGS(RTX) X0ADVFLAGS (RTX, 4)

/* In a VALUE, the value cselib has assigned to RTX.
   This is a "struct cselib_val", see cselib.h.  */
#define CSELIB_VAL_PTR(RTX) X0CSELIB (RTX, 0)

/* Holds a list of notes on what this insn does to various REGs.
   It is a chain of EXPR_LIST rtx's, where the second operand is the
   chain pointer and the first operand is the REG being described.
   The mode field of the EXPR_LIST contains not a real machine mode
   but a value from enum reg_note.  */
#define REG_NOTES(INSN) XEXP(INSN, 6)

/* In an ENTRY_VALUE this is the DECL_INCOMING_RTL of the argument in
   question.  */
#define ENTRY_VALUE_EXP(RTX) (RTL_CHECKC1 (RTX, 0, ENTRY_VALUE).rt_rtx)
1638 :
/* The kinds of register notes; one enumerator per DEF_REG_NOTE entry
   in reg-notes.def.  */
enum reg_note
{
#define DEF_REG_NOTE(NAME) NAME,
#include "reg-notes.def"
#undef DEF_REG_NOTE
  REG_NOTE_MAX
};

/* Define macros to extract and insert the reg-note kind in an EXPR_LIST.  */
#define REG_NOTE_KIND(LINK) ((enum reg_note) GET_MODE (LINK))
#define PUT_REG_NOTE_KIND(LINK, KIND) \
  PUT_MODE_RAW (LINK, (machine_mode) (KIND))

/* Names for REG_NOTE's in EXPR_LIST insn's.  */

extern const char * const reg_note_name[];
#define GET_REG_NOTE_NAME(MODE) (reg_note_name[(int) (MODE)])
1656 :
/* This field is only present on CALL_INSNs.  It holds a chain of EXPR_LIST of
   USE, CLOBBER and SET expressions.
   USE expressions list the registers filled with arguments that
   are passed to the function.
   CLOBBER expressions document the registers explicitly clobbered
   by this CALL_INSN.
   SET expressions say that the return value of the call (the SET_DEST)
   is equivalent to a value available before the call (the SET_SRC).
   This kind of SET is used when the return value is predictable in
   advance.  It is purely an optimisation hint; unlike USEs and CLOBBERs,
   it does not affect register liveness.

   Pseudo registers cannot be mentioned in this list.  */
#define CALL_INSN_FUNCTION_USAGE(INSN)	XEXP(INSN, 7)

/* The label-number of a code-label.  The assembler label
   is made from `L' and the label-number printed in decimal.
   Label numbers are unique in a compilation.  */
#define CODE_LABEL_NUMBER(INSN)	XINT (INSN, 5)

/* In a NOTE that is a line number, this is a string for the file name that the
   line is in.  We use the same field to record block numbers temporarily in
   NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes.  (We avoid lots of casts
   between ints and pointers if we use a different macro for the block number.)
   */

/* Opaque data.  */
#define NOTE_DATA(INSN)	        RTL_CHECKC1 (INSN, 3, NOTE)
#define NOTE_DELETED_LABEL_NAME(INSN) XCSTR (INSN, 3, NOTE)
/* NOTE: the expansion deliberately ends with a semicolon.  */
#define SET_INSN_DELETED(INSN) set_insn_deleted (INSN);
#define NOTE_BLOCK(INSN)	XCTREE (INSN, 3, NOTE)
#define NOTE_EH_HANDLER(INSN)	XCINT (INSN, 3, NOTE)
#define NOTE_BASIC_BLOCK(INSN)	XCBBDEF (INSN, 3, NOTE)
#define NOTE_VAR_LOCATION(INSN)	XCEXP (INSN, 3, NOTE)
#define NOTE_MARKER_LOCATION(INSN) XCLOC (INSN, 3, NOTE)
#define NOTE_CFI(INSN)		XCCFI (INSN, 3, NOTE)
#define NOTE_LABEL_NUMBER(INSN)	XCINT (INSN, 3, NOTE)

/* In a NOTE that is a line number, this is the line number.
   Other kinds of NOTEs are identified by negative numbers here.  */
#define NOTE_KIND(INSN) XCINT (INSN, 4, NOTE)

/* Nonzero if INSN is a note marking the beginning of a basic block.  */
#define NOTE_INSN_BASIC_BLOCK_P(INSN) \
  (NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK)

/* Nonzero if INSN is a debug nonbind marker note,
   for which NOTE_MARKER_LOCATION can be used.  */
#define NOTE_MARKER_P(INSN)				\
  (NOTE_P (INSN) &&					\
   (NOTE_KIND (INSN) == NOTE_INSN_BEGIN_STMT		\
    || NOTE_KIND (INSN) == NOTE_INSN_INLINE_ENTRY))

/* Variable declaration and the location of a variable.  */
#define PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION))
#define PAT_VAR_LOCATION_LOC(PAT) (XCEXP ((PAT), 1, VAR_LOCATION))

/* Initialization status of the variable in the location.  Status
   can be unknown, uninitialized or initialized.  See enumeration
   type below.  */
#define PAT_VAR_LOCATION_STATUS(PAT) \
  (RTL_FLAG_CHECK1 ("PAT_VAR_LOCATION_STATUS", PAT, VAR_LOCATION) \
   ->u2.var_location_status)

/* Accessors for a NOTE_INSN_VAR_LOCATION.  */
#define NOTE_VAR_LOCATION_DECL(NOTE) \
  PAT_VAR_LOCATION_DECL (NOTE_VAR_LOCATION (NOTE))
#define NOTE_VAR_LOCATION_LOC(NOTE) \
  PAT_VAR_LOCATION_LOC (NOTE_VAR_LOCATION (NOTE))
#define NOTE_VAR_LOCATION_STATUS(NOTE) \
  PAT_VAR_LOCATION_STATUS (NOTE_VAR_LOCATION (NOTE))

/* Evaluate to TRUE if INSN is a debug insn that denotes a variable
   location/value tracking annotation.  */
#define DEBUG_BIND_INSN_P(INSN)		\
  (DEBUG_INSN_P (INSN)			\
   && (GET_CODE (PATTERN (INSN))	\
       == VAR_LOCATION))
/* Evaluate to TRUE if INSN is a debug insn that denotes a program
   source location marker.  */
#define DEBUG_MARKER_INSN_P(INSN)	\
  (DEBUG_INSN_P (INSN)			\
   && (GET_CODE (PATTERN (INSN))	\
       != VAR_LOCATION))
/* Evaluate to the marker kind.  */
#define INSN_DEBUG_MARKER_KIND(INSN)		  \
  (GET_CODE (PATTERN (INSN)) == DEBUG_MARKER	  \
   ? (GET_MODE (PATTERN (INSN)) == VOIDmode	  \
      ? NOTE_INSN_BEGIN_STMT			  \
      : GET_MODE (PATTERN (INSN)) == BLKmode	  \
	? NOTE_INSN_INLINE_ENTRY		  \
	: (enum insn_note)-1)			  \
   : (enum insn_note)-1)
/* Create patterns for debug markers.  These and the above abstract
   the representation, so that it's easier to get rid of the abuse of
   the mode to hold the marker kind.  Other marker types are
   envisioned, so a single bit flag won't do; maybe separate RTL codes
   wouldn't be a problem.  */
#define GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT() \
  gen_rtx_DEBUG_MARKER (VOIDmode)
#define GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT() \
  gen_rtx_DEBUG_MARKER (BLKmode)

/* The VAR_LOCATION rtx in a DEBUG_INSN.  */
#define INSN_VAR_LOCATION(INSN) \
  (RTL_FLAG_CHECK1 ("INSN_VAR_LOCATION", PATTERN (INSN), VAR_LOCATION))
/* A pointer to the VAR_LOCATION rtx in a DEBUG_INSN.  */
#define INSN_VAR_LOCATION_PTR(INSN) \
  (&PATTERN (INSN))
1766 :
1767 : /* Accessors for a tree-expanded var location debug insn. */
1768 : #define INSN_VAR_LOCATION_DECL(INSN) \
1769 : PAT_VAR_LOCATION_DECL (INSN_VAR_LOCATION (INSN))
1770 : #define INSN_VAR_LOCATION_LOC(INSN) \
1771 : PAT_VAR_LOCATION_LOC (INSN_VAR_LOCATION (INSN))
1772 : #define INSN_VAR_LOCATION_STATUS(INSN) \
1773 : PAT_VAR_LOCATION_STATUS (INSN_VAR_LOCATION (INSN))
1774 :
1775 : /* Expand to the RTL that denotes an unknown variable location in a
1776 : DEBUG_INSN. */
1777 : #define gen_rtx_UNKNOWN_VAR_LOC() (gen_rtx_CLOBBER (VOIDmode, const0_rtx))
1778 :
1779 : /* Determine whether X is such an unknown location. */
1780 : #define VAR_LOC_UNKNOWN_P(X) \
1781 : (GET_CODE (X) == CLOBBER && XEXP ((X), 0) == const0_rtx)
1782 :
1783 : /* 1 if RTX is emitted after a call, but it should take effect before
1784 : the call returns. */
1785 : #define NOTE_DURING_CALL_P(RTX) \
1786 : (RTL_FLAG_CHECK1 ("NOTE_VAR_LOCATION_DURING_CALL_P", (RTX), NOTE)->call)
1787 :
1788 : /* DEBUG_EXPR_DECL corresponding to a DEBUG_EXPR RTX. */
1789 : #define DEBUG_EXPR_TREE_DECL(RTX) XCTREE (RTX, 0, DEBUG_EXPR)
1790 :
1791 : /* VAR_DECL/PARM_DECL DEBUG_IMPLICIT_PTR takes address of. */
1792 : #define DEBUG_IMPLICIT_PTR_DECL(RTX) XCTREE (RTX, 0, DEBUG_IMPLICIT_PTR)
1793 :
1794 : /* PARM_DECL DEBUG_PARAMETER_REF references. */
1795 : #define DEBUG_PARAMETER_REF_DECL(RTX) XCTREE (RTX, 0, DEBUG_PARAMETER_REF)
1796 :
/* Codes that appear in the NOTE_KIND field for kinds of notes
   that are not line numbers.  These codes are all negative.

   Notice that we do not try to use zero here for any of
   the special note codes because sometimes the source line
   actually can be zero!  This happens (for example) when we
   are generating code for the per-translation-unit constructor
   and destructor routines for some C++ translation unit.  */

enum insn_note
{
#define DEF_INSN_NOTE(NAME) NAME,
#include "insn-notes.def"
#undef DEF_INSN_NOTE

  NOTE_INSN_MAX
};

/* Names for NOTE insn's other than line numbers.  */

extern const char * const note_insn_name[NOTE_INSN_MAX];
#define GET_NOTE_INSN_NAME(NOTE_CODE) \
  (note_insn_name[(NOTE_CODE)])

/* The name of a label, in case it corresponds to an explicit label
   in the input source code.  */
#define LABEL_NAME(RTX) XCSTR (RTX, 6, CODE_LABEL)

/* In jump.cc, each label contains a count of the number
   of LABEL_REFs that point at it, so unused labels can be deleted.  */
#define LABEL_NUSES(RTX) XCINT (RTX, 4, CODE_LABEL)

/* Labels carry a two-bit field composed of the ->jump and ->call
   bits.  This field indicates whether the label is an alternate
   entry point, and if so, what kind.  */
enum label_kind
{
  LABEL_NORMAL = 0,	/* ordinary label */
  LABEL_STATIC_ENTRY,	/* alternate entry point, not exported */
  LABEL_GLOBAL_ENTRY,	/* alternate entry point, exported */
  LABEL_WEAK_ENTRY	/* alternate entry point, exported as weak symbol */
};

/* When RTL flag checking is enabled, LABEL_KIND/SET_LABEL_KIND also
   verify that the operand really is a label; the fallback versions
   below skip that check.  */
#if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION > 2007)

/* Retrieve the kind of LABEL.  */
#define LABEL_KIND(LABEL) __extension__					\
({ __typeof (LABEL) const _label = (LABEL);				\
   if (! LABEL_P (_label))						\
     rtl_check_failed_flag ("LABEL_KIND", _label, __FILE__, __LINE__,	\
			    __FUNCTION__);				\
   (enum label_kind) ((_label->jump << 1) | _label->call); })

/* Set the kind of LABEL.  */
#define SET_LABEL_KIND(LABEL, KIND) do {				\
   __typeof (LABEL) const _label = (LABEL);				\
   const unsigned int _kind = (KIND);					\
   if (! LABEL_P (_label))						\
     rtl_check_failed_flag ("SET_LABEL_KIND", _label, __FILE__, __LINE__, \
			    __FUNCTION__);				\
   _label->jump = ((_kind >> 1) & 1);					\
   _label->call = (_kind & 1);						\
} while (0)

#else

/* Retrieve the kind of LABEL.  */
#define LABEL_KIND(LABEL) \
   ((enum label_kind) (((LABEL)->jump << 1) | (LABEL)->call))

/* Set the kind of LABEL.  */
#define SET_LABEL_KIND(LABEL, KIND) do {				\
   rtx const _label = (LABEL);						\
   const unsigned int _kind = (KIND);					\
   _label->jump = ((_kind >> 1) & 1);					\
   _label->call = (_kind & 1);						\
} while (0)

#endif /* rtl flag checking */

#define LABEL_ALT_ENTRY_P(LABEL) (LABEL_KIND (LABEL) != LABEL_NORMAL)

/* In jump.cc, each JUMP_INSN can point to a label that it can jump to,
   so that if the JUMP_INSN is deleted, the label's LABEL_NUSES can
   be decremented and possibly the label can be deleted.  */
#define JUMP_LABEL(INSN) XCEXP (INSN, 7, JUMP_INSN)
1883 :
1884 8689223 : inline rtx_insn *JUMP_LABEL_AS_INSN (const rtx_insn *insn)
1885 : {
1886 8689223 : return safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
1887 : }
1888 :
/* Methods of rtx_jump_insn.  */

/* Return this jump's JUMP_LABEL field as a plain rtx.  */
inline rtx rtx_jump_insn::jump_label () const
{
  return JUMP_LABEL (this);
}

/* Return this jump's target as a code label (null-safe cast).  */
inline rtx_code_label *rtx_jump_insn::jump_target () const
{
  return safe_as_a <rtx_code_label *> (JUMP_LABEL (this));
}

/* Store TARGET in this jump's JUMP_LABEL field.  Note this is a plain
   store; it does not adjust TARGET's LABEL_NUSES.  */
inline void rtx_jump_insn::set_jump_target (rtx_code_label *target)
{
  JUMP_LABEL (this) = target;
}

/* Once basic blocks are found, each CODE_LABEL starts a chain that
   goes through all the LABEL_REFs that jump to that label.  The chain
   eventually winds up at the CODE_LABEL: it is circular.  */
#define LABEL_REFS(LABEL) XCEXP (LABEL, 3, CODE_LABEL)
1910 :
1911 : /* Get the label that a LABEL_REF references. */
1912 : inline rtx_insn *
1913 90325275 : label_ref_label (const_rtx ref)
1914 : {
1915 90282311 : return as_a<rtx_insn *> (XCEXP (ref, 0, LABEL_REF));
1916 : }
1917 :
/* Set the label that LABEL_REF ref refers to.  */

inline void
set_label_ref_label (rtx ref, rtx_insn *label)
{
  XCEXP (ref, 0, LABEL_REF) = label;
}

/* For a REG rtx, REGNO extracts the register number.  REGNO can only
   be used on RHS.  Use SET_REGNO to change the value.  */
#define REGNO(RTX) (rhs_regno(RTX))
/* NOTE(review): routes through the dataflow machinery
   (df_ref_change_reg_with_loc) so df's use/def records stay consistent;
   contrast with set_regno_raw below, which bypasses df.  */
#define SET_REGNO(RTX, N) (df_ref_change_reg_with_loc (RTX, N))

/* Return the number of consecutive registers in a REG.  This is always
   1 for pseudo registers and is determined by TARGET_HARD_REGNO_NREGS for
   hard registers.  */
#define REG_NREGS(RTX) (REG_CHECK (RTX)->nregs)

/* ORIGINAL_REGNO holds the number the register originally had; for a
   pseudo register turned into a hard reg this will hold the old pseudo
   register number.  */
#define ORIGINAL_REGNO(RTX) \
  (RTL_FLAG_CHECK1 ("ORIGINAL_REGNO", (RTX), REG)->u2.original_regno)
1941 :
/* Force the REGNO macro to only be used on the rhs (REGNO expands to a
   function call, which is not assignable).  */
inline unsigned int
rhs_regno (const_rtx x)
{
  return REG_CHECK (x)->regno;
}

/* Return the final register in REG X plus one.  */
inline unsigned int
END_REGNO (const_rtx x)
{
  return REGNO (x) + REG_NREGS (x);
}
1955 :
1956 : /* Change the REGNO and REG_NREGS of REG X to the specified values,
1957 : bypassing the df machinery. */
1958 : inline void
1959 1679865549 : set_regno_raw (rtx x, unsigned int regno, unsigned int nregs)
1960 : {
1961 1679865549 : reg_info *reg = REG_CHECK (x);
1962 1679865549 : reg->regno = regno;
1963 1679865549 : reg->nregs = nregs;
1964 : }
1965 :
/* 1 if RTX is a reg or parallel that is the current function's return
   value.  */
#define REG_FUNCTION_VALUE_P(RTX)					\
  (RTL_FLAG_CHECK2 ("REG_FUNCTION_VALUE_P", (RTX), REG, PARALLEL)->return_val)

/* 1 if RTX is a reg that corresponds to a variable declared by the user.
   (Reuses the ->volatil bit, which has a different meaning on other
   rtx codes.)  */
#define REG_USERVAR_P(RTX)						\
  (RTL_FLAG_CHECK1 ("REG_USERVAR_P", (RTX), REG)->volatil)

/* 1 if RTX is a reg that holds a pointer value.  */
#define REG_POINTER(RTX)						\
  (RTL_FLAG_CHECK1 ("REG_POINTER", (RTX), REG)->frame_related)

/* 1 if RTX is a mem that holds a pointer value.  */
#define MEM_POINTER(RTX)						\
  (RTL_FLAG_CHECK1 ("MEM_POINTER", (RTX), MEM)->frame_related)

/* 1 if the given register REG corresponds to a hard register.  */
#define HARD_REGISTER_P(REG) HARD_REGISTER_NUM_P (REGNO (REG))

/* 1 if the given register number REG_NO corresponds to a hard register.  */
#define HARD_REGISTER_NUM_P(REG_NO) ((REG_NO) < FIRST_PSEUDO_REGISTER)

/* 1 if the given register REG corresponds to a virtual register.  */
#define VIRTUAL_REGISTER_P(REG) VIRTUAL_REGISTER_NUM_P (REGNO (REG))

/* 1 if the given register number REG_NO corresponds to a virtual register.  */
#define VIRTUAL_REGISTER_NUM_P(REG_NO) \
  IN_RANGE (REG_NO, FIRST_VIRTUAL_REGISTER, LAST_VIRTUAL_REGISTER)

/* For a CONST_INT rtx, INTVAL extracts the integer.  */
#define INTVAL(RTX) XCWINT (RTX, 0, CONST_INT)
#define UINTVAL(RTX) ((unsigned HOST_WIDE_INT) INTVAL (RTX))

/* For a CONST_WIDE_INT, CONST_WIDE_INT_NUNITS is the number of
   elements actually needed to represent the constant.
   CONST_WIDE_INT_ELT gets one of the elements.  0 is the least
   significant HOST_WIDE_INT.  */
#define CONST_WIDE_INT_VEC(RTX) HWIVEC_CHECK (RTX, CONST_WIDE_INT)
#define CONST_WIDE_INT_NUNITS(RTX) CWI_GET_NUM_ELEM (RTX)
#define CONST_WIDE_INT_ELT(RTX, N) CWI_ELT (RTX, N)

/* For a CONST_POLY_INT, CONST_POLY_INT_COEFFS gives access to the
   individual coefficients, in the form of a trailing_wide_ints structure.  */
#define CONST_POLY_INT_COEFFS(RTX) \
  (RTL_FLAG_CHECK1("CONST_POLY_INT_COEFFS", (RTX), \
		   CONST_POLY_INT)->u.cpi.coeffs)

/* For a CONST_DOUBLE:
#if TARGET_SUPPORTS_WIDE_INT == 0
   For a VOIDmode, there are two integers CONST_DOUBLE_LOW is the
   low-order word and ..._HIGH the high-order.
#endif
   For a float, there is a REAL_VALUE_TYPE structure, and
   CONST_DOUBLE_REAL_VALUE(r) is a pointer to it.  */
#define CONST_DOUBLE_LOW(r) XCMWINT (r, 0, CONST_DOUBLE, VOIDmode)
#define CONST_DOUBLE_HIGH(r) XCMWINT (r, 1, CONST_DOUBLE, VOIDmode)
#define CONST_DOUBLE_REAL_VALUE(r) \
  ((const struct real_value *) XCNMPRV (r, CONST_DOUBLE, VOIDmode))

#define CONST_FIXED_VALUE(r) \
  ((const struct fixed_value *) XCNMPFV (r, CONST_FIXED, VOIDmode))
#define CONST_FIXED_VALUE_HIGH(r) \
  ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.high))
#define CONST_FIXED_VALUE_LOW(r) \
  ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.low))

/* For a CONST_VECTOR, return element #n.  */
#define CONST_VECTOR_ELT(RTX, N) const_vector_elt (RTX, N)

/* See rtl.texi for a description of these macros.  */
#define CONST_VECTOR_NPATTERNS(RTX) \
  (RTL_FLAG_CHECK1 ("CONST_VECTOR_NPATTERNS", (RTX), CONST_VECTOR) \
   ->u2.const_vector.npatterns)

#define CONST_VECTOR_NELTS_PER_PATTERN(RTX) \
  (RTL_FLAG_CHECK1 ("CONST_VECTOR_NELTS_PER_PATTERN", (RTX), CONST_VECTOR) \
   ->u2.const_vector.nelts_per_pattern)

/* 1 if each pattern consists of a single repeated element (see rtl.texi).  */
#define CONST_VECTOR_DUPLICATE_P(RTX) \
  (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 1)

/* 1 if each pattern has three encoded elements (see rtl.texi).  */
#define CONST_VECTOR_STEPPED_P(RTX) \
  (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 3)

#define CONST_VECTOR_ENCODED_ELT(RTX, N) XCVECEXP (RTX, 0, N, CONST_VECTOR)
2052 :
2053 : /* Return the number of elements encoded directly in a CONST_VECTOR. */
2054 :
2055 : inline unsigned int
2056 7930954 : const_vector_encoded_nelts (const_rtx x)
2057 : {
2058 7930954 : return CONST_VECTOR_NPATTERNS (x) * CONST_VECTOR_NELTS_PER_PATTERN (x);
2059 : }
2060 :
/* For a CONST_VECTOR, return the number of elements in a vector.  */
#define CONST_VECTOR_NUNITS(RTX) GET_MODE_NUNITS (GET_MODE (RTX))

/* For a SUBREG rtx, SUBREG_REG extracts the value we want a subreg of.
   SUBREG_BYTE extracts the byte-number.  */

#define SUBREG_REG(RTX) XCEXP (RTX, 0, SUBREG)
#define SUBREG_BYTE(RTX) XCSUBREG (RTX, 1, SUBREG)

/* in rtlanal.cc */
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  A single
   fast instruction therefore has cost 4.  */
#define COSTS_N_INSNS(N) ((N) * 4)

/* Maximum cost of an rtl expression.  This value has the special meaning
   not to use an rtx with this cost under any circumstances.  */
#define MAX_COST INT_MAX
2079 :
2080 : /* Return true if CODE always has VOIDmode. */
2081 :
2082 : inline bool
2083 57232350 : always_void_p (enum rtx_code code)
2084 : {
2085 57232350 : switch (code)
2086 : {
2087 : case SET:
2088 : case PC:
2089 : case RETURN:
2090 : case SIMPLE_RETURN:
2091 : return true;
2092 :
2093 33638056 : default:
2094 33638056 : return false;
2095 : }
2096 : }
2097 :
/* A structure to hold all available cost information about an rtl
   expression: one cost for when optimizing for speed and one for when
   optimizing for size.  */
struct full_rtx_costs
{
  int speed;	/* Cost when optimizing for speed.  */
  int size;	/* Cost when optimizing for size.  */
};
2105 :
2106 : /* Initialize a full_rtx_costs structure C to the maximum cost. */
2107 : inline void
2108 1850787 : init_costs_to_max (struct full_rtx_costs *c)
2109 : {
2110 1850787 : c->speed = MAX_COST;
2111 1850787 : c->size = MAX_COST;
2112 : }
2113 :
2114 : /* Initialize a full_rtx_costs structure C to zero cost. */
2115 : inline void
2116 6 : init_costs_to_zero (struct full_rtx_costs *c)
2117 : {
2118 6 : c->speed = 0;
2119 6 : c->size = 0;
2120 : }
2121 :
2122 : /* Compare two full_rtx_costs structures A and B, returning true
2123 : if A < B when optimizing for speed. */
2124 : inline bool
2125 2090990 : costs_lt_p (struct full_rtx_costs *a, struct full_rtx_costs *b,
2126 : bool speed)
2127 : {
2128 2090990 : if (speed)
2129 1373798 : return (a->speed < b->speed
2130 1390948 : || (a->speed == b->speed && a->size < b->size));
2131 : else
2132 717192 : return (a->size < b->size
2133 721674 : || (a->size == b->size && a->speed < b->speed));
2134 : }
2135 :
2136 : /* Increase both members of the full_rtx_costs structure C by the
2137 : cost of N insns. */
2138 : inline void
2139 492 : costs_add_n_insns (struct full_rtx_costs *c, int n)
2140 : {
2141 492 : c->speed += COSTS_N_INSNS (n);
2142 492 : c->size += COSTS_N_INSNS (n);
2143 : }
2144 :
/* Describes the shape of a subreg:

   inner_mode == the mode of the SUBREG_REG
   offset == the SUBREG_BYTE
   outer_mode == the mode of the SUBREG itself.  */
class subreg_shape {
public:
  subreg_shape (machine_mode, poly_uint16, machine_mode);
  bool operator == (const subreg_shape &) const;
  bool operator != (const subreg_shape &) const;
  unsigned HOST_WIDE_INT unique_id () const;

  machine_mode inner_mode;
  poly_uint16 offset;
  machine_mode outer_mode;
};

/* Construct a shape from its three components; see the class comment
   above for their meaning.  */
inline
subreg_shape::subreg_shape (machine_mode inner_mode_in,
			    poly_uint16 offset_in,
			    machine_mode outer_mode_in)
  : inner_mode (inner_mode_in), offset (offset_in), outer_mode (outer_mode_in)
{}
2168 :
2169 : inline bool
2170 10665005 : subreg_shape::operator == (const subreg_shape &other) const
2171 : {
2172 10665005 : return (inner_mode == other.inner_mode
2173 3944361 : && known_eq (offset, other.offset)
2174 14195840 : && outer_mode == other.outer_mode);
2175 : }
2176 :
/* Return true if the two shapes differ in any component.  */
inline bool
subreg_shape::operator != (const subreg_shape &other) const
{
  return !operator == (other);
}
2182 :
2183 : /* Return an integer that uniquely identifies this shape. Structures
2184 : like rtx_def assume that a mode can fit in an 8-bit bitfield and no
2185 : current mode is anywhere near being 65536 bytes in size, so the
2186 : id comfortably fits in an int. */
2187 :
2188 : inline unsigned HOST_WIDE_INT
2189 11263088 : subreg_shape::unique_id () const
2190 : {
2191 11263088 : { STATIC_ASSERT (MAX_MACHINE_MODE <= (1 << MACHINE_MODE_BITSIZE)); }
2192 11263088 : { STATIC_ASSERT (NUM_POLY_INT_COEFFS <= 3); }
2193 11263088 : { STATIC_ASSERT (sizeof (offset.coeffs[0]) <= 2); }
2194 11263088 : int res = (int) inner_mode + ((int) outer_mode << 8);
2195 11263088 : for (int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2196 11263088 : res += (HOST_WIDE_INT) offset.coeffs[i] << ((1 + i) * 16);
2197 3269383 : return res;
2198 : }
2199 :
/* Return the shape of a SUBREG rtx.  X must be a SUBREG (the accessor
   macros used here are code-checked).  */

inline subreg_shape
shape_of_subreg (const_rtx x)
{
  return subreg_shape (GET_MODE (SUBREG_REG (x)),
		       SUBREG_BYTE (x), GET_MODE (x));
}
2208 :
/* Information about an address.  This structure is supposed to be able
   to represent all supported target addresses.  Please extend it if it
   is not yet general enough.  Instances are filled in by the
   decompose_*_address routines declared later in this file.  */
struct address_info {
  /* The mode of the value being addressed, or VOIDmode if this is
     a load-address operation with no known address mode.  */
  machine_mode mode;

  /* The address space.  */
  addr_space_t as;

  /* True if this is an RTX_AUTOINC address.  */
  bool autoinc_p;

  /* A pointer to the top-level address.  */
  rtx *outer;

  /* A pointer to the inner address, after all address mutations
     have been stripped from the top-level address.  It can be one
     of the following:

     - A {PRE,POST}_{INC,DEC} of *BASE.  SEGMENT, INDEX and DISP are null.

     - A {PRE,POST}_MODIFY of *BASE.  In this case either INDEX or DISP
       points to the step value, depending on whether the step is variable
       or constant respectively.  SEGMENT is null.

     - A plain sum of the form SEGMENT + BASE + INDEX + DISP,
       with null fields evaluating to 0.  */
  rtx *inner;

  /* Components that make up *INNER.  Each one may be null or nonnull.
     When nonnull, their meanings are as follows:

     - *SEGMENT is the "segment" of memory to which the address refers.
       This value is entirely target-specific and is only called a "segment"
       because that's its most typical use.  It contains exactly one UNSPEC,
       pointed to by SEGMENT_TERM.  The contents of *SEGMENT do not need
       reloading.

     - *BASE is a variable expression representing a base address.
       It contains exactly one "term", pointed to by BASE_TERM.
       This term can be one of the following:

       (1) a REG, or a SUBREG of a REG
       (2) an eliminated REG (a PLUS of (1) and a constant)
       (3) a MEM, or a SUBREG of a MEM
       (4) a SCRATCH

       This term is the one that base_reg_class constrains.

     - *INDEX is a variable expression representing an index value.
       It may be a scaled expression, such as a MULT.  It has exactly
       one "term", pointed to by INDEX_TERM.  The possible terms are
       the same as for BASE.  This term is the one that index_reg_class
       constrains.

     - *DISP is a constant, possibly mutated.  DISP_TERM points to the
       unmutated RTX_CONST_OBJ.  */
  rtx *segment;
  rtx *base;
  rtx *index;
  rtx *disp;

  /* Pointers to the single "term" within each nonnull component above.  */
  rtx *segment_term;
  rtx *base_term;
  rtx *index_term;
  rtx *disp_term;

  /* In a {PRE,POST}_MODIFY address, this points to a second copy
     of BASE_TERM, otherwise it is null.  */
  rtx *base_term2;

  /* ADDRESS if this structure describes an address operand, MEM if
     it describes a MEM address.  */
  enum rtx_code addr_outer_code;

  /* If BASE is nonnull, this is the code of the rtx that contains it.  */
  enum rtx_code base_outer_code;
};
2289 :
/* This is used to bundle an rtx and a mode together so that the pair
   can be used with the wi:: routines.  If we ever put modes into rtx
   integer constants, this should go away and then just pass an rtx in.  */
typedef std::pair <rtx, machine_mode> rtx_mode_t;

namespace wi
{
  /* Tell the wide-int machinery how to read an (rtx, mode) pair.  */
  template <>
  struct int_traits <rtx_mode_t>
  {
    static const enum precision_type precision_type = VAR_PRECISION;
    static const bool host_dependent_precision = false;
    /* This ought to be true, except for the special case that BImode
       is canonicalized to STORE_FLAG_VALUE, which might be 1.  */
    static const bool is_sign_extended = false;
    static const bool needs_write_val_arg = false;
    static unsigned int get_precision (const rtx_mode_t &);
    static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int,
				      const rtx_mode_t &);
  };
}

/* Return the precision in bits of X's mode, which must be a scalar mode.  */
inline unsigned int
wi::int_traits <rtx_mode_t>::get_precision (const rtx_mode_t &x)
{
  return GET_MODE_PRECISION (as_a <scalar_mode> (x.second));
}
2317 :
/* Decompose the rtx constant in X into the raw HOST_WIDE_INT storage
   form used by the wide-int machinery.  PRECISION must equal
   get_precision (X).  */
inline wi::storage_ref
wi::int_traits <rtx_mode_t>::decompose (HOST_WIDE_INT *,
					unsigned int precision,
					const rtx_mode_t &x)
{
  gcc_checking_assert (precision == get_precision (x));
  switch (GET_CODE (x.first))
    {
    case CONST_INT:
      if (precision < HOST_BITS_PER_WIDE_INT)
	/* Nonzero BImodes are stored as STORE_FLAG_VALUE, which on many
	   targets is 1 rather than -1.  */
	gcc_checking_assert (INTVAL (x.first)
			     == sext_hwi (INTVAL (x.first), precision)
			     || (x.second == BImode && INTVAL (x.first) == 1));

      /* A CONST_INT is a single canonicalized HOST_WIDE_INT.  */
      return wi::storage_ref (&INTVAL (x.first), 1, precision);

    case CONST_WIDE_INT:
      return wi::storage_ref (&CONST_WIDE_INT_ELT (x.first, 0),
			      CONST_WIDE_INT_NUNITS (x.first), precision);

#if TARGET_SUPPORTS_WIDE_INT == 0
    case CONST_DOUBLE:
      /* Legacy representation: two HOST_WIDE_INTs (low, high).  */
      return wi::storage_ref (&CONST_DOUBLE_LOW (x.first), 2, precision);
#endif

    default:
      gcc_unreachable ();
    }
}
2349 :
namespace wi
{
  /* Mode-taking convenience overloads of the precision-taking
     wide-int routines.  */
  hwi_with_prec shwi (HOST_WIDE_INT, machine_mode mode);
  wide_int min_value (machine_mode, signop);
  wide_int max_value (machine_mode, signop);
}

/* Wrap VAL with the precision of scalar mode MODE for use with the
   wi:: routines.  */
inline wi::hwi_with_prec
wi::shwi (HOST_WIDE_INT val, machine_mode mode)
{
  return shwi (val, GET_MODE_PRECISION (as_a <scalar_mode> (mode)));
}

/* Produce the smallest number that is represented in MODE.  The precision
   is taken from MODE and the sign from SGN.  */
inline wide_int
wi::min_value (machine_mode mode, signop sgn)
{
  return min_value (GET_MODE_PRECISION (as_a <scalar_mode> (mode)), sgn);
}

/* Produce the largest number that is represented in MODE.  The precision
   is taken from MODE and the sign from SGN.  */
inline wide_int
wi::max_value (machine_mode mode, signop sgn)
{
  return max_value (GET_MODE_PRECISION (as_a <scalar_mode> (mode)), sgn);
}
2378 :
namespace wi
{
  /* A poly_int whose coefficients are wide-int references (see
     wide_int_ref_storage), used to read rtx constants.  */
  typedef poly_int<NUM_POLY_INT_COEFFS,
		   generic_wide_int <wide_int_ref_storage <false, false> > >
    rtx_to_poly_wide_ref;
  rtx_to_poly_wide_ref to_poly_wide (const_rtx, machine_mode);
}

/* Return the value of a CONST_POLY_INT in its native precision.  */

inline wi::rtx_to_poly_wide_ref
const_poly_int_value (const_rtx x)
{
  /* Reference each stored coefficient in turn.  */
  poly_int<NUM_POLY_INT_COEFFS, WIDE_INT_REF_FOR (wide_int)> res;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i];
  return res;
}
2397 :
/* Return true if X is a scalar integer or a CONST_POLY_INT.  The value
   can then be extracted using wi::to_poly_wide.  */

inline bool
poly_int_rtx_p (const_rtx x)
{
  return CONST_SCALAR_INT_P (x) || CONST_POLY_INT_P (x);
}

/* Access X (which satisfies poly_int_rtx_p) as a poly_wide_int.
   MODE is the mode of X.  */

inline wi::rtx_to_poly_wide_ref
wi::to_poly_wide (const_rtx x, machine_mode mode)
{
  if (CONST_POLY_INT_P (x))
    return const_poly_int_value (x);
  /* Scalar integer: a single-coefficient value read in MODE.  */
  return rtx_mode_t (const_cast<rtx> (x), mode);
}
2417 :
/* Return the value of X as a poly_int64.  X must be a CONST_INT or a
   CONST_POLY_INT (see poly_int_rtx_p below for the checked variant).  */

inline poly_int64
rtx_to_poly_int64 (const_rtx x)
{
  if (CONST_POLY_INT_P (x))
    {
      poly_int64 res;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi ();
      return res;
    }
  return INTVAL (x);
}

/* Return true if arbitrary value X is an integer constant that can
   be represented as a poly_int64.  Store the value in *RES if so,
   otherwise leave it unmodified.  */

inline bool
poly_int_rtx_p (const_rtx x, poly_int64 *res)
{
  if (CONST_INT_P (x))
    {
      *res = INTVAL (x);
      return true;
    }
  if (CONST_POLY_INT_P (x))
    {
      /* Verify every coefficient fits before writing anything, so that
	 *RES is untouched on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!wi::fits_shwi_p (CONST_POLY_INT_COEFFS (x)[i]))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	res->coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi ();
      return true;
    }
  return false;
}
2456 :
2457 : extern void init_rtlanal (void);
2458 : extern int rtx_cost (rtx, machine_mode, enum rtx_code, int, bool);
2459 : extern int address_cost (rtx, machine_mode, addr_space_t, bool);
2460 : extern void get_full_rtx_cost (rtx, machine_mode, enum rtx_code, int,
2461 : struct full_rtx_costs *);
2462 : extern bool native_encode_rtx (machine_mode, rtx, vec<target_unit> &,
2463 : unsigned int, unsigned int);
2464 : extern wide_int native_decode_int (const vec<target_unit> &, unsigned int,
2465 : unsigned int, unsigned int);
2466 : extern rtx native_decode_rtx (machine_mode, const vec<target_unit> &,
2467 : unsigned int);
2468 : extern rtx native_decode_vector_rtx (machine_mode, const vec<target_unit> &,
2469 : unsigned int, unsigned int, unsigned int);
2470 : extern poly_uint64 subreg_lsb (const_rtx);
2471 : extern poly_uint64 subreg_size_lsb (poly_uint64, poly_uint64, poly_uint64);
2472 : extern poly_uint64 subreg_size_offset_from_lsb (poly_uint64, poly_uint64,
2473 : poly_uint64);
2474 : extern bool read_modify_subreg_p (const_rtx);
2475 :
/* Given a subreg's OUTER_MODE, INNER_MODE, and SUBREG_BYTE, return the
   bit offset at which the subreg begins (counting from the least significant
   bit of the operand).  Delegates to the size-based helper declared
   above.  */

inline poly_uint64
subreg_lsb_1 (machine_mode outer_mode, machine_mode inner_mode,
	      poly_uint64 subreg_byte)
{
  return subreg_size_lsb (GET_MODE_SIZE (outer_mode),
			  GET_MODE_SIZE (inner_mode), subreg_byte);
}

/* Return the subreg byte offset for a subreg whose outer mode is
   OUTER_MODE, whose inner mode is INNER_MODE, and where there are
   LSB_SHIFT *bits* between the lsb of the outer value and the lsb of
   the inner value.  This is the inverse of subreg_lsb_1 (which converts
   byte offsets to bit shifts).  */

inline poly_uint64
subreg_offset_from_lsb (machine_mode outer_mode,
			machine_mode inner_mode,
			poly_uint64 lsb_shift)
{
  return subreg_size_offset_from_lsb (GET_MODE_SIZE (outer_mode),
				      GET_MODE_SIZE (inner_mode), lsb_shift);
}
2502 :
2503 : extern unsigned int subreg_regno_offset (unsigned int, machine_mode,
2504 : poly_uint64, machine_mode);
2505 : extern bool subreg_offset_representable_p (unsigned int, machine_mode,
2506 : poly_uint64, machine_mode);
2507 : extern unsigned int subreg_regno (const_rtx);
2508 : extern int simplify_subreg_regno (unsigned int, machine_mode,
2509 : poly_uint64, machine_mode,
2510 : bool allow_stack_regs = false);
2511 : extern int lowpart_subreg_regno (unsigned int, machine_mode,
2512 : machine_mode);
2513 : extern unsigned int subreg_nregs (const_rtx);
2514 : extern unsigned int subreg_nregs_with_regno (unsigned int, const_rtx);
2515 : extern unsigned HOST_WIDE_INT nonzero_bits (const_rtx, machine_mode);
2516 : extern unsigned int num_sign_bit_copies (const_rtx, machine_mode);
2517 : extern bool constant_pool_constant_p (rtx);
2518 : extern bool truncated_to_mode (machine_mode, const_rtx);
2519 : extern int low_bitmask_len (machine_mode, unsigned HOST_WIDE_INT);
2520 : extern void split_double (rtx, rtx *, rtx *);
2521 : extern rtx *strip_address_mutations (rtx *, enum rtx_code * = 0);
2522 : extern void decompose_address (struct address_info *, rtx *,
2523 : machine_mode, addr_space_t, enum rtx_code);
2524 : extern void decompose_lea_address (struct address_info *, rtx *);
2525 : extern void decompose_mem_address (struct address_info *, rtx);
2526 : extern void update_address (struct address_info *);
2527 : extern HOST_WIDE_INT get_index_scale (const struct address_info *);
2528 : extern enum rtx_code get_index_code (const struct address_info *);
2529 :
2530 : /* 1 if RTX is a subreg containing a reg that is already known to be
2531 : sign- or zero-extended from the mode of the subreg to the mode of
2532 : the reg. SUBREG_PROMOTED_UNSIGNED_P gives the signedness of the
2533 : extension.
2534 :
   When used as a LHS, it means that this extension must be done
   when assigning to SUBREG_REG.  */
2537 :
2538 : #define SUBREG_PROMOTED_VAR_P(RTX) \
2539 : (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED", (RTX), SUBREG)->in_struct)
2540 :
/* Valid for subregs which are SUBREG_PROMOTED_VAR_P().  In that case
   this gives the necessary extensions:
   0  - signed (SRP_SIGNED)
   1  - normal unsigned (SRP_UNSIGNED)
   2  - value is both sign- and unsigned-extended for mode
	(SRP_SIGNED_AND_UNSIGNED).
   -1 - pointer unsigned, which most often can be handled like unsigned
	extension, except for generating instructions where we need to
	emit special code (ptr_extend insns) on some architectures
	(SRP_POINTER).  */
2551 :
2552 : const int SRP_POINTER = -1;
2553 : const int SRP_SIGNED = 0;
2554 : const int SRP_UNSIGNED = 1;
2555 : const int SRP_SIGNED_AND_UNSIGNED = 2;
2556 :
2557 : /* Sets promoted mode for SUBREG_PROMOTED_VAR_P(). */
2558 : #define SUBREG_PROMOTED_SET(RTX, VAL) \
2559 : do { \
2560 : rtx const _rtx = RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SET", \
2561 : (RTX), SUBREG); \
2562 : switch (VAL) \
2563 : { \
2564 : case SRP_POINTER: \
2565 : _rtx->volatil = 0; \
2566 : _rtx->unchanging = 0; \
2567 : break; \
2568 : case SRP_SIGNED: \
2569 : _rtx->volatil = 0; \
2570 : _rtx->unchanging = 1; \
2571 : break; \
2572 : case SRP_UNSIGNED: \
2573 : _rtx->volatil = 1; \
2574 : _rtx->unchanging = 0; \
2575 : break; \
2576 : case SRP_SIGNED_AND_UNSIGNED: \
2577 : _rtx->volatil = 1; \
2578 : _rtx->unchanging = 1; \
2579 : break; \
2580 : } \
2581 : } while (0)
2582 :
2583 : /* Gets the value stored in promoted mode for SUBREG_PROMOTED_VAR_P(),
2584 : including SRP_SIGNED_AND_UNSIGNED if promoted for
2585 : both signed and unsigned. */
2586 : #define SUBREG_PROMOTED_GET(RTX) \
2587 : (2 * (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_GET", (RTX), SUBREG)->volatil)\
2588 : + (RTX)->unchanging - 1)
2589 :
2590 : /* Returns sign of promoted mode for SUBREG_PROMOTED_VAR_P(). */
2591 : #define SUBREG_PROMOTED_SIGN(RTX) \
2592 : ((RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGN", (RTX), SUBREG)->volatil) ? 1\
2593 : : (RTX)->unchanging - 1)
2594 :
2595 : /* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
2596 : for SIGNED type. */
2597 : #define SUBREG_PROMOTED_SIGNED_P(RTX) \
2598 : (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGNED_P", (RTX), SUBREG)->unchanging)
2599 :
2600 : /* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
2601 : for UNSIGNED type. */
2602 : #define SUBREG_PROMOTED_UNSIGNED_P(RTX) \
2603 : (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_UNSIGNED_P", (RTX), SUBREG)->volatil)
2604 :
2605 : /* Checks if RTX of SUBREG_PROMOTED_VAR_P() is promoted for given SIGN. */
2606 : #define SUBREG_CHECK_PROMOTED_SIGN(RTX, SIGN) \
2607 : ((SIGN) == SRP_POINTER ? SUBREG_PROMOTED_GET (RTX) == SRP_POINTER \
2608 : : (SIGN) == SRP_SIGNED ? SUBREG_PROMOTED_SIGNED_P (RTX) \
2609 : : SUBREG_PROMOTED_UNSIGNED_P (RTX))
2610 :
2611 : /* True if the REG is the static chain register for some CALL_INSN. */
2612 : #define STATIC_CHAIN_REG_P(RTX) \
2613 : (RTL_FLAG_CHECK1 ("STATIC_CHAIN_REG_P", (RTX), REG)->jump)
2614 :
2615 : /* True if the subreg was generated by LRA for reload insns. Such
2616 : subregs are valid only during LRA. */
2617 : #define LRA_SUBREG_P(RTX) \
2618 : (RTL_FLAG_CHECK1 ("LRA_SUBREG_P", (RTX), SUBREG)->jump)
2619 :
2620 : /* Access various components of an ASM_OPERANDS rtx. */
2621 :
2622 : #define ASM_OPERANDS_TEMPLATE(RTX) XCSTR (RTX, 0, ASM_OPERANDS)
2623 : #define ASM_OPERANDS_OUTPUT_CONSTRAINT(RTX) XCSTR (RTX, 1, ASM_OPERANDS)
2624 : #define ASM_OPERANDS_OUTPUT_IDX(RTX) XCINT (RTX, 2, ASM_OPERANDS)
2625 : #define ASM_OPERANDS_INPUT_VEC(RTX) XCVEC (RTX, 3, ASM_OPERANDS)
2626 : #define ASM_OPERANDS_INPUT_CONSTRAINT_VEC(RTX) XCVEC (RTX, 4, ASM_OPERANDS)
2627 : #define ASM_OPERANDS_INPUT(RTX, N) XCVECEXP (RTX, 3, N, ASM_OPERANDS)
2628 : #define ASM_OPERANDS_INPUT_LENGTH(RTX) XCVECLEN (RTX, 3, ASM_OPERANDS)
2629 : #define ASM_OPERANDS_INPUT_CONSTRAINT_EXP(RTX, N) \
2630 : XCVECEXP (RTX, 4, N, ASM_OPERANDS)
2631 : #define ASM_OPERANDS_INPUT_CONSTRAINT(RTX, N) \
2632 : XSTR (XCVECEXP (RTX, 4, N, ASM_OPERANDS), 0)
2633 : #define ASM_OPERANDS_INPUT_MODE(RTX, N) \
2634 : GET_MODE (XCVECEXP (RTX, 4, N, ASM_OPERANDS))
2635 : #define ASM_OPERANDS_LABEL_VEC(RTX) XCVEC (RTX, 5, ASM_OPERANDS)
2636 : #define ASM_OPERANDS_LABEL_LENGTH(RTX) XCVECLEN (RTX, 5, ASM_OPERANDS)
2637 : #define ASM_OPERANDS_LABEL(RTX, N) XCVECEXP (RTX, 5, N, ASM_OPERANDS)
2638 : #define ASM_OPERANDS_SOURCE_LOCATION(RTX) XCLOC (RTX, 6, ASM_OPERANDS)
2639 : #define ASM_INPUT_SOURCE_LOCATION(RTX) XCLOC (RTX, 1, ASM_INPUT)
2640 :
2641 : /* 1 if RTX is a mem that is statically allocated in read-only memory. */
2642 : #define MEM_READONLY_P(RTX) \
2643 : (RTL_FLAG_CHECK1 ("MEM_READONLY_P", (RTX), MEM)->unchanging)
2644 :
/* 1 if RTX is a mem and we should keep the alias set for this mem
   unchanged when we access a component.  Set to 1, for example, when we
   are already in a non-addressable component of an aggregate.  */
2648 : #define MEM_KEEP_ALIAS_SET_P(RTX) \
2649 : (RTL_FLAG_CHECK1 ("MEM_KEEP_ALIAS_SET_P", (RTX), MEM)->jump)
2650 :
2651 : /* 1 if RTX is a mem or asm_operand for a volatile reference. */
2652 : #define MEM_VOLATILE_P(RTX) \
2653 : (RTL_FLAG_CHECK3 ("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS, \
2654 : ASM_INPUT)->volatil)
2655 :
2656 : /* 1 if RTX is a mem that cannot trap. */
2657 : #define MEM_NOTRAP_P(RTX) \
2658 : (RTL_FLAG_CHECK1 ("MEM_NOTRAP_P", (RTX), MEM)->call)
2659 :
2660 : /* The memory attribute block. We provide access macros for each value
2661 : in the block and provide defaults if none specified. */
2662 : #define MEM_ATTRS(RTX) X0MEMATTR (RTX, 1)
2663 :
2664 : /* The register attribute block. We provide access macros for each value
2665 : in the block and provide defaults if none specified. */
2666 : #define REG_ATTRS(RTX) (REG_CHECK (RTX)->attrs)
2667 :
2668 : #ifndef GENERATOR_FILE
2669 : /* For a MEM rtx, the alias set. If 0, this MEM is not in any alias
2670 : set, and may alias anything. Otherwise, the MEM can only alias
2671 : MEMs in a conflicting alias set. This value is set in a
2672 : language-dependent manner in the front-end, and should not be
2673 : altered in the back-end. These set numbers are tested with
2674 : alias_sets_conflict_p. */
2675 : #define MEM_ALIAS_SET(RTX) (get_mem_attrs (RTX)->alias)
2676 :
2677 : /* For a MEM rtx, the decl it is known to refer to, if it is known to
2678 : refer to part of a DECL. It may also be a COMPONENT_REF. */
2679 : #define MEM_EXPR(RTX) (get_mem_attrs (RTX)->expr)
2680 :
2681 : /* For a MEM rtx, true if its MEM_OFFSET is known. */
2682 : #define MEM_OFFSET_KNOWN_P(RTX) (get_mem_attrs (RTX)->offset_known_p)
2683 :
2684 : /* For a MEM rtx, the offset from the start of MEM_EXPR. */
2685 : #define MEM_OFFSET(RTX) (get_mem_attrs (RTX)->offset)
2686 :
2687 : /* For a MEM rtx, the address space. */
2688 : #define MEM_ADDR_SPACE(RTX) (get_mem_attrs (RTX)->addrspace)
2689 :
2690 : /* For a MEM rtx, true if its MEM_SIZE is known. */
2691 : #define MEM_SIZE_KNOWN_P(RTX) (get_mem_attrs (RTX)->size_known_p)
2692 :
2693 : /* For a MEM rtx, the size in bytes of the MEM. */
2694 : #define MEM_SIZE(RTX) (get_mem_attrs (RTX)->size)
2695 :
2696 : /* For a MEM rtx, the alignment in bits. We can use the alignment of the
2697 : mode as a default when STRICT_ALIGNMENT, but not if not. */
2698 : #define MEM_ALIGN(RTX) (get_mem_attrs (RTX)->align)
2699 : #else
2700 : #define MEM_ADDR_SPACE(RTX) ADDR_SPACE_GENERIC
2701 : #endif
2702 :
2703 : /* For a REG rtx, the decl it is known to refer to, if it is known to
2704 : refer to part of a DECL. */
2705 : #define REG_EXPR(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->decl)
2706 :
2707 : /* For a REG rtx, the offset from the start of REG_EXPR, if known, as an
2708 : HOST_WIDE_INT. */
2709 : #define REG_OFFSET(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->offset)
2710 :
2711 : /* Copy the attributes that apply to memory locations from RHS to LHS. */
2712 : #define MEM_COPY_ATTRIBUTES(LHS, RHS) \
2713 : (MEM_VOLATILE_P (LHS) = MEM_VOLATILE_P (RHS), \
2714 : MEM_NOTRAP_P (LHS) = MEM_NOTRAP_P (RHS), \
2715 : MEM_READONLY_P (LHS) = MEM_READONLY_P (RHS), \
2716 : MEM_KEEP_ALIAS_SET_P (LHS) = MEM_KEEP_ALIAS_SET_P (RHS), \
2717 : MEM_POINTER (LHS) = MEM_POINTER (RHS), \
2718 : MEM_ATTRS (LHS) = MEM_ATTRS (RHS))
2719 :
2720 : /* 1 if RTX is a label_ref for a nonlocal label. */
2721 : /* Likewise in an expr_list for a REG_LABEL_OPERAND or
2722 : REG_LABEL_TARGET note. */
2723 : #define LABEL_REF_NONLOCAL_P(RTX) \
2724 : (RTL_FLAG_CHECK1 ("LABEL_REF_NONLOCAL_P", (RTX), LABEL_REF)->volatil)
2725 :
2726 : /* 1 if RTX is a code_label that should always be considered to be needed. */
2727 : #define LABEL_PRESERVE_P(RTX) \
2728 : (RTL_FLAG_CHECK2 ("LABEL_PRESERVE_P", (RTX), CODE_LABEL, NOTE)->in_struct)
2729 :
2730 : /* During sched, 1 if RTX is an insn that must be scheduled together
2731 : with the preceding insn. */
2732 : #define SCHED_GROUP_P(RTX) \
2733 : (RTL_FLAG_CHECK4 ("SCHED_GROUP_P", (RTX), DEBUG_INSN, INSN, \
2734 : JUMP_INSN, CALL_INSN)->in_struct)
2735 :
2736 : /* For a SET rtx, SET_DEST is the place that is set
2737 : and SET_SRC is the value it is set to. */
2738 : #define SET_DEST(RTX) XC2EXP (RTX, 0, SET, CLOBBER)
2739 : #define SET_SRC(RTX) XCEXP (RTX, 1, SET)
2740 : #define SET_IS_RETURN_P(RTX) \
2741 : (RTL_FLAG_CHECK1 ("SET_IS_RETURN_P", (RTX), SET)->jump)
2742 :
2743 : /* For a TRAP_IF rtx, TRAP_CONDITION is an expression. */
2744 : #define TRAP_CONDITION(RTX) XCEXP (RTX, 0, TRAP_IF)
2745 : #define TRAP_CODE(RTX) XCEXP (RTX, 1, TRAP_IF)
2746 :
2747 : /* For a COND_EXEC rtx, COND_EXEC_TEST is the condition to base
2748 : conditionally executing the code on, COND_EXEC_CODE is the code
2749 : to execute if the condition is true. */
2750 : #define COND_EXEC_TEST(RTX) XCEXP (RTX, 0, COND_EXEC)
2751 : #define COND_EXEC_CODE(RTX) XCEXP (RTX, 1, COND_EXEC)
2752 :
2753 : /* 1 if RTX is a symbol_ref that addresses this function's rtl
2754 : constants pool. */
2755 : #define CONSTANT_POOL_ADDRESS_P(RTX) \
2756 : (RTL_FLAG_CHECK1 ("CONSTANT_POOL_ADDRESS_P", (RTX), SYMBOL_REF)->unchanging)
2757 :
2758 : /* 1 if RTX is a symbol_ref that addresses a value in the file's
2759 : tree constant pool. This information is private to varasm.cc. */
2760 : #define TREE_CONSTANT_POOL_ADDRESS_P(RTX) \
2761 : (RTL_FLAG_CHECK1 ("TREE_CONSTANT_POOL_ADDRESS_P", \
2762 : (RTX), SYMBOL_REF)->frame_related)
2763 :
2764 : /* Used if RTX is a symbol_ref, for machine-specific purposes. */
2765 : #define SYMBOL_REF_FLAG(RTX) \
2766 : (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAG", (RTX), SYMBOL_REF)->volatil)
2767 :
2768 : /* 1 if RTX is a symbol_ref that has been the library function in
2769 : emit_library_call. */
2770 : #define SYMBOL_REF_USED(RTX) \
2771 : (RTL_FLAG_CHECK1 ("SYMBOL_REF_USED", (RTX), SYMBOL_REF)->used)
2772 :
2773 : /* 1 if RTX is a symbol_ref for a weak symbol. */
2774 : #define SYMBOL_REF_WEAK(RTX) \
2775 : (RTL_FLAG_CHECK1 ("SYMBOL_REF_WEAK", (RTX), SYMBOL_REF)->return_val)
2776 :
2777 : /* A pointer attached to the SYMBOL_REF; either SYMBOL_REF_DECL or
2778 : SYMBOL_REF_CONSTANT. */
2779 : #define SYMBOL_REF_DATA(RTX) X0ANY ((RTX), 1)
2780 :
2781 : /* Set RTX's SYMBOL_REF_DECL to DECL. RTX must not be a constant
2782 : pool symbol. */
2783 : #define SET_SYMBOL_REF_DECL(RTX, DECL) \
2784 : (gcc_assert (!CONSTANT_POOL_ADDRESS_P (RTX)), X0TREE ((RTX), 1) = (DECL))
2785 :
2786 : /* The tree (decl or constant) associated with the symbol, or null. */
2787 : #define SYMBOL_REF_DECL(RTX) \
2788 : (CONSTANT_POOL_ADDRESS_P (RTX) ? NULL : X0TREE ((RTX), 1))
2789 :
2790 : /* Set RTX's SYMBOL_REF_CONSTANT to C. RTX must be a constant pool symbol. */
2791 : #define SET_SYMBOL_REF_CONSTANT(RTX, C) \
2792 : (gcc_assert (CONSTANT_POOL_ADDRESS_P (RTX)), X0CONSTANT ((RTX), 1) = (C))
2793 :
2794 : /* The rtx constant pool entry for a symbol, or null. */
2795 : #define SYMBOL_REF_CONSTANT(RTX) \
2796 : (CONSTANT_POOL_ADDRESS_P (RTX) ? X0CONSTANT ((RTX), 1) : NULL)
2797 :
2798 : /* A set of flags on a symbol_ref that are, in some respects, redundant with
2799 : information derivable from the tree decl associated with this symbol.
2800 : Except that we build a *lot* of SYMBOL_REFs that aren't associated with a
2801 : decl. In some cases this is a bug. But beyond that, it's nice to cache
2802 : this information to avoid recomputing it. Finally, this allows space for
2803 : the target to store more than one bit of information, as with
2804 : SYMBOL_REF_FLAG. */
2805 : #define SYMBOL_REF_FLAGS(RTX) \
2806 : (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAGS", (RTX), SYMBOL_REF) \
2807 : ->u2.symbol_ref_flags)
2808 :
2809 : /* These flags are common enough to be defined for all targets. They
2810 : are computed by the default version of targetm.encode_section_info. */
2811 :
2812 : /* Set if this symbol is a function. */
2813 : #define SYMBOL_FLAG_FUNCTION (1 << 0)
2814 : #define SYMBOL_REF_FUNCTION_P(RTX) \
2815 : ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_FUNCTION) != 0)
2816 : /* Set if targetm.binds_local_p is true. */
2817 : #define SYMBOL_FLAG_LOCAL (1 << 1)
2818 : #define SYMBOL_REF_LOCAL_P(RTX) \
2819 : ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_LOCAL) != 0)
2820 : /* Set if targetm.in_small_data_p is true. */
2821 : #define SYMBOL_FLAG_SMALL (1 << 2)
2822 : #define SYMBOL_REF_SMALL_P(RTX) \
2823 : ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_SMALL) != 0)
2824 : /* The three-bit field at [5:3] is true for TLS variables; use
2825 : SYMBOL_REF_TLS_MODEL to extract the field as an enum tls_model. */
2826 : #define SYMBOL_FLAG_TLS_SHIFT 3
2827 : #define SYMBOL_REF_TLS_MODEL(RTX) \
2828 : ((enum tls_model) ((SYMBOL_REF_FLAGS (RTX) >> SYMBOL_FLAG_TLS_SHIFT) & 7))
2829 : /* Set if this symbol is not defined in this translation unit. */
2830 : #define SYMBOL_FLAG_EXTERNAL (1 << 6)
2831 : #define SYMBOL_REF_EXTERNAL_P(RTX) \
2832 : ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0)
2833 : /* Set if this symbol has a block_symbol structure associated with it. */
2834 : #define SYMBOL_FLAG_HAS_BLOCK_INFO (1 << 7)
2835 : #define SYMBOL_REF_HAS_BLOCK_INFO_P(RTX) \
2836 : ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_HAS_BLOCK_INFO) != 0)
2837 : /* Set if this symbol is a section anchor. SYMBOL_REF_ANCHOR_P implies
2838 : SYMBOL_REF_HAS_BLOCK_INFO_P. */
2839 : #define SYMBOL_FLAG_ANCHOR (1 << 8)
2840 : #define SYMBOL_REF_ANCHOR_P(RTX) \
2841 : ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0)
2842 :
2843 : /* Subsequent bits are available for the target to use. */
2844 : #define SYMBOL_FLAG_MACH_DEP_SHIFT 9
2845 : #define SYMBOL_FLAG_MACH_DEP (1 << SYMBOL_FLAG_MACH_DEP_SHIFT)
2846 :
2847 : /* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the object_block
2848 : structure to which the symbol belongs, or NULL if it has not been
2849 : assigned a block. */
2850 : #define SYMBOL_REF_BLOCK(RTX) (BLOCK_SYMBOL_CHECK (RTX)->block)
2851 :
2852 : /* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the offset of RTX from
2853 : the first object in SYMBOL_REF_BLOCK (RTX). The value is negative if
2854 : RTX has not yet been assigned to a block, or it has not been given an
2855 : offset within that block. */
2856 : #define SYMBOL_REF_BLOCK_OFFSET(RTX) (BLOCK_SYMBOL_CHECK (RTX)->offset)
2857 :
2858 : /* True if RTX is flagged to be a scheduling barrier. */
2859 : #define PREFETCH_SCHEDULE_BARRIER_P(RTX) \
2860 : (RTL_FLAG_CHECK1 ("PREFETCH_SCHEDULE_BARRIER_P", (RTX), PREFETCH)->volatil)
2861 :
2862 : /* Indicate whether the machine has any sort of auto increment addressing.
2863 : If not, we can avoid checking for REG_INC notes. */
2864 :
2865 : #if (defined (HAVE_PRE_INCREMENT) || defined (HAVE_PRE_DECREMENT) \
2866 : || defined (HAVE_POST_INCREMENT) || defined (HAVE_POST_DECREMENT) \
2867 : || defined (HAVE_PRE_MODIFY_DISP) || defined (HAVE_POST_MODIFY_DISP) \
2868 : || defined (HAVE_PRE_MODIFY_REG) || defined (HAVE_POST_MODIFY_REG))
2869 : #define AUTO_INC_DEC 1
2870 : #else
2871 : #define AUTO_INC_DEC 0
2872 : #endif
2873 :
2874 : /* Define a macro to look for REG_INC notes,
2875 : but save time on machines where they never exist. */
2876 :
2877 : #if AUTO_INC_DEC
2878 : #define FIND_REG_INC_NOTE(INSN, REG) \
2879 : ((REG) != NULL_RTX && REG_P ((REG)) \
2880 : ? find_regno_note ((INSN), REG_INC, REGNO (REG)) \
2881 : : find_reg_note ((INSN), REG_INC, (REG)))
2882 : #else
2883 : #define FIND_REG_INC_NOTE(INSN, REG) 0
2884 : #endif
2885 :
2886 : #ifndef HAVE_PRE_INCREMENT
2887 : #define HAVE_PRE_INCREMENT 0
2888 : #endif
2889 :
2890 : #ifndef HAVE_PRE_DECREMENT
2891 : #define HAVE_PRE_DECREMENT 0
2892 : #endif
2893 :
2894 : #ifndef HAVE_POST_INCREMENT
2895 : #define HAVE_POST_INCREMENT 0
2896 : #endif
2897 :
2898 : #ifndef HAVE_POST_DECREMENT
2899 : #define HAVE_POST_DECREMENT 0
2900 : #endif
2901 :
2902 : #ifndef HAVE_POST_MODIFY_DISP
2903 : #define HAVE_POST_MODIFY_DISP 0
2904 : #endif
2905 :
2906 : #ifndef HAVE_POST_MODIFY_REG
2907 : #define HAVE_POST_MODIFY_REG 0
2908 : #endif
2909 :
2910 : #ifndef HAVE_PRE_MODIFY_DISP
2911 : #define HAVE_PRE_MODIFY_DISP 0
2912 : #endif
2913 :
2914 : #ifndef HAVE_PRE_MODIFY_REG
2915 : #define HAVE_PRE_MODIFY_REG 0
2916 : #endif
2917 :
2918 :
2919 : /* Some architectures do not have complete pre/post increment/decrement
2920 : instruction sets, or only move some modes efficiently. These macros
2921 : allow us to tune autoincrement generation. */
2922 :
2923 : #ifndef USE_LOAD_POST_INCREMENT
2924 : #define USE_LOAD_POST_INCREMENT(MODE) HAVE_POST_INCREMENT
2925 : #endif
2926 :
2927 : #ifndef USE_LOAD_POST_DECREMENT
2928 : #define USE_LOAD_POST_DECREMENT(MODE) HAVE_POST_DECREMENT
2929 : #endif
2930 :
2931 : #ifndef USE_LOAD_PRE_INCREMENT
2932 : #define USE_LOAD_PRE_INCREMENT(MODE) HAVE_PRE_INCREMENT
2933 : #endif
2934 :
2935 : #ifndef USE_LOAD_PRE_DECREMENT
2936 : #define USE_LOAD_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT
2937 : #endif
2938 :
2939 : #ifndef USE_STORE_POST_INCREMENT
2940 : #define USE_STORE_POST_INCREMENT(MODE) HAVE_POST_INCREMENT
2941 : #endif
2942 :
2943 : #ifndef USE_STORE_POST_DECREMENT
2944 : #define USE_STORE_POST_DECREMENT(MODE) HAVE_POST_DECREMENT
2945 : #endif
2946 :
2947 : #ifndef USE_STORE_PRE_INCREMENT
2948 : #define USE_STORE_PRE_INCREMENT(MODE) HAVE_PRE_INCREMENT
2949 : #endif
2950 :
2951 : #ifndef USE_STORE_PRE_DECREMENT
2952 : #define USE_STORE_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT
2953 : #endif
2954 :
2955 : /* Nonzero when we are generating CONCATs. */
2956 : extern int generating_concat_p;
2957 :
2958 : /* Nonzero when we are expanding trees to RTL. */
2959 : extern int currently_expanding_to_rtl;
2960 :
2961 : /* Generally useful functions. */
2962 :
2963 : #ifndef GENERATOR_FILE
2964 : /* Return the cost of SET X. SPEED_P is true if optimizing for speed
2965 : rather than size. */
2966 :
2967 : inline int
2968 43031433 : set_rtx_cost (rtx x, bool speed_p)
2969 : {
2970 42600977 : return rtx_cost (x, VOIDmode, INSN, 4, speed_p);
2971 : }
2972 :
/* Like set_rtx_cost, but return both the speed and size costs in C.  */

inline void
get_full_set_rtx_cost (rtx x, struct full_rtx_costs *c)
{
  /* Same context as set_rtx_cost: X is costed as the body of an INSN
     with operand number 4.  */
  get_full_rtx_cost (x, VOIDmode, INSN, 4, c);
}
2980 :
2981 : /* Return the cost of moving X into a register, relative to the cost
2982 : of a register move. SPEED_P is true if optimizing for speed rather
2983 : than size. */
2984 :
2985 : inline int
2986 2782616410 : set_src_cost (rtx x, machine_mode mode, bool speed_p)
2987 : {
2988 2781336122 : return rtx_cost (x, mode, SET, 1, speed_p);
2989 : }
2990 :
/* Like set_src_cost, but return both the speed and size costs in C.  */

inline void
get_full_set_src_cost (rtx x, machine_mode mode, struct full_rtx_costs *c)
{
  /* Operand 1 of a SET is the SET_SRC, as in set_src_cost.  */
  get_full_rtx_cost (x, mode, SET, 1, c);
}
2998 : #endif
2999 :
/* A convenience macro to validate the arguments of a zero_extract
   expression.  It determines whether SIZE lies inclusively within
   [1, RANGE], POS lies inclusively within [0, RANGE - 1],
   and the sum lies inclusively within [1, RANGE].  RANGE must be
   >= 1, but SIZE and POS may be negative.  */
3005 : #define EXTRACT_ARGS_IN_RANGE(SIZE, POS, RANGE) \
3006 : (IN_RANGE ((POS), 0, (unsigned HOST_WIDE_INT) (RANGE) - 1) \
3007 : && IN_RANGE ((SIZE), 1, (unsigned HOST_WIDE_INT) (RANGE) \
3008 : - (unsigned HOST_WIDE_INT)(POS)))
3009 :
3010 : /* In explow.cc */
3011 : extern HOST_WIDE_INT trunc_int_for_mode (HOST_WIDE_INT, machine_mode);
3012 : extern poly_int64 trunc_int_for_mode (poly_int64, machine_mode);
3013 : extern rtx plus_constant (machine_mode, rtx, poly_int64, bool = false);
3014 : extern HOST_WIDE_INT get_stack_check_protect (void);
3015 :
3016 : /* In rtl.cc */
3017 : extern rtx rtx_alloc (RTX_CODE CXX_MEM_STAT_INFO);
/* Zero RT's header and install CODE as its rtx code, returning RT.
   NOTE: the memset deliberately precedes PUT_CODE — presumably the
   code field lives within the RTX_HDR_SIZE bytes being cleared, so
   reordering these statements would lose the code.  */
inline rtx
rtx_init (rtx rt, RTX_CODE code)
{
  memset (rt, 0, RTX_HDR_SIZE);
  PUT_CODE (rt, code);
  return rt;
}
3025 : #define rtx_alloca(code) \
3026 : rtx_init ((rtx) alloca (RTX_CODE_SIZE ((code))), (code))
3027 : extern rtx rtx_alloc_stat_v (RTX_CODE MEM_STAT_DECL, int);
3028 : #define rtx_alloc_v(c, SZ) rtx_alloc_stat_v (c MEM_STAT_INFO, SZ)
3029 : #define const_wide_int_alloc(NWORDS) \
3030 : rtx_alloc_v (CONST_WIDE_INT, \
3031 : (sizeof (struct hwivec_def) \
3032 : + ((NWORDS)-1) * sizeof (HOST_WIDE_INT))) \
3033 :
3034 : extern rtvec rtvec_alloc (size_t);
3035 : extern rtvec shallow_copy_rtvec (rtvec);
3036 : extern bool shared_const_p (const_rtx);
3037 : extern rtx copy_rtx (rtx);
3038 : extern enum rtx_code classify_insn (rtx);
3039 : extern void dump_rtx_statistics (void);
3040 :
3041 : /* In emit-rtl.cc */
3042 :
/* Opcodes used in the bytecode generated by genemit.cc.  Some opcodes
   are followed by inline operands in the byte stream, as noted on each
   enumerator.  */
enum class expand_opcode {
  /* NULL_RTX.  */
  NO_RTX,

  /* A (match_operand N) or (match_dup N).  Followed by the operand number.  */
  MATCH_OPERAND,

  /* A (match_operator N) or (match_op_dup N) that preserves the original mode.
     Followed by the operand number.  */
  MATCH_OPERATOR,

  /* A (match_operator N) or (match_op_dup N) that overrides the original mode.
     Followed by the new mode and by the operand number.  */
  MATCH_OPERATOR_WITH_MODE,

  /* A (match_parallel N) or (match_par_dup N).  Followed by the operand
     number.  */
  MATCH_PARALLEL,

  /* A (clobber (reg:M R)).  Followed by M and R.  */
  CLOBBER_REG,

  /* FIRST_CODE + X represents a normal rtx with code X, so this
     enumerator must remain last.  */
  FIRST_CODE
};
3069 :
3070 : extern rtx expand_rtx (const uint8_t *, rtx *);
3071 : extern rtx_insn *complete_seq (const uint8_t *, rtx *);
3072 : extern rtx copy_rtx_if_shared (rtx);
3073 :
3074 : /* In rtl.cc */
3075 : extern unsigned int rtx_size (const_rtx);
3076 : extern rtx shallow_copy_rtx (const_rtx CXX_MEM_STAT_INFO);
3077 :
3078 : typedef bool (*rtx_equal_p_callback_function) (const_rtx *, const_rtx *,
3079 : rtx *, rtx *);
3080 : extern bool rtx_equal_p (const_rtx, const_rtx,
3081 : rtx_equal_p_callback_function = NULL);
3082 :
3083 : extern bool rtvec_all_equal_p (const_rtvec);
3084 : extern bool rtvec_series_p (rtvec, int);
3085 :
3086 : /* Return true if X is a vector constant with a duplicated element value. */
3087 :
3088 : inline bool
3089 300550090 : const_vec_duplicate_p (const_rtx x)
3090 : {
3091 300550090 : return (GET_CODE (x) == CONST_VECTOR
3092 728888 : && CONST_VECTOR_NPATTERNS (x) == 1
3093 703766 : && CONST_VECTOR_DUPLICATE_P (x));
3094 : }
3095 :
/* Return true if X is a vector constant with a duplicated element value.
   On success store the duplicated element in *ELT.  */

template <typename T>
inline bool
const_vec_duplicate_p (T x, T *elt)
{
  if (!const_vec_duplicate_p (x))
    return false;
  *elt = CONST_VECTOR_ENCODED_ELT (x, 0);
  return true;
}
3110 :
3111 : /* Return true if X is a vector with a duplicated element value, either
3112 : constant or nonconstant. Store the duplicated element in *ELT if so. */
3113 :
3114 : template <typename T>
3115 : inline bool
3116 111003412 : vec_duplicate_p (T x, T *elt)
3117 : {
3118 111003412 : if (GET_CODE (x) == VEC_DUPLICATE
3119 193224 : && !VECTOR_MODE_P (GET_MODE (XEXP (x, 0))))
3120 : {
3121 193217 : *elt = XEXP (x, 0);
3122 193217 : return true;
3123 : }
3124 111003412 : return const_vec_duplicate_p (x, elt);
3125 : }
3126 :
/* If X is a vector constant with a duplicated element value, return that
   element value, otherwise return X.  */

template <typename T>
inline T
unwrap_const_vec_duplicate (T x)
{
  if (const_vec_duplicate_p (x))
    return CONST_VECTOR_ELT (x, 0);
  return x;
}
3138 :
3139 : /* In emit-rtl.cc. */
3140 : extern wide_int const_vector_int_elt (const_rtx, unsigned int);
3141 : extern rtx const_vector_elt (const_rtx, unsigned int);
3142 : extern bool const_vec_series_p_1 (const_rtx, rtx *, rtx *);
3143 :
3144 : /* Return true if X is an integer constant vector that contains a linear
3145 : series of the form:
3146 :
3147 : { B, B + S, B + 2 * S, B + 3 * S, ... }
3148 :
3149 : for a nonzero S. Store B and S in *BASE_OUT and *STEP_OUT on sucess. */
3150 :
3151 : inline bool
3152 3815206 : const_vec_series_p (const_rtx x, rtx *base_out, rtx *step_out)
3153 : {
3154 3815206 : if (GET_CODE (x) == CONST_VECTOR
3155 3503 : && CONST_VECTOR_NPATTERNS (x) == 1
3156 3818204 : && !CONST_VECTOR_DUPLICATE_P (x))
3157 2998 : return const_vec_series_p_1 (x, base_out, step_out);
3158 : return false;
3159 : }
3160 :
3161 : /* Return true if X is a vector that contains a linear series of the
3162 : form:
3163 :
3164 : { B, B + S, B + 2 * S, B + 3 * S, ... }
3165 :
3166 : where B and S are constant or nonconstant. Store B and S in
3167 : *BASE_OUT and *STEP_OUT on sucess. */
3168 :
3169 : inline bool
3170 3815668 : vec_series_p (const_rtx x, rtx *base_out, rtx *step_out)
3171 : {
3172 3815668 : if (GET_CODE (x) == VEC_SERIES)
3173 : {
3174 462 : *base_out = XEXP (x, 0);
3175 462 : *step_out = XEXP (x, 1);
3176 462 : return true;
3177 : }
3178 3815206 : return const_vec_series_p (x, base_out, step_out);
3179 : }
3180 :
3181 : /* Return true if CONST_VECTORs X and Y, which are known to have the same mode,
3182 : also have the same encoding. This means that they are equal whenever their
3183 : operands are equal. */
3184 :
3185 : inline bool
3186 : same_vector_encodings_p (const_rtx x, const_rtx y)
3187 : {
3188 : /* Don't be fussy about the encoding of constant-length vectors,
3189 : since XVECEXP (X, 0) and XVECEXP (Y, 0) list all the elements anyway. */
3190 : if (poly_uint64 (CONST_VECTOR_NUNITS (x)).is_constant ())
3191 : return true;
3192 :
3193 : return (CONST_VECTOR_NPATTERNS (x) == CONST_VECTOR_NPATTERNS (y)
3194 : && (CONST_VECTOR_NELTS_PER_PATTERN (x)
3195 : == CONST_VECTOR_NELTS_PER_PATTERN (y)));
3196 : }
3197 :
3198 : /* Return the unpromoted (outer) mode of SUBREG_PROMOTED_VAR_P subreg X. */
3199 :
3200 : inline scalar_int_mode
3201 7 : subreg_unpromoted_mode (rtx x)
3202 : {
3203 7 : gcc_checking_assert (SUBREG_PROMOTED_VAR_P (x));
3204 7 : return as_a <scalar_int_mode> (GET_MODE (x));
3205 : }
3206 :
3207 : /* Return the promoted (inner) mode of SUBREG_PROMOTED_VAR_P subreg X. */
3208 :
3209 : inline scalar_int_mode
3210 14 : subreg_promoted_mode (rtx x)
3211 : {
3212 14 : gcc_checking_assert (SUBREG_PROMOTED_VAR_P (x));
3213 14 : return as_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)));
3214 : }
3215 :
3216 : /* In emit-rtl.cc */
3217 : extern rtvec gen_rtvec_v (int, rtx *);
3218 : extern rtvec gen_rtvec_v (int, rtx_insn **);
3219 : extern rtx gen_reg_rtx (machine_mode);
3220 : extern rtx gen_rtx_REG_offset (rtx, machine_mode, unsigned int, poly_int64);
3221 : extern rtx gen_reg_rtx_offset (rtx, machine_mode, int);
3222 : extern rtx gen_reg_rtx_and_attrs (rtx);
3223 : extern rtx_code_label *gen_label_rtx (void);
3224 : extern rtx gen_lowpart_common (machine_mode, rtx);
3225 :
3226 : /* In cse.cc */
3227 : extern rtx gen_lowpart_if_possible (machine_mode, rtx);
3228 :
3229 : /* In emit-rtl.cc */
3230 : extern rtx gen_highpart (machine_mode, rtx);
3231 : extern rtx gen_highpart_mode (machine_mode, machine_mode, rtx);
3232 : extern rtx operand_subword (rtx, poly_uint64, int, machine_mode);
3233 :
3234 : /* In emit-rtl.cc */
3235 : extern rtx operand_subword_force (rtx, poly_uint64, machine_mode);
3236 : extern bool subreg_lowpart_p (const_rtx);
3237 : extern poly_uint64 subreg_size_lowpart_offset (poly_uint64, poly_uint64);
3238 :
3239 : /* Return true if a subreg of mode OUTERMODE would only access part of
3240 : an inner register with mode INNERMODE. The other bits of the inner
3241 : register would then be "don't care" on read. The behavior for writes
3242 : depends on REGMODE_NATURAL_SIZE; bits in the same REGMODE_NATURAL_SIZE-d
3243 : chunk would be clobbered but other bits would be preserved. */
3244 :
3245 : inline bool
3246 895966056 : partial_subreg_p (machine_mode outermode, machine_mode innermode)
3247 : {
3248 : /* Modes involved in a subreg must be ordered. In particular, we must
3249 : always know at compile time whether the subreg is paradoxical. */
3250 879105727 : poly_int64 outer_prec = GET_MODE_PRECISION (outermode);
3251 895966056 : poly_int64 inner_prec = GET_MODE_PRECISION (innermode);
3252 895966056 : gcc_checking_assert (ordered_p (outer_prec, inner_prec));
3253 895882457 : return maybe_lt (outer_prec, inner_prec);
3254 : }
3255 :
3256 : /* Likewise return true if X is a subreg that is smaller than the inner
3257 : register. Use read_modify_subreg_p to test whether writing to such
3258 : a subreg preserves any part of the inner register. */
3259 :
3260 : inline bool
3261 59815667 : partial_subreg_p (const_rtx x)
3262 : {
3263 59397465 : if (GET_CODE (x) != SUBREG)
3264 : return false;
3265 10546378 : return partial_subreg_p (GET_MODE (x), GET_MODE (SUBREG_REG (x)));
3266 : }
3267 :
3268 : /* Return true if a subreg with the given outer and inner modes is
3269 : paradoxical. */
3270 :
3271 : inline bool
3272 288921531 : paradoxical_subreg_p (machine_mode outermode, machine_mode innermode)
3273 : {
3274 : /* Modes involved in a subreg must be ordered. In particular, we must
3275 : always know at compile time whether the subreg is paradoxical. */
3276 248394782 : poly_int64 outer_prec = GET_MODE_PRECISION (outermode);
3277 288921526 : poly_int64 inner_prec = GET_MODE_PRECISION (innermode);
3278 288921531 : gcc_checking_assert (ordered_p (outer_prec, inner_prec));
3279 288357099 : return maybe_gt (outer_prec, inner_prec);
3280 : }
3281 :
3282 : /* Return true if X is a paradoxical subreg, false otherwise. */
3283 :
3284 : inline bool
3285 499865265 : paradoxical_subreg_p (const_rtx x)
3286 : {
3287 489180516 : if (GET_CODE (x) != SUBREG)
3288 : return false;
3289 58565003 : return paradoxical_subreg_p (GET_MODE (x), GET_MODE (SUBREG_REG (x)));
3290 : }
3291 :
3292 : /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
3293 :
3294 : inline poly_uint64
3295 214732293 : subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
3296 : {
3297 429464586 : return subreg_size_lowpart_offset (GET_MODE_SIZE (outermode),
3298 214732293 : GET_MODE_SIZE (innermode));
3299 : }
3300 :
3301 : /* Given that a subreg has outer mode OUTERMODE and inner mode INNERMODE,
3302 : return the smaller of the two modes if they are different sizes,
3303 : otherwise return the outer mode. */
3304 :
3305 : inline machine_mode
3306 13199424 : narrower_subreg_mode (machine_mode outermode, machine_mode innermode)
3307 : {
3308 13199424 : return paradoxical_subreg_p (outermode, innermode) ? innermode : outermode;
3309 : }
3310 :
3311 : /* Given that a subreg has outer mode OUTERMODE and inner mode INNERMODE,
3312 : return the mode that is big enough to hold both the outer and inner
3313 : values. Prefer the outer mode in the event of a tie. */
3314 :
3315 : inline machine_mode
3316 114031692 : wider_subreg_mode (machine_mode outermode, machine_mode innermode)
3317 : {
3318 109922012 : return partial_subreg_p (outermode, innermode) ? innermode : outermode;
3319 : }
3320 :
3321 : /* Likewise for subreg X. */
3322 :
3323 : inline machine_mode
3324 9049572 : wider_subreg_mode (const_rtx x)
3325 : {
3326 13582137 : return wider_subreg_mode (GET_MODE (x), GET_MODE (SUBREG_REG (x)));
3327 : }
3328 :
3329 : extern poly_uint64 subreg_size_highpart_offset (poly_uint64, poly_uint64);
3330 :
3331 : /* Return the SUBREG_BYTE for an OUTERMODE highpart of an INNERMODE value. */
3332 :
3333 : inline poly_uint64
3334 42823 : subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
3335 : {
3336 85646 : return subreg_size_highpart_offset (GET_MODE_SIZE (outermode),
3337 42823 : GET_MODE_SIZE (innermode));
3338 : }
3339 :
3340 : extern poly_int64 byte_lowpart_offset (machine_mode, machine_mode);
3341 : extern poly_int64 subreg_memory_offset (machine_mode, machine_mode,
3342 : poly_uint64);
3343 : extern poly_int64 subreg_memory_offset (const_rtx);
3344 : extern rtx make_safe_from (rtx, rtx);
3345 : extern rtx convert_memory_address_addr_space_1 (scalar_int_mode, rtx,
3346 : addr_space_t, bool, bool);
3347 : extern rtx convert_memory_address_addr_space (scalar_int_mode, rtx,
3348 : addr_space_t);
3349 : #define convert_memory_address(to_mode,x) \
3350 : convert_memory_address_addr_space ((to_mode), (x), ADDR_SPACE_GENERIC)
3351 : extern const char *get_insn_name (int);
3352 : extern rtx_insn *get_last_insn_anywhere (void);
3353 : extern rtx_insn *get_first_nonnote_insn (void);
3354 : extern rtx_insn *get_last_nonnote_insn (void);
3355 : extern void start_sequence (void);
3356 : extern void push_to_sequence (rtx_insn *);
3357 : extern void push_to_sequence2 (rtx_insn *, rtx_insn *);
3358 : extern rtx_insn *end_sequence (void);
3359 : #if TARGET_SUPPORTS_WIDE_INT == 0
3360 : extern double_int rtx_to_double_int (const_rtx);
3361 : #endif
3362 : extern void cwi_output_hex (FILE *, const_rtx);
3363 : #if TARGET_SUPPORTS_WIDE_INT == 0
3364 : extern rtx immed_double_const (HOST_WIDE_INT, HOST_WIDE_INT,
3365 : machine_mode);
3366 : #endif
3367 : extern rtx immed_wide_int_const (const poly_wide_int_ref &, machine_mode);
3368 :
3369 : /* In varasm.cc */
3370 : extern rtx force_const_mem (machine_mode, rtx);
3371 :
3372 : /* In varasm.cc */
3373 :
3374 : struct function;
3375 : extern rtx get_pool_constant (const_rtx);
3376 : extern rtx get_pool_constant_mark (rtx, bool *);
3377 : extern fixed_size_mode get_pool_mode (const_rtx);
3378 : extern rtx simplify_subtraction (rtx);
3379 : extern void decide_function_section (tree);
3380 :
3381 : /* In emit-rtl.cc */
3382 : extern rtx_insn *emit_insn_before (rtx, rtx_insn *);
3383 : extern rtx_insn *emit_insn_before_noloc (rtx, rtx_insn *, basic_block);
3384 : extern rtx_insn *emit_insn_before_setloc (rtx, rtx_insn *, location_t);
3385 : extern rtx_jump_insn *emit_jump_insn_before (rtx, rtx_insn *);
3386 : extern rtx_jump_insn *emit_jump_insn_before_noloc (rtx, rtx_insn *);
3387 : extern rtx_jump_insn *emit_jump_insn_before_setloc (rtx, rtx_insn *,
3388 : location_t);
3389 : extern rtx_insn *emit_call_insn_before (rtx, rtx_insn *);
3390 : extern rtx_insn *emit_call_insn_before_noloc (rtx, rtx_insn *);
3391 : extern rtx_insn *emit_call_insn_before_setloc (rtx, rtx_insn *, location_t);
3392 : extern rtx_insn *emit_debug_insn_before (rtx, rtx_insn *);
3393 : extern rtx_insn *emit_debug_insn_before_noloc (rtx, rtx_insn *);
3394 : extern rtx_insn *emit_debug_insn_before_setloc (rtx, rtx_insn *, location_t);
3395 : extern rtx_barrier *emit_barrier_before (rtx_insn *);
3396 : extern rtx_code_label *emit_label_before (rtx_code_label *, rtx_insn *);
3397 : extern rtx_note *emit_note_before (enum insn_note, rtx_insn *);
3398 : extern rtx_insn *emit_insn_after (rtx, rtx_insn *);
3399 : extern rtx_insn *emit_insn_after_noloc (rtx, rtx_insn *, basic_block);
3400 : extern rtx_insn *emit_insn_after_setloc (rtx, rtx_insn *, location_t);
3401 : extern rtx_jump_insn *emit_jump_insn_after (rtx, rtx_insn *);
3402 : extern rtx_jump_insn *emit_jump_insn_after_noloc (rtx, rtx_insn *);
3403 : extern rtx_jump_insn *emit_jump_insn_after_setloc (rtx, rtx_insn *, location_t);
3404 : extern rtx_insn *emit_call_insn_after (rtx, rtx_insn *);
3405 : extern rtx_insn *emit_call_insn_after_noloc (rtx, rtx_insn *);
3406 : extern rtx_insn *emit_call_insn_after_setloc (rtx, rtx_insn *, location_t);
3407 : extern rtx_insn *emit_debug_insn_after (rtx, rtx_insn *);
3408 : extern rtx_insn *emit_debug_insn_after_noloc (rtx, rtx_insn *);
3409 : extern rtx_insn *emit_debug_insn_after_setloc (rtx, rtx_insn *, location_t);
3410 : extern rtx_barrier *emit_barrier_after (rtx_insn *);
3411 : extern rtx_insn *emit_label_after (rtx_insn *, rtx_insn *);
3412 : extern rtx_note *emit_note_after (enum insn_note, rtx_insn *);
3413 : extern rtx_insn *emit_insn (rtx);
3414 : extern rtx_insn *emit_debug_insn (rtx);
3415 : extern rtx_insn *emit_jump_insn (rtx);
3416 : extern rtx_insn *emit_likely_jump_insn (rtx);
3417 : extern rtx_insn *emit_unlikely_jump_insn (rtx);
3418 : extern rtx_insn *emit_call_insn (rtx);
3419 : extern rtx_code_label *emit_label (rtx);
3420 : extern rtx_jump_table_data *emit_jump_table_data (rtx);
3421 : extern rtx_barrier *emit_barrier (void);
3422 : extern rtx_note *emit_note (enum insn_note);
3423 : extern rtx_note *emit_note_copy (rtx_note *);
3424 : extern rtx_insn *gen_clobber (rtx);
3425 : extern rtx_insn *emit_clobber (rtx);
3426 : extern rtx_insn *gen_use (rtx);
3427 : extern rtx_insn *emit_use (rtx);
3428 : extern rtx_insn *make_insn_raw (rtx);
3429 : extern void add_function_usage_to (rtx, rtx);
3430 : extern rtx_call_insn *last_call_insn (void);
3431 : extern rtx_insn *previous_insn (rtx_insn *);
3432 : extern rtx_insn *next_insn (rtx_insn *);
3433 : extern rtx_insn *prev_nonnote_insn (rtx_insn *);
3434 : extern rtx_insn *next_nonnote_insn (rtx_insn *);
3435 : extern rtx_insn *prev_nondebug_insn (rtx_insn *);
3436 : extern rtx_insn *next_nondebug_insn (rtx_insn *);
3437 : extern rtx_insn *prev_nonnote_nondebug_insn (rtx_insn *);
3438 : extern rtx_insn *prev_nonnote_nondebug_insn_bb (rtx_insn *);
3439 : extern rtx_insn *next_nonnote_nondebug_insn (rtx_insn *);
3440 : extern rtx_insn *next_nonnote_nondebug_insn_bb (rtx_insn *);
3441 : extern rtx_insn *prev_real_insn (rtx_insn *);
3442 : extern rtx_insn *next_real_insn (rtx_insn *);
3443 : extern rtx_insn *prev_real_nondebug_insn (rtx_insn *);
3444 : extern rtx_insn *next_real_nondebug_insn (rtx);
3445 : extern rtx_insn *prev_active_insn (rtx_insn *);
3446 : extern rtx_insn *next_active_insn (rtx_insn *);
3447 : extern bool active_insn_p (const rtx_insn *);
3448 :
3449 : /* In emit-rtl.cc */
3450 : extern int insn_line (const rtx_insn *);
3451 : extern const char * insn_file (const rtx_insn *);
3452 : extern tree insn_scope (const rtx_insn *);
3453 : extern expanded_location insn_location (const rtx_insn *);
3454 : extern int insn_discriminator (const rtx_insn *);
3455 : extern location_t prologue_location, epilogue_location;
3456 :
3457 : /* In jump.cc */
3458 : extern enum rtx_code reverse_condition (enum rtx_code);
3459 : extern enum rtx_code reverse_condition_maybe_unordered (enum rtx_code);
3460 : extern enum rtx_code swap_condition (enum rtx_code);
3461 : extern enum rtx_code unsigned_condition (enum rtx_code);
3462 : extern enum rtx_code signed_condition (enum rtx_code);
3463 : extern void mark_jump_label (rtx, rtx_insn *, int);
3464 :
3465 : /* Return true if integer comparison operator CODE interprets its operands
3466 : as unsigned. */
3467 :
3468 : inline bool
3469 4662135 : unsigned_condition_p (enum rtx_code code)
3470 : {
3471 4662135 : return unsigned_condition (code) == code;
3472 : }
3473 :
3474 : /* In jump.cc */
3475 : extern rtx_insn *delete_related_insns (rtx);
3476 :
3477 : /* In recog.cc */
3478 : extern rtx *find_constant_term_loc (rtx *);
3479 :
3480 : /* In emit-rtl.cc */
3481 : extern rtx_insn *try_split (rtx, rtx_insn *, int);
3482 :
3483 : /* In insn-recog.cc (generated by genrecog). */
3484 : extern rtx_insn *split_insns (rtx, rtx_insn *);
3485 :
3486 : /* In simplify-rtx.cc */
3487 :
/* A class that records the context in which a simplification
   is being made.  */
class simplify_context
{
public:
  /* Attempt to simplify the given operation; all of these are defined
     in simplify-rtx.cc (see the section comment above).  */
  rtx simplify_unary_operation (rtx_code, machine_mode, rtx, machine_mode);
  rtx simplify_binary_operation (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_ternary_operation (rtx_code, machine_mode, machine_mode,
				  rtx, rtx, rtx);
  rtx simplify_relational_operation (rtx_code, machine_mode, machine_mode,
				     rtx, rtx);
  rtx simplify_ior_with_common_term (machine_mode, rtx, rtx);
  rtx simplify_subreg (machine_mode, rtx, machine_mode, poly_uint64);

  rtx lowpart_subreg (machine_mode, rtx, machine_mode);

  rtx simplify_merge_mask (rtx, rtx, int);

  /* "gen" variants of the simplifiers above.  NOTE(review): presumably
     these generate an rtx even when no simplification applies -- confirm
     against the definitions in simplify-rtx.cc.  */
  rtx simplify_gen_unary (rtx_code, machine_mode, rtx, machine_mode);
  rtx simplify_gen_binary (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_gen_ternary (rtx_code, machine_mode, machine_mode,
			    rtx, rtx, rtx);
  rtx simplify_gen_relational (rtx_code, machine_mode, machine_mode, rtx, rtx);
  rtx simplify_gen_subreg (machine_mode, rtx, machine_mode, poly_uint64);
  rtx simplify_gen_vec_select (rtx, unsigned int);

  /* Tracks the level of MEM nesting for the value being simplified:
     0 means the value is not in a MEM, >0 means it is.  This is needed
     because the canonical representation of multiplication is different
     inside a MEM than outside.  */
  unsigned int mem_depth = 0;

  /* Tracks number of simplify_associative_operation calls performed during
     outermost simplify* call.  */
  unsigned int assoc_count = 0;

  /* Limit for the above number, return NULL from
     simplify_associative_operation after we reach that assoc_count.  */
  static const unsigned int max_assoc_count = 64;

private:
  /* Implementation helpers for the public entry points above.  */
  rtx simplify_truncation (machine_mode, rtx, machine_mode);
  rtx simplify_byte_swapping_operation (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_associative_operation (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_distributive_operation (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_logical_relational_operation (rtx_code, machine_mode, rtx, rtx,
					     bool = false);
  rtx simplify_binary_operation_series (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_distribute_over_subregs (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_shift_const_int (rtx_code, machine_mode, rtx, unsigned int);
  rtx simplify_plus_minus (rtx_code, machine_mode, rtx, rtx);
  rtx simplify_cond_clz_ctz (rtx, rtx_code, rtx, rtx);

  rtx simplify_unary_operation_1 (rtx_code, machine_mode, rtx);
  rtx simplify_binary_operation_1 (rtx_code, machine_mode, rtx, rtx, rtx, rtx);
  rtx simplify_ternary_operation_1 (rtx_code, machine_mode, machine_mode,
				    rtx, rtx, rtx);
  rtx simplify_relational_operation_1 (rtx_code, machine_mode, machine_mode,
				       rtx, rtx);
};
3548 :
3549 : inline rtx
3550 18916042 : simplify_unary_operation (rtx_code code, machine_mode mode, rtx op,
3551 : machine_mode op_mode)
3552 : {
3553 18915766 : return simplify_context ().simplify_unary_operation (code, mode, op,
3554 : op_mode);
3555 : }
3556 :
3557 : inline rtx
3558 202238133 : simplify_binary_operation (rtx_code code, machine_mode mode, rtx op0, rtx op1)
3559 : {
3560 202236689 : return simplify_context ().simplify_binary_operation (code, mode, op0, op1);
3561 : }
3562 :
3563 : inline rtx
3564 34738012 : simplify_ternary_operation (rtx_code code, machine_mode mode,
3565 : machine_mode op0_mode, rtx op0, rtx op1, rtx op2)
3566 : {
3567 34738012 : return simplify_context ().simplify_ternary_operation (code, mode, op0_mode,
3568 68924 : op0, op1, op2);
3569 : }
3570 :
3571 : inline rtx
3572 62069280 : simplify_relational_operation (rtx_code code, machine_mode mode,
3573 : machine_mode op_mode, rtx op0, rtx op1)
3574 : {
3575 62069280 : return simplify_context ().simplify_relational_operation (code, mode,
3576 176045 : op_mode, op0, op1);
3577 : }
3578 :
3579 : inline rtx
3580 20682822 : simplify_subreg (machine_mode outermode, rtx op, machine_mode innermode,
3581 : poly_uint64 byte)
3582 : {
3583 20682822 : return simplify_context ().simplify_subreg (outermode, op, innermode, byte);
3584 : }
3585 :
3586 : inline rtx
3587 780562 : simplify_gen_unary (rtx_code code, machine_mode mode, rtx op,
3588 : machine_mode op_mode)
3589 : {
3590 1628837 : return simplify_context ().simplify_gen_unary (code, mode, op, op_mode);
3591 : }
3592 :
3593 : inline rtx
3594 36802312 : simplify_gen_binary (rtx_code code, machine_mode mode, rtx op0, rtx op1)
3595 : {
3596 87043149 : return simplify_context ().simplify_gen_binary (code, mode, op0, op1);
3597 : }
3598 :
3599 : inline rtx
3600 2171560 : simplify_gen_ternary (rtx_code code, machine_mode mode, machine_mode op0_mode,
3601 : rtx op0, rtx op1, rtx op2)
3602 : {
3603 2171560 : return simplify_context ().simplify_gen_ternary (code, mode, op0_mode,
3604 : op0, op1, op2);
3605 : }
3606 :
3607 : inline rtx
3608 2925921 : simplify_gen_relational (rtx_code code, machine_mode mode,
3609 : machine_mode op_mode, rtx op0, rtx op1)
3610 : {
3611 5390495 : return simplify_context ().simplify_gen_relational (code, mode, op_mode,
3612 : op0, op1);
3613 : }
3614 :
3615 : inline rtx
3616 10388747 : simplify_gen_subreg (machine_mode outermode, rtx op, machine_mode innermode,
3617 : poly_uint64 byte)
3618 : {
3619 10078467 : return simplify_context ().simplify_gen_subreg (outermode, op,
3620 : innermode, byte);
3621 : }
3622 :
3623 : inline rtx
3624 639728 : simplify_gen_vec_select (rtx op, unsigned int index)
3625 : {
3626 639728 : return simplify_context ().simplify_gen_vec_select (op, index);
3627 : }
3628 :
3629 : inline rtx
3630 27966546 : lowpart_subreg (machine_mode outermode, rtx op, machine_mode innermode)
3631 : {
3632 29272828 : return simplify_context ().lowpart_subreg (outermode, op, innermode);
3633 : }
3634 :
3635 : extern rtx simplify_const_unary_operation (enum rtx_code, machine_mode,
3636 : rtx, machine_mode);
3637 : extern rtx simplify_const_binary_operation (enum rtx_code, machine_mode,
3638 : rtx, rtx);
3639 : extern rtx simplify_const_relational_operation (enum rtx_code,
3640 : machine_mode, rtx, rtx);
3641 : extern rtx simplify_replace_fn_rtx (rtx, const_rtx,
3642 : rtx (*fn) (rtx, const_rtx, void *), void *);
3643 : extern rtx simplify_replace_rtx (rtx, const_rtx, rtx);
3644 : extern rtx simplify_rtx (const_rtx);
3645 : extern rtx avoid_constant_pool_reference (rtx);
3646 : extern rtx delegitimize_mem_from_attrs (rtx);
3647 : extern bool mode_signbit_p (machine_mode, const_rtx);
3648 : extern bool val_signbit_p (machine_mode, unsigned HOST_WIDE_INT);
3649 : extern bool val_signbit_known_set_p (machine_mode,
3650 : unsigned HOST_WIDE_INT);
3651 : extern bool val_signbit_known_clear_p (machine_mode,
3652 : unsigned HOST_WIDE_INT);
3653 : extern bool reverse_rotate_by_imm_p (machine_mode, unsigned int, rtx);
3654 :
3655 : /* In reginfo.cc */
3656 : extern machine_mode choose_hard_reg_mode (unsigned int, unsigned int,
3657 : const predefined_function_abi *);
3658 : extern const HARD_REG_SET &simplifiable_subregs (const subreg_shape &);
3659 :
3660 : /* In emit-rtl.cc */
3661 : extern rtx set_for_reg_notes (rtx);
3662 : extern rtx set_unique_reg_note (rtx, enum reg_note, rtx);
3663 : extern rtx set_dst_reg_note (rtx, enum reg_note, rtx, rtx);
3664 : extern void set_insn_deleted (rtx_insn *);
3665 :
3666 : /* Functions in rtlanal.cc */
3667 :
3668 : extern rtx single_set_2 (const rtx_insn *, const_rtx);
3669 : extern rtx simple_regno_set (rtx, unsigned int);
3670 : extern bool contains_symbol_ref_p (const_rtx);
3671 : extern bool contains_symbolic_reference_p (const_rtx);
3672 : extern bool contains_constant_pool_address_p (const_rtx);
3673 : extern void add_auto_inc_notes (rtx_insn *, rtx);
3674 : extern bool single_output_fused_pair_p (rtx_insn *);
3675 :
3676 : /* Handle the cheap and common cases inline for performance. */
3677 :
3678 6727724338 : inline rtx single_set (const rtx_insn *insn)
3679 : {
3680 6727724338 : if (!INSN_P (insn))
3681 : return NULL_RTX;
3682 :
3683 6572432869 : if (GET_CODE (PATTERN (insn)) == SET)
3684 : return PATTERN (insn);
3685 :
3686 : /* Defer to the more expensive case. */
3687 1911525009 : return single_set_2 (insn, PATTERN (insn));
3688 : }
3689 :
3690 : extern scalar_int_mode get_address_mode (rtx mem);
3691 : extern bool rtx_addr_can_trap_p (const_rtx);
3692 : extern bool nonzero_address_p (const_rtx);
3693 : extern bool rtx_unstable_p (const_rtx);
3694 : extern bool rtx_varies_p (const_rtx, bool);
3695 : extern bool rtx_addr_varies_p (const_rtx, bool);
3696 : extern tree get_call_fndecl (const rtx_insn *);
3697 : extern HOST_WIDE_INT get_integer_term (const_rtx);
3698 : extern rtx get_related_value (const_rtx);
3699 : extern bool offset_within_block_p (const_rtx, HOST_WIDE_INT);
3700 : extern void split_const (rtx, rtx *, rtx *);
3701 : extern rtx strip_offset (rtx, poly_int64 *);
3702 : extern poly_int64 get_args_size (const_rtx);
3703 : extern bool unsigned_reg_p (rtx);
3704 : extern bool reg_mentioned_p (const_rtx, const_rtx);
3705 : extern int count_occurrences (const_rtx, const_rtx, int);
3706 : extern bool reg_referenced_p (const_rtx, const_rtx);
3707 : extern bool reg_used_between_p (const_rtx, const rtx_insn *, const rtx_insn *);
3708 : extern bool reg_set_between_p (const_rtx, const rtx_insn *, const rtx_insn *);
3709 : extern int commutative_operand_precedence (rtx);
3710 : extern bool swap_commutative_operands_p (rtx, rtx);
3711 : extern bool modified_between_p (const_rtx, const rtx_insn *, const rtx_insn *);
3712 : extern bool no_labels_between_p (const rtx_insn *, const rtx_insn *);
3713 : extern bool modified_in_p (const_rtx, const_rtx);
3714 : extern bool reg_set_p (const_rtx, const_rtx);
3715 : extern bool multiple_sets (const_rtx);
3716 : extern bool set_noop_p (const_rtx);
3717 : extern bool noop_move_p (const rtx_insn *);
3718 : extern bool refers_to_regno_p (unsigned int, unsigned int, const_rtx, rtx *);
3719 : extern bool reg_overlap_mentioned_p (const_rtx, const_rtx);
3720 : extern const_rtx set_of (const_rtx, const_rtx);
3721 : extern void record_hard_reg_sets (rtx, const_rtx, void *);
3722 : extern void record_hard_reg_uses (rtx *, void *);
3723 : extern void find_all_hard_regs (const_rtx, HARD_REG_SET *);
3724 : extern void find_all_hard_reg_sets (const rtx_insn *, HARD_REG_SET *, bool);
3725 : extern void note_pattern_stores (const_rtx,
3726 : void (*) (rtx, const_rtx, void *), void *);
3727 : extern void note_stores (const rtx_insn *,
3728 : void (*) (rtx, const_rtx, void *), void *);
3729 : extern void note_uses (rtx *, void (*) (rtx *, void *), void *);
3730 : extern bool dead_or_set_p (const rtx_insn *, const_rtx);
3731 : extern bool dead_or_set_regno_p (const rtx_insn *, unsigned int);
3732 : extern rtx find_reg_note (const_rtx, enum reg_note, const_rtx);
3733 : extern rtx find_regno_note (const_rtx, enum reg_note, unsigned int);
3734 : extern rtx find_reg_equal_equiv_note (const_rtx);
3735 : extern rtx find_constant_src (const rtx_insn *);
3736 : extern bool find_reg_fusage (const_rtx, enum rtx_code, const_rtx);
3737 : extern bool find_regno_fusage (const_rtx, enum rtx_code, unsigned int);
3738 : extern rtx alloc_reg_note (enum reg_note, rtx, rtx);
3739 : extern void add_reg_note (rtx, enum reg_note, rtx);
3740 : extern void add_int_reg_note (rtx_insn *, enum reg_note, int);
3741 : extern void add_args_size_note (rtx_insn *, poly_int64);
3742 : extern void add_shallow_copy_of_reg_note (rtx_insn *, rtx);
3743 : extern rtx duplicate_reg_note (rtx);
3744 : extern void remove_note (rtx_insn *, const_rtx);
3745 : extern bool remove_reg_equal_equiv_notes (rtx_insn *, bool = false);
3746 : extern void remove_reg_equal_equiv_notes_for_regno (unsigned int);
3747 : extern bool side_effects_p (const_rtx);
3748 : extern bool volatile_refs_p (const_rtx);
3749 : extern bool volatile_insn_p (const_rtx);
3750 : extern bool may_trap_p_1 (const_rtx, unsigned);
3751 : extern bool may_trap_p (const_rtx);
3752 : extern bool may_trap_or_fault_p (const_rtx);
3753 : extern bool can_throw_internal (const_rtx);
3754 : extern bool can_throw_external (const_rtx);
3755 : extern bool insn_could_throw_p (const_rtx);
3756 : extern bool insn_nothrow_p (const_rtx);
3757 : extern bool can_nonlocal_goto (const rtx_insn *);
3758 : extern void copy_reg_eh_region_note_forward (rtx, rtx_insn *, rtx);
3759 : extern void copy_reg_eh_region_note_backward (rtx, rtx_insn *, rtx);
3760 : extern rtx replace_rtx (rtx, rtx, rtx, bool = false);
3761 : extern void replace_label (rtx *, rtx, rtx, bool);
3762 : extern void replace_label_in_insn (rtx_insn *, rtx_insn *, rtx_insn *, bool);
3763 : extern bool rtx_referenced_p (const_rtx, const_rtx);
3764 : extern bool tablejump_p (const rtx_insn *, rtx_insn **, rtx_jump_table_data **);
3765 : extern rtx tablejump_casesi_pattern (const rtx_insn *insn);
3766 : extern bool computed_jump_p (const rtx_insn *);
3767 : extern bool tls_referenced_p (const_rtx);
3768 : extern bool contains_mem_rtx_p (rtx x);
3769 : extern bool register_asm_p (const_rtx);
3770 :
/* Overload of refers_to_regno_p for checking a single register REGNUM:
   equivalent to querying the half-open range [REGNUM, REGNUM + 1).
   LOC, if nonnull, is forwarded unchanged to the main overload.  */
inline bool
refers_to_regno_p (unsigned int regnum, const_rtx x, rtx* loc = NULL)
{
  return refers_to_regno_p (regnum, regnum + 1, x, loc);
}
3777 :
3778 : /* Callback for for_each_inc_dec, to process the autoinc operation OP
3779 : within MEM that sets DEST to SRC + SRCOFF, or SRC if SRCOFF is
3780 : NULL. The callback is passed the same opaque ARG passed to
3781 : for_each_inc_dec. Return zero to continue looking for other
3782 : autoinc operations or any other value to interrupt the traversal and
3783 : return that value to the caller of for_each_inc_dec. */
3784 : typedef int (*for_each_inc_dec_fn) (rtx mem, rtx op, rtx dest, rtx src,
3785 : rtx srcoff, void *arg);
3786 : extern int for_each_inc_dec (rtx, for_each_inc_dec_fn, void *arg);
3787 :
3788 : extern rtx regno_use_in (unsigned int, rtx);
3789 : extern bool auto_inc_p (const_rtx);
3790 : extern bool in_insn_list_p (const rtx_insn_list *, const rtx_insn *);
3791 : extern void remove_node_from_insn_list (const rtx_insn *, rtx_insn_list **);
3792 : extern bool loc_mentioned_in_p (rtx *, const_rtx);
3793 : extern rtx_insn *find_first_parameter_load (rtx_insn *, rtx_insn *);
3794 : extern bool keep_with_call_p (const rtx_insn *);
3795 : extern bool label_is_jump_target_p (const_rtx, const rtx_insn *);
3796 : extern int pattern_cost (rtx, bool);
3797 : extern int insn_cost (rtx_insn *, bool);
3798 : extern unsigned seq_cost (const rtx_insn *, bool);
3799 :
3800 : /* Given an insn and condition, return a canonical description of
3801 : the test being made. */
3802 : extern rtx canonicalize_condition (rtx_insn *, rtx, int, rtx_insn **, rtx,
3803 : int, int);
3804 :
3805 : /* Given a JUMP_INSN, return a canonical description of the test
3806 : being made. */
3807 : extern rtx get_condition (rtx_insn *, rtx_insn **, int, int);
3808 :
3809 : /* Information about a subreg of a hard register. */
struct subreg_info
{
  /* Offset of first hard register involved in the subreg.
     NOTE(review): appears to be counted in hard registers, not bytes --
     confirm against subreg_get_info.  */
  int offset;
  /* Number of hard registers involved in the subreg.  In the case of
     a paradoxical subreg, this is the number of registers that would
     be modified by writing to the subreg; some of them may be don't-care
     when reading from the subreg.  */
  int nregs;
  /* Whether this subreg can be represented as a hard reg with the new
     mode (by adding OFFSET to the original hard register).  */
  bool representable_p;
};
3823 :
3824 : extern void subreg_get_info (unsigned int, machine_mode,
3825 : poly_uint64, machine_mode,
3826 : struct subreg_info *);
3827 :
3828 : /* lists.cc */
3829 :
3830 : extern void free_EXPR_LIST_list (rtx_expr_list **);
3831 : extern void free_INSN_LIST_list (rtx_insn_list **);
3832 : extern void free_EXPR_LIST_node (rtx);
3833 : extern void free_INSN_LIST_node (rtx);
3834 : extern rtx_insn_list *alloc_INSN_LIST (rtx, rtx);
3835 : extern rtx_insn_list *copy_INSN_LIST (rtx_insn_list *);
3836 : extern rtx_insn_list *concat_INSN_LIST (rtx_insn_list *, rtx_insn_list *);
3837 : extern rtx_expr_list *alloc_EXPR_LIST (int, rtx, rtx);
3838 : extern void remove_free_INSN_LIST_elem (rtx_insn *, rtx_insn_list **);
3839 : extern rtx remove_list_elem (rtx, rtx *);
3840 : extern rtx_insn *remove_free_INSN_LIST_node (rtx_insn_list **);
3841 : extern rtx remove_free_EXPR_LIST_node (rtx_expr_list **);
3842 :
3843 :
3844 : /* reginfo.cc */
3845 :
3846 : /* Resize reg info. */
3847 : extern bool resize_reg_info (void);
3848 : /* Free up register info memory. */
3849 : extern void free_reg_info (void);
3850 : extern void init_subregs_of_mode (void);
3851 : extern void finish_subregs_of_mode (void);
3852 : extern void reginfo_cc_finalize (void);
3853 :
3854 : /* recog.cc */
3855 : extern rtx extract_asm_operands (rtx);
3856 : extern int asm_noperands (const_rtx);
3857 : extern const char *decode_asm_operands (rtx, rtx *, rtx **, const char **,
3858 : machine_mode *, location_t *);
3859 : extern void get_referenced_operands (const char *, bool *, unsigned int);
3860 :
3861 : extern enum reg_class reg_preferred_class (int);
3862 : extern enum reg_class reg_alternate_class (int);
3863 : extern enum reg_class reg_allocno_class (int);
3864 : extern void setup_reg_classes (int, enum reg_class, enum reg_class,
3865 : enum reg_class);
3866 :
3867 : extern void split_all_insns (void);
3868 : extern void split_all_insns_noflow (void);
3869 :
3870 : #define MAX_SAVED_CONST_INT 64
3871 : extern GTY(()) rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
3872 :
3873 : #define const0_rtx (const_int_rtx[MAX_SAVED_CONST_INT])
3874 : #define const1_rtx (const_int_rtx[MAX_SAVED_CONST_INT+1])
3875 : #define const2_rtx (const_int_rtx[MAX_SAVED_CONST_INT+2])
3876 : #define constm1_rtx (const_int_rtx[MAX_SAVED_CONST_INT-1])
3877 : extern GTY(()) rtx const_true_rtx;
3878 :
3879 : extern GTY(()) rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
3880 :
3881 : /* Returns a constant 0 rtx in mode MODE. Integer modes are treated the
3882 : same as VOIDmode. */
3883 :
3884 : #define CONST0_RTX(MODE) (const_tiny_rtx[0][(int) (MODE)])
3885 :
3886 : /* Likewise, for the constants 1 and 2 and -1. */
3887 :
3888 : #define CONST1_RTX(MODE) (const_tiny_rtx[1][(int) (MODE)])
3889 : #define CONST2_RTX(MODE) (const_tiny_rtx[2][(int) (MODE)])
3890 : #define CONSTM1_RTX(MODE) (const_tiny_rtx[3][(int) (MODE)])
3891 :
3892 : extern GTY(()) rtx pc_rtx;
3893 : extern GTY(()) rtx ret_rtx;
3894 : extern GTY(()) rtx simple_return_rtx;
3895 : extern GTY(()) rtx_insn *invalid_insn_rtx;
3896 :
3897 : /* If HARD_FRAME_POINTER_REGNUM is defined, then a special dummy reg
3898 : is used to represent the frame pointer. This is because the
3899 : hard frame pointer and the automatic variables are separated by an amount
3900 : that cannot be determined until after register allocation. We can assume
3901 : that in this case ELIMINABLE_REGS will be defined, one action of which
3902 : will be to eliminate FRAME_POINTER_REGNUM into HARD_FRAME_POINTER_REGNUM. */
3903 : #ifndef HARD_FRAME_POINTER_REGNUM
3904 : #define HARD_FRAME_POINTER_REGNUM FRAME_POINTER_REGNUM
3905 : #endif
3906 :
3907 : #ifndef HARD_FRAME_POINTER_IS_FRAME_POINTER
3908 : #define HARD_FRAME_POINTER_IS_FRAME_POINTER \
3909 : (HARD_FRAME_POINTER_REGNUM == FRAME_POINTER_REGNUM)
3910 : #endif
3911 :
3912 : #ifndef HARD_FRAME_POINTER_IS_ARG_POINTER
3913 : #define HARD_FRAME_POINTER_IS_ARG_POINTER \
3914 : (HARD_FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM)
3915 : #endif
3916 :
3917 : /* Index labels for global_rtl. */
enum global_rtl_index
{
  GR_STACK_POINTER,
  GR_FRAME_POINTER,
  /* For register elimination to work properly these hard_frame_pointer_rtx,
     frame_pointer_rtx, and arg_pointer_rtx must be the same if they refer to
     the same register.  The #if logic below aliases the enumerators
     whenever the corresponding register numbers coincide.  */
#if FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM
  GR_ARG_POINTER = GR_FRAME_POINTER,
#endif
#if HARD_FRAME_POINTER_IS_FRAME_POINTER
  GR_HARD_FRAME_POINTER = GR_FRAME_POINTER,
#else
  GR_HARD_FRAME_POINTER,
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
#if HARD_FRAME_POINTER_IS_ARG_POINTER
  GR_ARG_POINTER = GR_HARD_FRAME_POINTER,
#else
  GR_ARG_POINTER,
#endif
#endif
  GR_VIRTUAL_INCOMING_ARGS,
  GR_VIRTUAL_STACK_ARGS,
  GR_VIRTUAL_STACK_DYNAMIC,
  GR_VIRTUAL_OUTGOING_ARGS,
  GR_VIRTUAL_CFA,
  GR_VIRTUAL_PREFERRED_STACK_BOUNDARY,

  /* Number of entries; sizes target_rtl::x_global_rtl.  */
  GR_MAX
};
3949 :
3950 : /* Target-dependent globals. */
struct GTY(()) target_rtl {
  /* All references to the hard registers in global_rtl_index go through
     these unique rtl objects.  On machines where the frame-pointer and
     arg-pointer are the same register, they use the same unique object.

     After register allocation, other rtl objects which used to be pseudo-regs
     may be clobbered to refer to the frame-pointer register.
     But references that were originally to the frame-pointer can be
     distinguished from the others because they contain frame_pointer_rtx.

     When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
     tricky: until register elimination has taken place hard_frame_pointer_rtx
     should be used if it is being set, and frame_pointer_rtx otherwise.  After
     register elimination hard_frame_pointer_rtx should always be used.
     On machines where the two registers are same (most) then these are the
     same.  */
  rtx x_global_rtl[GR_MAX];

  /* A unique representation of (REG:Pmode PIC_OFFSET_TABLE_REGNUM).  */
  rtx x_pic_offset_table_rtx;

  /* A unique representation of (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM).
     This is used to implement __builtin_return_address for some machines;
     see for instance the MIPS port.  */
  rtx x_return_address_pointer_rtx;

  /* Commonly used RTL for hard registers.  These objects are not
     necessarily unique, so we allocate them separately from global_rtl.
     They are initialized once per compilation unit, then copied into
     regno_reg_rtx at the beginning of each function.  */
  rtx x_initial_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

  /* A sample (mem:M stack_pointer_rtx) rtx for each mode M.  */
  rtx x_top_of_stack[MAX_MACHINE_MODE];

  /* Static hunks of RTL used by the aliasing code; these are treated
     as persistent to avoid unnecessary RTL allocations.  */
  rtx x_static_reg_base_value[FIRST_PSEUDO_REGISTER];

  /* The default memory attributes for each mode.  */
  class mem_attrs *x_mode_mem_attrs[(int) MAX_MACHINE_MODE];

  /* Whether the target-specific RTL objects above have been initialized.  */
  bool target_specific_initialized;
};
3996 :
3997 : extern GTY(()) struct target_rtl default_target_rtl;
3998 : #if SWITCHABLE_TARGET
3999 : extern struct target_rtl *this_target_rtl;
4000 : #else
4001 : #define this_target_rtl (&default_target_rtl)
4002 : #endif
4003 :
4004 : #define global_rtl \
4005 : (this_target_rtl->x_global_rtl)
4006 : #define pic_offset_table_rtx \
4007 : (this_target_rtl->x_pic_offset_table_rtx)
4008 : #define return_address_pointer_rtx \
4009 : (this_target_rtl->x_return_address_pointer_rtx)
4010 : #define top_of_stack \
4011 : (this_target_rtl->x_top_of_stack)
4012 : #define mode_mem_attrs \
4013 : (this_target_rtl->x_mode_mem_attrs)
4014 :
4015 : /* All references to certain hard regs, except those created
4016 : by allocating pseudo regs into them (when that's possible),
4017 : go through these unique rtx objects. */
4018 : #define stack_pointer_rtx (global_rtl[GR_STACK_POINTER])
4019 : #define frame_pointer_rtx (global_rtl[GR_FRAME_POINTER])
4020 : #define hard_frame_pointer_rtx (global_rtl[GR_HARD_FRAME_POINTER])
4021 : #define arg_pointer_rtx (global_rtl[GR_ARG_POINTER])
4022 :
4023 : #ifndef GENERATOR_FILE
4024 : /* Return the attributes of a MEM rtx. */
4025 : inline const class mem_attrs *
4026 11422054404 : get_mem_attrs (const_rtx x)
4027 : {
4028 11422054404 : class mem_attrs *attrs;
4029 :
4030 3858445902 : attrs = MEM_ATTRS (x);
4031 9645783873 : if (!attrs)
4032 583679245 : attrs = mode_mem_attrs[(int) GET_MODE (x)];
4033 9613673150 : return attrs;
4034 : }
4035 : #endif
4036 :
4037 : /* Include the RTL generation functions. */
4038 :
4039 : #ifndef GENERATOR_FILE
4040 : #include "genrtl.h"
4041 : #undef gen_rtx_ASM_INPUT
4042 : #define gen_rtx_ASM_INPUT(MODE, ARG0) \
4043 : gen_rtx_fmt_sL (ASM_INPUT, (MODE), (ARG0), 0)
4044 : #define gen_rtx_ASM_INPUT_loc(MODE, ARG0, LOC) \
4045 : gen_rtx_fmt_sL (ASM_INPUT, (MODE), (ARG0), (LOC))
4046 : #endif
4047 :
4048 : /* There are some RTL codes that require special attention; the
4049 : generation functions included above do the raw handling. If you
4050 : add to this list, modify special_rtx in gengenrtl.cc as well. */
4051 :
4052 : extern rtx_expr_list *gen_rtx_EXPR_LIST (machine_mode, rtx, rtx);
4053 : extern rtx_insn_list *gen_rtx_INSN_LIST (machine_mode, rtx, rtx);
4054 : extern rtx_insn *
4055 : gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
4056 : basic_block bb, rtx pattern, location_t location, int code,
4057 : rtx reg_notes);
4058 : extern rtx gen_rtx_CONST_INT (machine_mode, HOST_WIDE_INT);
4059 : extern rtx gen_rtx_CONST_VECTOR (machine_mode, rtvec);
4060 : extern void set_mode_and_regno (rtx, machine_mode, unsigned int);
4061 : extern rtx init_raw_REG (rtx, machine_mode, unsigned int);
4062 : extern rtx gen_raw_REG (machine_mode, unsigned int);
4063 : #define alloca_raw_REG(mode, regno) \
4064 : init_raw_REG (rtx_alloca (REG), (mode), (regno))
4065 : extern rtx gen_rtx_REG (machine_mode, unsigned int);
4066 : extern rtx gen_rtx_SUBREG (machine_mode, rtx, poly_uint64);
4067 : extern rtx gen_rtx_MEM (machine_mode, rtx);
4068 : extern rtx gen_rtx_VAR_LOCATION (machine_mode, tree, rtx,
4069 : enum var_init_status);
4070 :
4071 : #ifdef GENERATOR_FILE
4072 : #define PUT_MODE(RTX, MODE) PUT_MODE_RAW (RTX, MODE)
4073 : #else
4074 : inline void
4075 1351874505 : PUT_MODE (rtx x, machine_mode mode)
4076 : {
4077 1351874505 : if (REG_P (x))
4078 336665543 : set_mode_and_regno (x, mode, REGNO (x));
4079 : else
4080 1015208962 : PUT_MODE_RAW (x, mode);
4081 1351874505 : }
4082 : #endif
4083 :
4084 : #define GEN_INT(N) gen_rtx_CONST_INT (VOIDmode, (N))
4085 :
4086 : /* Virtual registers are used during RTL generation to refer to locations into
4087 : the stack frame when the actual location isn't known until RTL generation
4088 : is complete. The routine instantiate_virtual_regs replaces these with
4089 : the proper value, which is normally {frame,arg,stack}_pointer_rtx plus
4090 : a constant. */
4091 :
4092 : #define FIRST_VIRTUAL_REGISTER (FIRST_PSEUDO_REGISTER)
4093 :
4094 : /* This points to the first word of the incoming arguments passed on the stack,
4095 : either by the caller or by the callee when pretending it was passed by the
4096 : caller. */
4097 :
4098 : #define virtual_incoming_args_rtx (global_rtl[GR_VIRTUAL_INCOMING_ARGS])
4099 :
4100 : #define VIRTUAL_INCOMING_ARGS_REGNUM (FIRST_VIRTUAL_REGISTER)
4101 :
4102 : /* If FRAME_GROWS_DOWNWARD, this points to immediately above the first
4103 : variable on the stack. Otherwise, it points to the first variable on
4104 : the stack. */
4105 :
4106 : #define virtual_stack_vars_rtx (global_rtl[GR_VIRTUAL_STACK_ARGS])
4107 :
4108 : #define VIRTUAL_STACK_VARS_REGNUM ((FIRST_VIRTUAL_REGISTER) + 1)
4109 :
4110 : /* This points to the location of dynamically-allocated memory on the stack
4111 : immediately after the stack pointer has been adjusted by the amount
4112 : desired. */
4113 :
4114 : #define virtual_stack_dynamic_rtx (global_rtl[GR_VIRTUAL_STACK_DYNAMIC])
4115 :
4116 : #define VIRTUAL_STACK_DYNAMIC_REGNUM ((FIRST_VIRTUAL_REGISTER) + 2)
4117 :
4118 : /* This points to the location in the stack at which outgoing arguments should
4119 : be written when the stack is pre-pushed (arguments pushed using push
4120 : insns always use sp). */
4121 :
4122 : #define virtual_outgoing_args_rtx (global_rtl[GR_VIRTUAL_OUTGOING_ARGS])
4123 :
4124 : #define VIRTUAL_OUTGOING_ARGS_REGNUM ((FIRST_VIRTUAL_REGISTER) + 3)
4125 :
4126 : /* This points to the Canonical Frame Address of the function. This
4127 : should correspond to the CFA produced by INCOMING_FRAME_SP_OFFSET,
   but is calculated relative to the arg pointer for simplicity; neither
   the frame pointer nor the stack pointer is necessarily fixed relative
   to the CFA until after reload.  */
4131 :
4132 : #define virtual_cfa_rtx (global_rtl[GR_VIRTUAL_CFA])
4133 :
4134 : #define VIRTUAL_CFA_REGNUM ((FIRST_VIRTUAL_REGISTER) + 4)
4135 :
4136 : #define LAST_VIRTUAL_POINTER_REGISTER ((FIRST_VIRTUAL_REGISTER) + 4)
4137 :
4138 : /* This is replaced by crtl->preferred_stack_boundary / BITS_PER_UNIT
4139 : when finalized. */
4140 :
4141 : #define virtual_preferred_stack_boundary_rtx \
4142 : (global_rtl[GR_VIRTUAL_PREFERRED_STACK_BOUNDARY])
4143 :
4144 : #define VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM \
4145 : ((FIRST_VIRTUAL_REGISTER) + 5)
4146 :
4147 : #define LAST_VIRTUAL_REGISTER ((FIRST_VIRTUAL_REGISTER) + 5)
4148 :
4149 : /* Nonzero if REGNUM is a pointer into the stack frame. */
4150 : #define REGNO_PTR_FRAME_P(REGNUM) \
4151 : ((REGNUM) == STACK_POINTER_REGNUM \
4152 : || (REGNUM) == FRAME_POINTER_REGNUM \
4153 : || (REGNUM) == HARD_FRAME_POINTER_REGNUM \
4154 : || (REGNUM) == ARG_POINTER_REGNUM \
4155 : || VIRTUAL_REGISTER_NUM_P (REGNUM))
4156 :
4157 : /* REGNUM never really appearing in the INSN stream. */
4158 : #define INVALID_REGNUM (~(unsigned int) 0)
4159 :
4160 : /* REGNUM for which no debug information can be generated. */
4161 : #define IGNORED_DWARF_REGNUM (INVALID_REGNUM - 1)
4162 :
4163 : extern rtx output_constant_def (tree, int);
4164 : extern rtx lookup_constant_def (tree);
4165 :
4166 : /* Nonzero after end of reload pass.
4167 : Set to 1 or 0 by reload1.cc. */
4168 :
4169 : extern int reload_completed;
4170 :
4171 : /* Nonzero after thread_prologue_and_epilogue_insns has run. */
4172 : extern int epilogue_completed;
4173 :
4174 : /* Set to true once the first split pass after register allocation has
4175 : been run. Ports can treat that split pass as a "lowering" pass,
4176 : with some instructions only being valid before the lowering
4177 : and others only being valid after the lowering.
4178 :
4179 : One use of this variable is to cope with address calculations during
4180 : register allocation. The register allocator needs to be able to perform
4181 : address arithmetic (such as addition) at arbitrary points in the program,
4182 : regardless of whether the condition-code flags are live at that point.
4183 : If a target cannot add without clobbering the condition-code flags,
4184 : it must either (1) hide the condition-code flags entirely from RTL
4185 : or (2) ensure that the condition-code flags are never live before
4186 : or during register allocation.
4187 :
4188 : (2) requires a boundary between "the condition-code flags are never live"
4189 : and "the condition-code flags might be live". reload_completed can be
4190 : used for this purpose, provided that all clobbers of the CC register
4191 : are explicit before and during register allocation.
4192 :
4193 : However, if the condition-code flags are never live before or during
4194 : register allocation, there is no real need for patterns to have an explicit
4195 : clobber of the flags at that point. Not having a clobber would allow more
4196 : recog attempts to succeed, both before and during register allocation.
4197 :
4198 : post_ra_split_completed is an alternative boundary to reload_completed.
4199 : It allows sets and uses of the condition-code flags, such as individual
4200 : comparison and jump instructions, to be introduced in the first split pass
4201 : after register allocation, while also allowing new implicit clobbers of
4202 : the condition-code flags to be introduced at any time before that point.
4203 :
4204 : Ports that use post_ra_split_completed for this purpose would have an
4205 : "unlowered" form with the following properties:
4206 :
4207 : (a) The condition-code flags are never live between instructions.
4208 : (That is, they are never defined by one instruction and used
4209 : by another instruction.)
4210 :
4211 : (b) As a consequence, new clobbers of the condition-code flags
4212 : can be introduced at any time.
4213 :
4214 : (c) RTL instruction patterns (such as addition) can omit clobbers of the
4215 : condition-code flags even if the flags are in fact clobbered.
4216 :
4217 : In contrast, the "lowered" form would have these properties:
4218 :
4219 : (d) The condition-code flags can be live between instructions.
4220 : That is, RTL instruction patterns can set the condition-code flags
4221 : or use the condition-code flags.
4222 :
4223 : (e) All clobbers of the condition-code flags must be explicit in the RTL
4224 : instruction patterns.
4225 :
4226 : Instructions covered by (c) would require !post_ra_split_completed
4227 : and would need to be split into instructions that satisfy (d) or (e).
4228 : Instructions covered by (d) would require post_ra_split_completed,
4229 : so that they are not accidentally matched before lowering has taken
4230 : place. */
4231 : extern bool post_ra_split_completed;
4232 :
4233 : /* Set to 1 while reload_as_needed is operating.
4234 : Required by some machines to handle any generated moves differently. */
4235 :
4236 : extern int reload_in_progress;
4237 :
4238 : /* Set to true while in IRA. */
4239 : extern bool ira_in_progress;
4240 :
4241 : /* Set to true while in LRA. */
4242 : extern bool lra_in_progress;
4243 :
4244 : /* This macro indicates whether you may create a new
4245 : pseudo-register. */
4246 :
4247 : #define can_create_pseudo_p() (!reload_in_progress && !reload_completed)
4248 :
4249 : #ifdef STACK_REGS
4250 : /* Nonzero after end of regstack pass.
4251 : Set to 1 or 0 by reg-stack.cc. */
4252 : extern int regstack_completed;
4253 : #endif
4254 :
4255 : /* If this is nonzero, we do not bother generating VOLATILE
4256 : around volatile memory references, and we are willing to
4257 : output indirect addresses. If cse is to follow, we reject
4258 : indirect addresses so a useful potential cse is generated;
4259 : if it is used only once, instruction combination will produce
4260 : the same indirect address eventually. */
4261 : extern int cse_not_expected;
4262 :
4263 : /* Translates rtx code to tree code, for those codes needed by
4264 : real_arithmetic. The function returns an int because the caller may not
4265 : know what `enum tree_code' means. */
4266 :
4267 : extern int rtx_to_tree_code (enum rtx_code);
4268 :
4269 : /* In cse.cc */
4270 : extern int delete_trivially_dead_insns (rtx_insn *, int);
4271 : extern bool exp_equiv_p (const_rtx, const_rtx, int, bool);
4272 :
4273 : typedef bool (*hash_rtx_callback_function) (const_rtx, machine_mode, rtx *,
4274 : machine_mode *);
4275 : extern unsigned hash_rtx (const_rtx, machine_mode, int *, int *,
4276 : bool, hash_rtx_callback_function = NULL);
4277 :
4278 : /* In dse.cc */
4279 : extern bool check_for_inc_dec (rtx_insn *insn);
4280 :
4281 : /* In jump.cc */
4282 : extern bool comparison_dominates_p (enum rtx_code, enum rtx_code);
4283 : extern bool jump_to_label_p (const rtx_insn *);
4284 : extern bool condjump_p (const rtx_insn *);
4285 : extern bool any_condjump_p (const rtx_insn *);
4286 : extern bool any_uncondjump_p (const rtx_insn *);
4287 : extern rtx pc_set (const rtx_insn *);
4288 : extern rtx condjump_label (const rtx_insn *);
4289 : extern bool simplejump_p (const rtx_insn *);
4290 : extern bool returnjump_p (const rtx_insn *);
4291 : extern bool eh_returnjump_p (rtx_insn *);
4292 : extern bool onlyjump_p (const rtx_insn *);
4293 : extern bool invert_jump_1 (rtx_jump_insn *, rtx);
4294 : extern bool invert_jump (rtx_jump_insn *, rtx, int);
4295 : extern bool rtx_renumbered_equal_p (const_rtx, const_rtx);
4296 : extern int true_regnum (const_rtx);
4297 : extern unsigned int reg_or_subregno (const_rtx);
4298 : extern bool redirect_jump_1 (rtx_insn *, rtx);
4299 : extern void redirect_jump_2 (rtx_jump_insn *, rtx, rtx, int, int);
4300 : extern bool redirect_jump (rtx_jump_insn *, rtx, int);
4301 : extern void rebuild_jump_labels (rtx_insn *);
4302 : extern void rebuild_jump_labels_chain (rtx_insn *);
4303 : extern rtx reversed_comparison (const_rtx, machine_mode);
4304 : extern enum rtx_code reversed_comparison_code (const_rtx, const rtx_insn *);
4305 : extern enum rtx_code reversed_comparison_code_parts (enum rtx_code, const_rtx,
4306 : const_rtx, const rtx_insn *);
4307 : extern void delete_for_peephole (rtx_insn *, rtx_insn *);
4308 : extern bool condjump_in_parallel_p (const rtx_insn *);
4309 :
4310 : /* In emit-rtl.cc. */
4311 : extern int max_reg_num (void);
4312 : extern int max_label_num (void);
4313 : extern int get_first_label_num (void);
4314 : extern void maybe_set_first_label_num (rtx_code_label *);
4315 : extern void delete_insns_since (rtx_insn *);
4316 : extern void mark_reg_pointer (rtx, int);
4317 : extern void mark_user_reg (rtx);
4318 : extern void reset_used_flags (rtx);
4319 : extern void set_used_flags (rtx);
4320 : extern void reorder_insns (rtx_insn *, rtx_insn *, rtx_insn *);
4321 : extern void reorder_insns_nobb (rtx_insn *, rtx_insn *, rtx_insn *);
4322 : extern int get_max_insn_count (void);
4323 : extern bool in_sequence_p (void);
4324 : extern void init_emit (void);
4325 : extern void init_emit_regs (void);
4326 : extern void init_derived_machine_modes (void);
4327 : extern void init_emit_once (void);
4328 : extern void push_topmost_sequence (void);
4329 : extern void pop_topmost_sequence (void);
4330 : extern void set_new_first_and_last_insn (rtx_insn *, rtx_insn *);
4331 : extern void unshare_all_rtl (void);
4332 : extern void unshare_all_rtl_again (rtx_insn *);
4333 : extern void unshare_all_rtl_in_chain (rtx_insn *);
4334 : extern void verify_rtl_sharing (void);
4335 : extern void add_insn (rtx_insn *);
4336 : extern void add_insn_before (rtx_insn *, rtx_insn *, basic_block);
4337 : extern void add_insn_after (rtx_insn *, rtx_insn *, basic_block);
4338 : extern void remove_insn (rtx_insn *);
4339 : extern rtx_insn *emit (rtx, bool = true);
4340 : extern void emit_insn_at_entry (rtx);
4341 : extern rtx gen_lowpart_SUBREG (machine_mode, rtx);
4342 : extern rtx gen_const_mem (machine_mode, rtx);
4343 : extern rtx gen_frame_mem (machine_mode, rtx);
4344 : extern rtx gen_tmp_stack_mem (machine_mode, rtx);
4345 : extern bool validate_subreg (machine_mode, machine_mode,
4346 : const_rtx, poly_uint64);
4347 :
4348 : /* In combine.cc */
4349 : extern unsigned int extended_count (const_rtx, machine_mode, bool);
4350 : extern rtx remove_death (unsigned int, rtx_insn *);
4351 : extern rtx make_compound_operation (rtx, enum rtx_code);
4352 :
4353 : /* In sched-rgn.cc. */
4354 : extern void schedule_insns (void);
4355 :
4356 : /* In sched-ebb.cc. */
4357 : extern void schedule_ebbs (void);
4358 :
4359 : /* In sel-sched-dump.cc. */
4360 : extern void sel_sched_fix_param (const char *param, const char *val);
4361 :
4362 : /* In print-rtl.cc */
4363 : extern const char *print_rtx_head;
4364 : extern void debug (const rtx_def &ref);
4365 : extern void debug (const rtx_def *ptr);
4366 : extern void debug_rtx (const_rtx);
4367 : extern void debug_rtx_list (const rtx_insn *, int);
4368 : extern void debug_rtx_range (const rtx_insn *, const rtx_insn *);
4369 : extern const rtx_insn *debug_rtx_find (const rtx_insn *, int);
4370 : extern void print_mem_expr (FILE *, const_tree);
4371 : extern void print_rtl (FILE *, const_rtx);
4372 : extern void print_simple_rtl (FILE *, const_rtx);
4373 : extern void print_rtl_single (FILE *, const_rtx);
4374 : extern void print_rtl_single_with_indent (FILE *, const_rtx, int);
4375 : extern void print_inline_rtx (FILE *, const_rtx, int);
4376 :
4377 : /* In stmt.cc */
4378 : extern void expand_null_return (void);
4379 : extern void expand_naked_return (void);
4380 : extern void emit_jump (rtx);
4381 :
4382 : /* Memory operation built-ins differ by return value. Mapping
4383 : of the enum values is following:
4384 : - RETURN_BEGIN - return destination, e.g. memcpy
4385 : - RETURN_END - return destination + n, e.g. mempcpy
4386 : - RETURN_END_MINUS_ONE - return a pointer to the terminating
4387 : null byte of the string, e.g. strcpy
4388 : */
4389 :
enum memop_ret
{
  /* Return the destination, e.g. memcpy.  */
  RETURN_BEGIN,
  /* Return destination + n, e.g. mempcpy.  */
  RETURN_END,
  /* Return a pointer to the terminating null byte, e.g. strcpy.  */
  RETURN_END_MINUS_ONE
};
4396 :
4397 : /* In expr.cc */
4398 : extern rtx move_by_pieces (rtx, rtx, unsigned HOST_WIDE_INT,
4399 : unsigned int, memop_ret);
4400 : extern poly_int64 find_args_size_adjust (rtx_insn *);
4401 : extern poly_int64 fixup_args_size_notes (rtx_insn *, rtx_insn *, poly_int64);
4402 :
4403 : /* In expmed.cc */
4404 : extern void init_expmed (void);
4405 : extern void expand_inc (rtx, rtx);
4406 : extern void expand_dec (rtx, rtx);
4407 :
4408 : /* In lower-subreg.cc */
4409 : extern void init_lower_subreg (void);
4410 :
4411 : /* In gcse.cc */
4412 : extern bool can_copy_p (machine_mode);
4413 : extern bool can_assign_to_reg_without_clobbers_p (rtx, machine_mode);
4414 : extern rtx_insn *prepare_copy_insn (rtx, rtx);
4415 :
4416 : /* In cprop.cc */
4417 : extern rtx fis_get_condition (rtx_insn *);
4418 :
4419 : /* In ira.cc */
4420 : extern HARD_REG_SET eliminable_regset;
4421 : extern void mark_elimination (int, int);
4422 :
4423 : /* In reginfo.cc */
4424 : extern bool reg_classes_intersect_p (reg_class_t, reg_class_t);
4425 : extern bool reg_class_subset_p (reg_class_t, reg_class_t);
4426 : extern void globalize_reg (tree, int);
4427 : extern void init_reg_modes_target (void);
4428 : extern void init_regs (void);
4429 : extern void reinit_regs (void);
4430 : extern void init_fake_stack_mems (void);
4431 : extern void save_register_info (void);
4432 : extern void init_reg_sets (void);
4433 : extern void regclass (rtx, int);
4434 : extern void reg_scan (rtx_insn *, unsigned int);
4435 : extern void fix_register (const char *, int, int);
4436 : extern const HARD_REG_SET *valid_mode_changes_for_regno (unsigned int);
4437 :
4438 : /* In reload1.cc */
4439 : extern bool function_invariant_p (const_rtx);
4440 :
4441 : /* In calls.cc */
/* The kind of library call being emitted; see the comment above
   emit_library_call below for how the `normal', `const' and `pure'
   classifications are used.  */
enum libcall_type
{
  LCT_NORMAL = 0,	 /* An ordinary call.  */
  LCT_CONST = 1,	 /* A `const' call.  */
  LCT_PURE = 2,		 /* A `pure' call.  */
  LCT_NORETURN = 3,	 /* A call that does not return.  */
  LCT_THROW = 4,	 /* Presumably a call that may throw; see calls.cc
			    for the exact ECF flags -- confirm there.  */
  LCT_RETURNS_TWICE = 5	 /* Presumably a setjmp-like call that may return
			    twice -- confirm in calls.cc.  */
};
4451 :
4452 : extern rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
4453 : machine_mode, int, rtx_mode_t *);
4454 :
4455 : /* Output a library call and discard the returned value. FUN is the
4456 : address of the function, as a SYMBOL_REF rtx, and OUTMODE is the mode
4457 : of the (discarded) return value. FN_TYPE is LCT_NORMAL for `normal'
4458 : calls, LCT_CONST for `const' calls, LCT_PURE for `pure' calls, or
4459 : another LCT_ value for other types of library calls.
4460 :
4461 : There are different overloads of this function for different numbers
4462 : of arguments. In each case the argument value is followed by its mode. */
4463 :
inline void
emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode)
{
  /* No-argument overload: emit the call and discard the return value.  */
  emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 0, NULL);
}
4469 :
4470 : inline void
4471 0 : emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode,
4472 : rtx arg1, machine_mode arg1_mode)
4473 : {
4474 0 : rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode) };
4475 0 : emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 1, args);
4476 0 : }
4477 :
4478 : inline void
4479 182 : emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode,
4480 : rtx arg1, machine_mode arg1_mode,
4481 : rtx arg2, machine_mode arg2_mode)
4482 : {
4483 182 : rtx_mode_t args[] = {
4484 : rtx_mode_t (arg1, arg1_mode),
4485 : rtx_mode_t (arg2, arg2_mode)
4486 182 : };
4487 182 : emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 2, args);
4488 182 : }
4489 :
4490 : inline void
4491 429 : emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode,
4492 : rtx arg1, machine_mode arg1_mode,
4493 : rtx arg2, machine_mode arg2_mode,
4494 : rtx arg3, machine_mode arg3_mode)
4495 : {
4496 429 : rtx_mode_t args[] = {
4497 : rtx_mode_t (arg1, arg1_mode),
4498 : rtx_mode_t (arg2, arg2_mode),
4499 : rtx_mode_t (arg3, arg3_mode)
4500 429 : };
4501 429 : emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 3, args);
4502 429 : }
4503 :
4504 : inline void
4505 : emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode,
4506 : rtx arg1, machine_mode arg1_mode,
4507 : rtx arg2, machine_mode arg2_mode,
4508 : rtx arg3, machine_mode arg3_mode,
4509 : rtx arg4, machine_mode arg4_mode)
4510 : {
4511 : rtx_mode_t args[] = {
4512 : rtx_mode_t (arg1, arg1_mode),
4513 : rtx_mode_t (arg2, arg2_mode),
4514 : rtx_mode_t (arg3, arg3_mode),
4515 : rtx_mode_t (arg4, arg4_mode)
4516 : };
4517 : emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 4, args);
4518 : }
4519 :
4520 : /* Like emit_library_call, but return the value produced by the call.
4521 : Use VALUE to store the result if it is nonnull, otherwise pick a
4522 : convenient location. */
4523 :
inline rtx
emit_library_call_value (rtx fun, rtx value, libcall_type fn_type,
			 machine_mode outmode)
{
  /* No-argument overload; the result goes to VALUE if nonnull,
     otherwise to a location of the callee's choosing.  */
  return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 0, NULL);
}
4530 :
4531 : inline rtx
4532 38734 : emit_library_call_value (rtx fun, rtx value, libcall_type fn_type,
4533 : machine_mode outmode,
4534 : rtx arg1, machine_mode arg1_mode)
4535 : {
4536 38734 : rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode) };
4537 38734 : return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 1, args);
4538 : }
4539 :
4540 : inline rtx
4541 74906 : emit_library_call_value (rtx fun, rtx value, libcall_type fn_type,
4542 : machine_mode outmode,
4543 : rtx arg1, machine_mode arg1_mode,
4544 : rtx arg2, machine_mode arg2_mode)
4545 : {
4546 74906 : rtx_mode_t args[] = {
4547 : rtx_mode_t (arg1, arg1_mode),
4548 : rtx_mode_t (arg2, arg2_mode)
4549 74906 : };
4550 74906 : return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 2, args);
4551 : }
4552 :
4553 : inline rtx
4554 896 : emit_library_call_value (rtx fun, rtx value, libcall_type fn_type,
4555 : machine_mode outmode,
4556 : rtx arg1, machine_mode arg1_mode,
4557 : rtx arg2, machine_mode arg2_mode,
4558 : rtx arg3, machine_mode arg3_mode)
4559 : {
4560 896 : rtx_mode_t args[] = {
4561 : rtx_mode_t (arg1, arg1_mode),
4562 : rtx_mode_t (arg2, arg2_mode),
4563 : rtx_mode_t (arg3, arg3_mode)
4564 896 : };
4565 896 : return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 3, args);
4566 : }
4567 :
4568 : inline rtx
4569 : emit_library_call_value (rtx fun, rtx value, libcall_type fn_type,
4570 : machine_mode outmode,
4571 : rtx arg1, machine_mode arg1_mode,
4572 : rtx arg2, machine_mode arg2_mode,
4573 : rtx arg3, machine_mode arg3_mode,
4574 : rtx arg4, machine_mode arg4_mode)
4575 : {
4576 : rtx_mode_t args[] = {
4577 : rtx_mode_t (arg1, arg1_mode),
4578 : rtx_mode_t (arg2, arg2_mode),
4579 : rtx_mode_t (arg3, arg3_mode),
4580 : rtx_mode_t (arg4, arg4_mode)
4581 : };
4582 : return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 4, args);
4583 : }
4584 :
4585 : /* In varasm.cc */
4586 : extern void init_varasm_once (void);
4587 :
4588 : extern rtx make_debug_expr_from_rtl (const_rtx);
4589 :
4590 : /* In read-rtl.cc */
4591 : #ifdef GENERATOR_FILE
4592 : extern bool read_rtx (const char *, vec<rtx> *);
4593 : #endif
4594 :
4595 : /* In alias.cc */
4596 : extern rtx canon_rtx (rtx);
4597 : extern rtx get_addr (rtx);
4598 : extern bool read_dependence (const_rtx, const_rtx);
4599 : extern bool true_dependence (const_rtx, machine_mode, const_rtx);
4600 : extern bool canon_true_dependence (const_rtx, machine_mode, rtx,
4601 : const_rtx, rtx);
4602 : extern bool anti_dependence (const_rtx, const_rtx);
4603 : extern bool canon_anti_dependence (const_rtx, bool,
4604 : const_rtx, machine_mode, rtx);
4605 : extern bool output_dependence (const_rtx, const_rtx);
4606 : extern bool canon_output_dependence (const_rtx, bool,
4607 : const_rtx, machine_mode, rtx);
4608 : extern bool may_alias_p (const_rtx, const_rtx);
4609 : extern void init_alias_target (void);
4610 : extern void init_alias_analysis (void);
4611 : extern void end_alias_analysis (void);
4612 : extern void vt_equate_reg_base_value (const_rtx, const_rtx);
4613 : extern bool memory_modified_in_insn_p (const_rtx, const_rtx);
4614 : extern bool may_be_sp_based_p (rtx);
4615 : extern rtx gen_hard_reg_clobber (machine_mode, unsigned int);
4616 : extern rtx get_reg_known_value (unsigned int);
4617 : extern bool get_reg_known_equiv_p (unsigned int);
4618 : extern rtx get_reg_base_value (unsigned int);
4619 : extern rtx extract_mem_from_operand (rtx);
4620 :
4621 : #ifdef STACK_REGS
4622 : extern bool stack_regs_mentioned (const_rtx insn);
4623 : #endif
4624 :
4625 : /* In toplev.cc */
4626 : extern GTY(()) rtx stack_limit_rtx;
4627 :
4628 : /* In var-tracking.cc */
4629 : extern unsigned int variable_tracking_main (void);
4630 : extern void delete_vta_debug_insns (bool);
4631 :
4632 : /* In stor-layout.cc. */
4633 : extern void get_mode_bounds (scalar_int_mode, int,
4634 : scalar_int_mode, rtx *, rtx *);
4635 :
4636 : /* In loop-iv.cc */
4637 : extern rtx canon_condition (rtx);
4638 : extern void simplify_using_condition (rtx, rtx *, bitmap);
4639 :
4640 : /* In final.cc */
4641 : extern void compute_alignments (void);
4642 : extern void update_alignments (vec<rtx> &);
4643 : extern int asm_str_count (const char *templ);
4644 : extern rtx get_call_rtx_from (const rtx_insn *);
4645 :
struct rtl_hooks
{
  /* Implementation behind the gen_lowpart macro defined below.  */
  rtx (*gen_lowpart) (machine_mode, rtx);
  /* Presumably a variant of gen_lowpart that must not emit new insns;
     see rtlhooks-def.h for the default implementation -- confirm there.  */
  rtx (*gen_lowpart_no_emit) (machine_mode, rtx);
  /* Per-pass hooks for querying known nonzero bits / sign-bit copies /
     truncation state of a register; defaults are in rtlhooks-def.h.  */
  rtx (*reg_nonzero_bits) (const_rtx, scalar_int_mode, scalar_int_mode,
			   unsigned HOST_WIDE_INT *);
  rtx (*reg_num_sign_bit_copies) (const_rtx, scalar_int_mode, scalar_int_mode,
				  unsigned int *);
  bool (*reg_truncated_to_mode) (machine_mode, const_rtx);

  /* Whenever you add entries here, make sure you adjust rtlhooks-def.h.  */
};
4658 :
4659 : /* Each pass can provide its own. */
4660 : extern struct rtl_hooks rtl_hooks;
4661 :
4662 : /* ... but then it has to restore these. */
4663 : extern const struct rtl_hooks general_rtl_hooks;
4664 :
4665 : /* Keep this for the nonce. */
4666 : #define gen_lowpart rtl_hooks.gen_lowpart
4667 :
4668 : extern void insn_locations_init (void);
4669 : extern void insn_locations_finalize (void);
4670 : extern void set_curr_insn_location (location_t);
4671 : extern location_t curr_insn_location (void);
4672 : extern void set_insn_locations (rtx_insn *, location_t);
4673 :
4674 : /* rtl-error.cc */
4675 : extern void _fatal_insn_not_found (const_rtx, const char *, int, const char *)
4676 : ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
4677 : extern void _fatal_insn (const char *, const_rtx, const char *, int, const char *)
4678 : ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
4679 :
4680 : #define fatal_insn(msgid, insn) \
4681 : _fatal_insn (msgid, insn, __FILE__, __LINE__, __FUNCTION__)
4682 : #define fatal_insn_not_found(insn) \
4683 : _fatal_insn_not_found (insn, __FILE__, __LINE__, __FUNCTION__)
4684 :
4685 : /* reginfo.cc */
4686 : extern tree GTY(()) global_regs_decl[FIRST_PSEUDO_REGISTER];
4687 :
4688 : /* Information about the function that is propagated by the RTL backend.
   Available only for functions that have already been assembled.  */
4690 :
struct GTY(()) cgraph_rtl_info {
  /* The incoming stack boundary preferred by this function
     (presumably in bits, as for PREFERRED_STACK_BOUNDARY -- verify
     against the producers of this field).  */
  unsigned int preferred_incoming_stack_boundary;

  /* Which registers the function clobbers, either directly or by
     calling another function.  */
  HARD_REG_SET function_used_regs;
};
4698 :
4699 : /* If loads from memories of mode MODE always sign or zero extend,
4700 : return SIGN_EXTEND or ZERO_EXTEND as appropriate. Return UNKNOWN
4701 : otherwise. */
4702 :
4703 : inline rtx_code
4704 36528 : load_extend_op (machine_mode mode)
4705 : {
4706 36528 : scalar_int_mode int_mode;
4707 36528 : if (is_a <scalar_int_mode> (mode, &int_mode)
4708 : && GET_MODE_PRECISION (int_mode) < BITS_PER_WORD)
4709 : return LOAD_EXTEND_OP (int_mode);
4710 : return UNKNOWN;
4711 : }
4712 :
4713 : /* If X is a PLUS of a base and a constant offset, add the constant to *OFFSET
4714 : and return the base. Return X otherwise. */
4715 :
4716 : inline rtx
4717 109379756 : strip_offset_and_add (rtx x, poly_int64 *offset)
4718 : {
4719 109379756 : if (GET_CODE (x) == PLUS)
4720 : {
4721 57827196 : poly_int64 suboffset;
4722 57827196 : x = strip_offset (x, &suboffset);
4723 115654392 : *offset = poly_uint64 (*offset) + suboffset;
4724 : }
4725 109379756 : return x;
4726 : }
4727 :
4728 : /* Return true if X is an operation that always operates on the full
4729 : registers for WORD_REGISTER_OPERATIONS architectures. */
4730 :
4731 : inline bool
4732 : word_register_operation_p (const_rtx x)
4733 : {
4734 : switch (GET_CODE (x))
4735 : {
4736 : case CONST_INT:
4737 : case ROTATE:
4738 : case ROTATERT:
4739 : case SIGN_EXTRACT:
4740 : case ZERO_EXTRACT:
4741 : return false;
4742 :
4743 : default:
4744 : return true;
4745 : }
4746 : }
4747 :
4748 : /* Holds an rtx comparison to simplify passing many parameters pertaining to a
4749 : single comparison. */
4750 :
struct rtx_comparison {
  /* The comparison code.  */
  rtx_code code;
  /* The two operands being compared.  */
  rtx op0, op1;
  /* The machine mode associated with the comparison.  */
  machine_mode mode;
};
4756 :
4757 : /* gtype-desc.cc. */
4758 : extern void gt_ggc_mx (rtx &);
4759 : extern void gt_pch_nx (rtx &);
4760 : extern void gt_pch_nx (rtx &, gt_pointer_operator, void *);
4761 :
4762 : #endif /* ! GCC_RTL_H */
|