/* Analyze RTL for GNU compiler.
   Copyright (C) 1987-2026 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "rtlanal.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "recog.h"
#include "addresses.h"
#include "rtl-iter.h"
#include "hard-reg-set.h"
#include "function-abi.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static bool computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, scalar_int_mode,
                                                   const_rtx, machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, scalar_int_mode,
                                             const_rtx, machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, scalar_int_mode,
                                                const_rtx, machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, scalar_int_mode,
                                          const_rtx, machine_mode,
                                          unsigned int);

rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
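
/* As an illustration: on a hypothetical target where
   TARGET_MODE_REP_EXTENDED (QImode, SImode) is SIGN_EXTEND, the 24 bits
   of an SImode value that lie above QImode must all be copies of the
   QImode sign bit before we can truncate to QImode by simply switching
   modes, so NUM_SIGN_BIT_COPIES_IN_REP[SImode][QImode] would be 24.  */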

/* Store X into index I of ARRAY.  ARRAY is known to have at least I
   elements.  Return the new base of ARRAY.  */

template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
                                                  value_type *base,
                                                  size_t i, value_type x)
{
  if (base == array.stack)
    {
      if (i < LOCAL_ELEMS)
        {
          base[i] = x;
          return base;
        }
      gcc_checking_assert (i == LOCAL_ELEMS);
      /* A previous iteration might also have moved from the stack to the
         heap, in which case the heap array will already be big enough.  */
      if (vec_safe_length (array.heap) <= i)
        vec_safe_grow (array.heap, i + 1, true);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      return array.heap->address ();
    }
}

/* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
   number of elements added to the worklist.  */

template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
                                                    value_type *base,
                                                    size_t end, rtx_type x)
{
  enum rtx_code code = GET_CODE (x);
  const char *format = GET_RTX_FORMAT (code);
  size_t orig_end = end;
  if (UNLIKELY (INSN_P (x)))
    {
      /* Put the pattern at the top of the queue, since that's what
         we're likely to want most.  It also allows for the SEQUENCE
         code below.  */
      for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
        if (format[i] == 'e')
          {
            value_type subx = T::get_value (x->u.fld[i].rt_rtx);
            if (LIKELY (end < LOCAL_ELEMS))
              base[end++] = subx;
            else
              base = add_single_to_queue (array, base, end++, subx);
          }
    }
  else
    for (int i = 0; format[i]; ++i)
      if (format[i] == 'e')
        {
          value_type subx = T::get_value (x->u.fld[i].rt_rtx);
          if (LIKELY (end < LOCAL_ELEMS))
            base[end++] = subx;
          else
            base = add_single_to_queue (array, base, end++, subx);
        }
      else if (format[i] == 'E')
        {
          unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
          rtx *vec = x->u.fld[i].rt_rtvec->elem;
          if (LIKELY (end + length <= LOCAL_ELEMS))
            for (unsigned int j = 0; j < length; j++)
              base[end++] = T::get_value (vec[j]);
          else
            for (unsigned int j = 0; j < length; j++)
              base = add_single_to_queue (array, base, end++,
                                          T::get_value (vec[j]));
          if (code == SEQUENCE && end == length)
            /* If the subrtxes of the sequence fill the entire array then
               we know that no other parts of a containing insn are queued.
               The caller is therefore iterating over the sequence as a
               PATTERN (...), so we also want the patterns of the
               subinstructions.  */
            for (unsigned int j = 0; j < length; j++)
              {
                typename T::rtx_type x = T::get_rtx (base[j]);
                if (INSN_P (x))
                  base[j] = T::get_value (PATTERN (x));
              }
        }
  return end - orig_end;
}

template <typename T>
void
generic_subrtx_iterator <T>::free_array (array_type &array)
{
  vec_free (array.heap);
}

template <typename T>
const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;

template class generic_subrtx_iterator <const_rtx_accessor>;
template class generic_subrtx_iterator <rtx_var_accessor>;
template class generic_subrtx_iterator <rtx_ptr_accessor>;

/* Return true if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

bool
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return false;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return false;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return false;
      return true;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return true;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return true;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return true;
      }

  return false;
}

/* Return true if X has a value that can vary even between two
   executions of the program.  false means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is true if we are called from alias analysis; if it is
   false, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return false;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return false;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return false;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return false;
      return true;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return true;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return true;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return true;
      }

  return false;
}
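
/* For example, an ordinary pseudo such as (reg 100) varies, and so does
   a writable (mem ...), while (symbol_ref "x"), (const_int 4) and
   frame_pointer_rtx do not.  */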

/* Compute an approximation for the offset between the register
   FROM and TO for the current function, as it was at the start
   of the routine.  */

static poly_int64
get_initial_register_offset (int from, int to)
{
  static const struct elim_table_t
  {
    const int from;
    const int to;
  } table[] = ELIMINABLE_REGS;
  poly_int64 offset1, offset2;
  unsigned int i, j;

  if (to == from)
    return 0;

  /* It is not safe to call INITIAL_ELIMINATION_OFFSET before the epilogue
     is completed, but we need to give at least an estimate for the stack
     pointer based on the frame size.  */
  if (!epilogue_completed)
    {
      offset1 = crtl->outgoing_args_size + get_frame_size ();
#if !STACK_GROWS_DOWNWARD
      offset1 = - offset1;
#endif
      if (to == STACK_POINTER_REGNUM)
        return offset1;
      else if (from == STACK_POINTER_REGNUM)
        return - offset1;
      else
        return 0;
    }

  for (i = 0; i < ARRAY_SIZE (table); i++)
    if (table[i].from == from)
      {
        if (table[i].to == to)
          {
            INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                        offset1);
            return offset1;
          }
        for (j = 0; j < ARRAY_SIZE (table); j++)
          {
            if (table[j].to == to
                && table[j].from == table[i].to)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return offset1 + offset2;
              }
            if (table[j].from == to
                && table[j].to == table[i].to)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return offset1 - offset2;
              }
          }
      }
    else if (table[i].to == from)
      {
        if (table[i].from == to)
          {
            INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                        offset1);
            return - offset1;
          }
        for (j = 0; j < ARRAY_SIZE (table); j++)
          {
            if (table[j].to == to
                && table[j].from == table[i].from)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return - offset1 + offset2;
              }
            if (table[j].from == to
                && table[j].to == table[i].from)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return - offset1 - offset2;
              }
          }
      }

  /* If the requested register combination was not found,
     try a simpler combination.  */
  if (from == ARG_POINTER_REGNUM)
    return get_initial_register_offset (HARD_FRAME_POINTER_REGNUM, to);
  else if (to == ARG_POINTER_REGNUM)
    return get_initial_register_offset (from, HARD_FRAME_POINTER_REGNUM);
  else if (from == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (FRAME_POINTER_REGNUM, to);
  else if (to == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (from, FRAME_POINTER_REGNUM);
  else
    return 0;
}

/* Return true if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether true is returned for unaligned memory
   references on strict alignment machines.  */

static bool
rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size,
                       machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);
  gcc_checking_assert (mode == BLKmode
                       || mode == VOIDmode
                       || known_size_p (size));
  poly_int64 const_x1;

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT
      && unaligned_mems
      && mode != BLKmode
      && mode != VOIDmode)
    {
      poly_int64 actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (!multiple_p (actual_offset, GET_MODE_SIZE (mode)))
        return true;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return true;
      if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x))
        {
          tree decl;
          poly_int64 decl_size;

          if (maybe_lt (offset, 0))
            return true;
          if (!known_size_p (size))
            return maybe_ne (offset, 0);

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            {
              if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &decl_size))
                decl_size = -1;
            }
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (!known_size_p (decl_size) || known_eq (decl_size, 0)
                  ? maybe_ne (offset, 0)
                  : !known_subrange_p (offset, size, 0, decl_size));
        }

      return false;

    case LABEL_REF:
      return false;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
         nonsensical offsets.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        {
#ifdef RED_ZONE_SIZE
          poly_int64 red_zone_size = RED_ZONE_SIZE;
#else
          poly_int64 red_zone_size = 0;
#endif
          poly_int64 stack_boundary = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
          poly_int64 low_bound, high_bound;

          if (!known_size_p (size))
            return true;

          if (x == frame_pointer_rtx)
            {
              if (FRAME_GROWS_DOWNWARD)
                {
                  high_bound = targetm.starting_frame_offset ();
                  low_bound = high_bound - get_frame_size ();
                }
              else
                {
                  low_bound = targetm.starting_frame_offset ();
                  high_bound = low_bound + get_frame_size ();
                }
            }
          else if (x == hard_frame_pointer_rtx)
            {
              poly_int64 sp_offset
                = get_initial_register_offset (STACK_POINTER_REGNUM,
                                               HARD_FRAME_POINTER_REGNUM);
              poly_int64 ap_offset
                = get_initial_register_offset (ARG_POINTER_REGNUM,
                                               HARD_FRAME_POINTER_REGNUM);

#if STACK_GROWS_DOWNWARD
              low_bound = sp_offset - red_zone_size - stack_boundary;
              high_bound = ap_offset
                           + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
                           + crtl->args.size
#endif
                           + stack_boundary;
#else
              high_bound = sp_offset + red_zone_size + stack_boundary;
              low_bound = ap_offset
                          + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
                          - crtl->args.size
#endif
                          - stack_boundary;
#endif
            }
          else if (x == stack_pointer_rtx)
            {
              poly_int64 ap_offset
                = get_initial_register_offset (ARG_POINTER_REGNUM,
                                               STACK_POINTER_REGNUM);

#if STACK_GROWS_DOWNWARD
              low_bound = - red_zone_size - stack_boundary;
              high_bound = ap_offset
                           + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
                           + crtl->args.size
#endif
                           + stack_boundary;
#else
              high_bound = red_zone_size + stack_boundary;
              low_bound = ap_offset
                          + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
                          - crtl->args.size
#endif
                          - stack_boundary;
#endif
            }
          else
            {
              /* We assume that accesses are safe to at least the
                 next stack boundary.
                 Examples are varargs and __builtin_return_address.  */
#if ARGS_GROW_DOWNWARD
              high_bound = FIRST_PARM_OFFSET (current_function_decl)
                           + stack_boundary;
              low_bound = FIRST_PARM_OFFSET (current_function_decl)
                          - crtl->args.size - stack_boundary;
#else
              low_bound = FIRST_PARM_OFFSET (current_function_decl)
                          - stack_boundary;
              high_bound = FIRST_PARM_OFFSET (current_function_decl)
                           + crtl->args.size + stack_boundary;
#endif
            }

          if (known_ge (offset, low_bound)
              && known_le (offset, high_bound - size))
            return false;
          return true;
        }
      /* All of the virtual frame registers are stack references.  */
      if (VIRTUAL_REGISTER_P (x))
        return false;
      return true;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a const unspec without offset.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && GET_CODE (XEXP (x, 1)) == CONST
          && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
          && known_eq (offset, 0))
        return false;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (poly_int_rtx_p (XEXP (x, 1), &const_x1)
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + const_x1,
                                     size, mode, unaligned_mems))
        return false;

      return true;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return true;
}

/* Return true if the use of X as an address in a MEM can cause a trap.  */

bool
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, -1, BLKmode, false);
}
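
/* For example, a reference through (symbol_ref "weak_sym") for a weak
   symbol can trap, since the symbol may resolve to a null address,
   whereas a LABEL_REF or a constant-pool address never does.  */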

/* Return true if X contains a MEM subrtx.  */

bool
contains_mem_rtx_p (rtx x)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (MEM_P (*iter))
      return true;

  return false;
}

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return flag_delete_null_pointer_checks && !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (VIRTUAL_REGISTER_P (x))
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      /* Handle PIC references.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return true if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is true if we are called from alias analysis; if it is
   false, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return false;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return true;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return true;
      }
  return false;
}

/* Get the declaration of the function called by INSN.  */

tree
get_call_fndecl (const rtx_insn *insn)
{
  rtx note, datum;

  note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
  if (note == NULL_RTX)
    return NULL_TREE;

  datum = XEXP (note, 0);
  if (datum != NULL_RTX)
    return SYMBOL_REF_DECL (datum);

  return NULL_TREE;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.cc with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}

/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
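
/* For example, (const (plus (symbol_ref "x") (const_int 8))) is split
   into base (symbol_ref "x") and offset (const_int 8), while a plain
   (reg 100) is returned unchanged with offset const0_rtx.  */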

/* Express integer value X as some value Y plus a polynomial offset,
   where Y is either const0_rtx, X or something within X (as opposed
   to a new rtx).  Return the Y and store the offset in *OFFSET_OUT.  */

rtx
strip_offset (rtx x, poly_int64 *offset_out)
{
  rtx base = const0_rtx;
  rtx test = x;
  if (GET_CODE (test) == CONST)
    test = XEXP (test, 0);
  if (GET_CODE (test) == PLUS)
    {
      base = XEXP (test, 0);
      test = XEXP (test, 1);
    }
  if (poly_int_rtx_p (test, offset_out))
    return base;
  *offset_out = 0;
  return x;
}
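
/* For example, (plus (reg 100) (const_int 16)) yields (reg 100) with
   *OFFSET_OUT set to 16; a lone (const_int 12) yields const0_rtx with
   offset 12; and an rtx with no constant term is returned as-is with
   offset 0.  */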

/* Return the argument size in REG_ARGS_SIZE note X.  */

poly_int64
get_args_size (const_rtx x)
{
  gcc_checking_assert (REG_NOTE_KIND (x) == REG_ARGS_SIZE);
  return rtx_to_poly_int64 (XEXP (x, 0));
}

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}


/* Return TRUE if OP is a register or subreg of a register that
   holds an unsigned quantity.  Otherwise, return FALSE.  */

bool
unsigned_reg_p (rtx op)
{
  if (REG_P (op)
      && REG_EXPR (op)
      && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    return true;

  if (GET_CODE (op) == SUBREG
      && SUBREG_PROMOTED_SIGN (op))
    return true;

  return false;
}


/* Return true if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

bool
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;

  if (reg == in)
    return true;

  if (GET_CODE (in) == LABEL_REF)
    return reg == label_ref_label (in);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case PC:
      return false;

    CASE_CONST_ANY:
      /* These are kept unique for a given value.  */
      return false;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return true;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return true;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return true;
    }
  return false;
}
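
/* Note that the REG case above compares only register numbers, so
   (reg:QI 5) counts as mentioned in (plus (reg:SI 5) (const_int 1))
   even though the modes differ.  */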

/* Return true if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

bool
no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
{
  rtx_insn *p;
  if (beg == end)
    return false;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return false;
  return true;
}

/* Return true if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

bool
reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
                    const rtx_insn *to_insn)
{
  rtx_insn *insn;

  if (from_insn == to_insn)
    return false;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return true;
  return false;
}

/* Return true if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

bool
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return true;

      /* If the destination is anything other than PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && !read_modify_subreg_p (SET_DEST (body)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return true;
      return false;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return true;
      return false;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return true;
      return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return true;
      return false;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return true;
      return false;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return true;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return false;
    }
}

/* Return true if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

bool
reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
                   const rtx_insn *to_insn)
{
  const rtx_insn *insn;

  if (from_insn == to_insn)
    return false;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;
  return false;
}

/* Return true if REG is set or clobbered inside INSN.  */

bool
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* After delay slot handling, call and branch insns might be in a
     sequence.  Check all the elements there.  */
  if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
        if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
          return true;

      return false;
    }

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && (insn_callee_abi (as_a<const rtx_insn *> (insn))
                       .clobbers_reg_p (GET_MODE (reg), REGNO (reg))))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return true;

  /* There are no REG_INC notes for SP autoinc.  */
  if (reg == stack_pointer_rtx && INSN_P (insn))
    {
      subrtx_var_iterator::array_type array;
      FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST)
        {
          rtx mem = *iter;
          if (mem
              && MEM_P (mem)
              && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
            {
              if (XEXP (XEXP (mem, 0), 0) == stack_pointer_rtx)
                return true;
              iter.skip_subrtxes ();
            }
        }
    }

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return false
   only if none of them are modified between START and END.  Return true if
   X contains a MEM; this routine does use memory aliasing.  */

bool
modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx_insn *insn;

  if (start == end)
    return false;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return false;

    case PC:
      return true;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return true;
      if (MEM_READONLY_P (x))
        return false;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return true;
      return false;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return true;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return true;
    }

  return false;
}

/* Similar to reg_set_p, but check all registers in X.  Return false only if
   none of them are modified in INSN.  Return true if X contains a MEM; this
   routine does use memory aliasing.  */

bool
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return false;

    case PC:
      return true;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return true;
      if (MEM_READONLY_P (x))
        return false;
      if (memory_modified_in_insn_p (x, insn))
        return true;
      return false;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return true;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return true;
    }

  return false;
}

/* Return true if X is a SUBREG and if storing a value to X would
   preserve some of its SUBREG_REG.  For example, on a normal 32-bit
   target, using a SUBREG to store to one half of a DImode REG would
   preserve the other half.  */

bool
read_modify_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  poly_uint64 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  poly_uint64 osize = GET_MODE_SIZE (GET_MODE (x));
  poly_uint64 regsize = REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
  /* The inner and outer modes of a subreg must be ordered, so that we
     can tell whether they're paradoxical or partial.  */
  gcc_checking_assert (ordered_p (isize, osize));
  return (maybe_gt (isize, osize) && maybe_gt (isize, regsize));
}
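
/* Note the REGMODE_NATURAL_SIZE test: on a typical 32-bit target,
   (subreg:QI (reg:SI S) 0) returns false, because SImode already fits
   in a single natural-size register and a store to any subreg of it
   clobbers the whole of S, whereas (subreg:SI (reg:DI D) 0) returns
   true as described above.  */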

/* Helper function for set_of.  */
struct set_of_data
{
  const_rtx found;
  const_rtx pat;
};

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_pattern_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Check whether instruction pattern PAT contains a SET with the following
   properties:

   - the SET is executed unconditionally; and
   - either:
     - the destination of the SET is a REG that contains REGNO; or
     - both:
       - the destination of the SET is a SUBREG of such a REG; and
       - writing to the subreg clobbers all of the SUBREG_REG
         (in other words, read_modify_subreg_p is false).

   If PAT does have a SET like that, return the set, otherwise return null.

   This is intended to be an alternative to single_set for passes that
   can handle patterns with multiple_sets.  */
rtx
simple_regno_set (rtx pat, unsigned int regno)
{
  if (GET_CODE (pat) == PARALLEL)
    {
      int last = XVECLEN (pat, 0) - 1;
      for (int i = 0; i < last; ++i)
        if (rtx set = simple_regno_set (XVECEXP (pat, 0, i), regno))
          return set;

      pat = XVECEXP (pat, 0, last);
    }

  if (GET_CODE (pat) == SET
      && covers_regno_no_parallel_p (SET_DEST (pat), regno))
    return pat;

  return nullptr;
}

/* Add all hard registers in X to *PSET.  */
void
find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    }
}

/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.

   IMPLICIT is true if we should include registers that are fully-clobbered
   by calls.  This should be used with caution, since it doesn't include
   partially-clobbered registers.  */
void
find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (insn, record_hard_reg_sets, pset);
  if (CALL_P (insn) && implicit)
    *pset |= insn_callee_abi (insn).full_reg_clobbers ();
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  find_all_hard_regs (*px, (HARD_REG_SET *) data);
}

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose outputs
   will not be used, which we ignore.  */

rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            default:
              return NULL_RTX;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set for the first time, we just expect it
                 to be the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              auto unused = [] (const rtx_insn *insn, rtx dest) {
                if (!df)
                  return false;
                if (df_note)
                  return !!find_reg_note (insn, REG_UNUSED, dest);
                return (REG_P (dest)
                        && !HARD_REGISTER_P (dest)
                        && REGNO (dest) < df->regs_inited
                        && DF_REG_USE_COUNT (REGNO (dest)) == 0);
              };
              if (!set_verified)
                {
                  if (unused (insn, SET_DEST (set)) && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!unused (insn, SET_DEST (sub)) || side_effects_p (sub))
                return NULL_RTX;
              break;
            }
        }
    }
  return set;
}
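
/* For example, on a target where addition also clobbers a condition-code
   register, (parallel [(set (reg 100) (plus (reg 100) (const_int 1)))
   (clobber (reg:CC 17))]) still counts as a single set: the CLOBBER is
   ignored and the inner SET is returned.  */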

/* Given an INSN, return true if it has more than one SET, else return
   false.  */

bool
multiple_sets (const_rtx insn)
{
  bool found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return false;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = false; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return true;
            else
              found = true;
          }
    }

  /* Either zero or one SET.  */
  return false;
}

/* Return true if the destination of SET equals the source
   and there are no side effects.  */

bool
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return true;

  if (MEM_P (dst) && MEM_P (src))
    return (rtx_equal_p (dst, src)
            && !side_effects_p (dst)
            && !side_effects_p (src));

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return (rtx_equal_p (XEXP (dst, 0), src)
            && !BITS_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
            && !side_effects_p (src)
            && !side_effects_p (XEXP (dst, 0)));

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (maybe_ne (SUBREG_BYTE (src), SUBREG_BYTE (dst)))
        return false;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
      if (GET_MODE (src) != GET_MODE (dst))
        /* It is hard to tell whether subregs refer to the same bits, so act
           conservatively and return false.  */
        return false;
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      poly_int64 c0;
      if (!poly_int_rtx_p (XVECEXP (par, 0, 0), &c0))
        return false;
      poly_int64 offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      for (i = 1; i < XVECLEN (par, 0); i++)
        {
          poly_int64 c0i;
          if (!poly_int_rtx_p (XVECEXP (par, 0, i), &c0i)
              || maybe_ne (c0i, c0 + i))
            return false;
        }
      return
        REG_CAN_CHANGE_MODE_P (REGNO (dst), GET_MODE (src0), GET_MODE (dst))
        && validate_subreg (GET_MODE (dst), GET_MODE (src0), src0, offset)
        && simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
                                  offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
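
/* The simplest case: (set (reg:SI 5) (reg:SI 5)) is a no-op, as is a
   self-copy through memory such as (set (mem:SI (reg A)) (mem:SI (reg A))),
   provided neither address has side effects like auto-increment.  */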
1690 :
1691 : /* Return true if an insn consists only of SETs, each of which only sets a
1692 : value to itself. */
1693 :
1694 : bool
1695 1134433387 : noop_move_p (const rtx_insn *insn)
1696 : {
1697 1134433387 : rtx pat = PATTERN (insn);
1698 :
1699 1134433387 : if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1700 : return true;
1701 :
1702 : /* Check the code to be executed for COND_EXEC. */
1703 1134426850 : if (GET_CODE (pat) == COND_EXEC)
1704 0 : pat = COND_EXEC_CODE (pat);
1705 :
1706 1134426850 : if (GET_CODE (pat) == SET && set_noop_p (pat))
1707 : return true;
1708 :
1709 1134396755 : if (GET_CODE (pat) == PARALLEL)
1710 : {
1711 : int i;
1712 : /* If nothing but SETs of registers to themselves,
1713 : this insn can also be deleted. */
1714 157028062 : for (i = 0; i < XVECLEN (pat, 0); i++)
1715 : {
1716 157027956 : rtx tem = XVECEXP (pat, 0, i);
1717 :
1718 157027956 : if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1719 21413 : continue;
1720 :
1721 157006543 : if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1722 157006437 : return false;
1723 : }
1724 :
1725 : return true;
1726 : }
1727 : return false;
1728 : }
1729 :
1730 :
1731 : /* Return true if any register in the range [REGNO, ENDREGNO)
1732 : appears either explicitly or implicitly in X
1733 : other than being stored into.
1734 :
1735 : References contained within the substructure at LOC do not count.
1736 : LOC may be zero, meaning don't ignore anything. */
1737 :
1738 : bool
1739 2197382295 : refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1740 : rtx *loc)
1741 : {
1742 2922146435 : int i;
1743 2922146435 : unsigned int x_regno;
1744 2922146435 : RTX_CODE code;
1745 2922146435 : const char *fmt;
1746 :
1747 2922146435 : repeat:
1748 : /* The contents of a REG_NONNEG note are always zero, so we must come here
1749 : upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1750 2922146435 : if (x == 0)
1751 : return false;
1752 :
1753 2922146435 : code = GET_CODE (x);
1754 :
1755 2922146435 : switch (code)
1756 : {
1757 1590854255 : case REG:
1758 1590854255 : x_regno = REGNO (x);
1759 :
1760 : /* If we are modifying the stack, frame, or argument pointer, it will
1761 : clobber a virtual register. In fact, we could be more precise,
1762 : but it isn't worth it. */
1763 1590854255 : if ((x_regno == STACK_POINTER_REGNUM
1764 1590854255 : || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1765 1590854255 : && x_regno == ARG_POINTER_REGNUM)
1766 : || x_regno == FRAME_POINTER_REGNUM)
1767 125510796 : && VIRTUAL_REGISTER_NUM_P (regno))
1768 : return true;
1769 :
1770 1590854253 : return endregno > x_regno && regno < END_REGNO (x);
1771 :
1772 33200486 : case SUBREG:
1773 : /* If this is a SUBREG of a hard reg, we can see exactly which
1774 : registers are being modified. Otherwise, handle normally. */
1775 33200486 : if (REG_P (SUBREG_REG (x))
1776 33200486 : && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1777 : {
1778 2293 : unsigned int inner_regno = subreg_regno (x);
1779 2293 : unsigned int inner_endregno
1780 : = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1781 2293 : ? subreg_nregs (x) : 1);
1782 :
1783 2293 : return endregno > inner_regno && regno < inner_endregno;
1784 : }
1785 : break;
1786 :
1787 98337125 : case CLOBBER:
1788 98337125 : case SET:
1789 98337125 : if (&SET_DEST (x) != loc
1790 : /* Note setting a SUBREG counts as referring to the REG it is in for
1791 : a pseudo but not for hard registers since we can
1792 : treat each word individually. */
1793 98337125 : && ((GET_CODE (SET_DEST (x)) == SUBREG
1794 707536 : && loc != &SUBREG_REG (SET_DEST (x))
1795 707536 : && REG_P (SUBREG_REG (SET_DEST (x)))
1796 707536 : && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1797 707536 : && refers_to_regno_p (regno, endregno,
1798 : SUBREG_REG (SET_DEST (x)), loc))
1799 98316214 : || (!REG_P (SET_DEST (x))
1800 10437670 : && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1801 131409 : return true;
1802 :
1803 98205716 : if (code == CLOBBER || loc == &SET_SRC (x))
1804 : return false;
1805 81761580 : x = SET_SRC (x);
1806 81761580 : goto repeat;
1807 :
1808 : default:
1809 : break;
1810 : }
1811 :
1812 : /* X does not match, so try its subexpressions. */
1813 :
1814 1232952762 : fmt = GET_RTX_FORMAT (code);
1815 2405652467 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1816 : {
1817 1822849717 : if (fmt[i] == 'e' && loc != &XEXP (x, i))
1818 : {
1819 1121854901 : if (i == 0)
1820 : {
1821 643002560 : x = XEXP (x, 0);
1822 643002560 : goto repeat;
1823 : }
1824 : else
1825 478852341 : if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1826 : return true;
1827 : }
1828 700994816 : else if (fmt[i] == 'E')
1829 : {
1830 44958535 : int j;
1831 163972141 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1832 120681821 : if (loc != &XVECEXP (x, i, j)
1833 120681821 : && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1834 : return true;
1835 : }
1836 : }
1837 : return false;
1838 : }
1839 :
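: /* For example (illustrative): with X = (set (reg:SI 1) (reg:SI 0)),
:    refers_to_regno_p (0, 1, X, NULL) is true because register 0 is
:    used, while refers_to_regno_p (1, 2, X, NULL) is false because
:    register 1 is only stored into. */
: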
1840 : /* Return true if modifying X will affect IN. If X is a register or a SUBREG,
1841 : we check if any register number in X conflicts with the relevant register
1842 : numbers. If X is a constant, return false. If X is a MEM, return true iff
1843 : IN contains a MEM (we don't bother checking for memory addresses that can't
1844 : conflict because we expect this to be a rare case). */
1845 :
1846 : bool
1847 1478763897 : reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1848 : {
1849 1478763897 : unsigned int regno, endregno;
1850 :
1852 : /* If either argument is a constant, then modifying X cannot
1853 : affect IN. We check IN here; the CONSTANT_P (x) case is handled
1854 : by the default arm of the switch statement below. */
1854 1478763897 : if (CONSTANT_P (in))
1855 : return false;
1856 :
1857 1450240058 : recurse:
1858 1450243995 : switch (GET_CODE (x))
1859 : {
1860 3937 : case CLOBBER:
1861 3937 : case STRICT_LOW_PART:
1862 3937 : case ZERO_EXTRACT:
1863 3937 : case SIGN_EXTRACT:
1864 : /* Overly conservative. */
1865 3937 : x = XEXP (x, 0);
1866 3937 : goto recurse;
1867 :
1868 1323623 : case SUBREG:
1869 1323623 : regno = REGNO (SUBREG_REG (x));
1870 1323623 : if (regno < FIRST_PSEUDO_REGISTER)
1871 0 : regno = subreg_regno (x);
1872 1323623 : endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1873 0 : ? subreg_nregs (x) : 1);
1874 1323623 : goto do_reg;
1875 :
1876 1445121424 : case REG:
1877 1445121424 : regno = REGNO (x);
1878 1445121424 : endregno = END_REGNO (x);
1879 1446445047 : do_reg:
1880 1446445047 : return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1881 :
1882 1810169 : case MEM:
1883 1810169 : {
1884 1810169 : const char *fmt;
1885 1810169 : int i;
1886 :
1887 1810169 : if (MEM_P (in))
1888 : return true;
1889 :
1890 1671303 : fmt = GET_RTX_FORMAT (GET_CODE (in));
1891 3563479 : for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1892 1940191 : if (fmt[i] == 'e')
1893 : {
1894 531159 : if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1895 : return true;
1896 : }
1897 1409032 : else if (fmt[i] == 'E')
1898 : {
1899 8337 : int j;
1900 36727 : for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1901 29429 : if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1902 : return true;
1903 : }
1904 :
1905 : return false;
1906 : }
1907 :
1908 1864506 : case SCRATCH:
1909 1864506 : case PC:
1910 1864506 : return reg_mentioned_p (x, in);
1911 :
1912 676 : case PARALLEL:
1913 676 : {
1914 676 : int i;
1915 :
1917 : /* If any register in the PARALLEL overlaps IN, return true. */
1917 1218 : for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1918 1084 : if (XEXP (XVECEXP (x, 0, i), 0) != 0
1919 1084 : && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1920 : return true;
1921 : return false;
1922 : }
1923 :
1924 119660 : default:
1925 119660 : gcc_assert (CONSTANT_P (x));
1926 : return false;
1927 : }
1928 : }
1929 :
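: /* For illustration (an aside, not from the original sources):
:
:      reg_overlap_mentioned_p ((reg:SI 100),
:                               (plus:SI (reg:SI 100) (const_int 4)))
:
:    is true, since modifying pseudo 100 changes the value of the PLUS,
:    whereas passing (reg:SI 101) as the first argument yields false. */
: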
1930 : /* Call FUN on each register or MEM that is stored into or clobbered by X.
1931 : (X would be the pattern of an insn). DATA is an arbitrary pointer,
1933 : ignored by note_pattern_stores itself, but passed to FUN.
1933 :
1934 : FUN receives three arguments:
1935 : 1. the REG, MEM or PC being stored in or clobbered,
1936 : 2. the SET or CLOBBER rtx that does the store,
1937 : 3. the pointer DATA provided to note_stores.
1938 :
1939 : If the item being stored in or clobbered is a SUBREG of a hard register,
1940 : the SUBREG will be passed. */
1941 :
1942 : void
1943 6961640077 : note_pattern_stores (const_rtx x,
1944 : void (*fun) (rtx, const_rtx, void *), void *data)
1945 : {
1946 6961640077 : int i;
1947 :
1948 6961640077 : if (GET_CODE (x) == COND_EXEC)
1949 0 : x = COND_EXEC_CODE (x);
1950 :
1951 6961640077 : if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1952 : {
1953 5127577501 : rtx dest = SET_DEST (x);
1954 :
1955 5127577501 : while ((GET_CODE (dest) == SUBREG
1956 20741018 : && (!REG_P (SUBREG_REG (dest))
1957 20741018 : || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1958 5128523998 : || GET_CODE (dest) == ZERO_EXTRACT
1959 10277492505 : || GET_CODE (dest) == STRICT_LOW_PART)
1960 21655772 : dest = XEXP (dest, 0);
1961 :
1962 : /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1963 : each of whose first operand is a register. */
1964 5127577501 : if (GET_CODE (dest) == PARALLEL)
1965 : {
1966 1469233 : for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1967 931905 : if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1968 931905 : (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1969 : }
1970 : else
1971 5127040173 : (*fun) (dest, x, data);
1972 : }
1973 :
1974 1834062576 : else if (GET_CODE (x) == PARALLEL)
1975 2086775618 : for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1976 1405733656 : note_pattern_stores (XVECEXP (x, 0, i), fun, data);
1977 6961640077 : }
1978 :
1979 : /* Same, but for an instruction. If the instruction is a call, include
1980 : any CLOBBERs in its CALL_INSN_FUNCTION_USAGE. */
1981 :
1982 : void
1983 3364035248 : note_stores (const rtx_insn *insn,
1984 : void (*fun) (rtx, const_rtx, void *), void *data)
1985 : {
1986 3364035248 : if (CALL_P (insn))
1987 164152803 : for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
1988 499477839 : link; link = XEXP (link, 1))
1989 335325036 : if (GET_CODE (XEXP (link, 0)) == CLOBBER)
1990 12056865 : note_pattern_stores (XEXP (link, 0), fun, data);
1991 3364035248 : note_pattern_stores (PATTERN (insn), fun, data);
1992 3364035248 : }
1993 :
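: /* A minimal usage sketch (illustrative only; record_write and
:    written_regs are hypothetical names, not part of this file):
:
:      static void
:      record_write (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
:                    void *data)
:      {
:        if (REG_P (dest))
:          bitmap_set_bit ((bitmap) data, REGNO (dest));
:      }
:
:      ...
:      note_stores (insn, record_write, written_regs);
:
:    collects the numbers of the registers written by INSN, subject to
:    the SUBREG caveat documented above. */
: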
1995 : /* Like note_stores, but call FUN for each expression that is being
1995 : referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1996 : FUN for each expression, not any interior subexpressions. FUN receives a
1997 : pointer to the expression and the DATA passed to this function.
1998 :
1999 : Note that this is not quite the same test as that done in reg_referenced_p
2000 : since that considers something as being referenced if it is being
2001 : partially set, while we do not. */
2002 :
2003 : void
2004 1098649148 : note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
2005 : {
2006 1098649148 : rtx body = *pbody;
2007 1098649148 : int i;
2008 :
2009 1098649148 : switch (GET_CODE (body))
2010 : {
2011 0 : case COND_EXEC:
2012 0 : (*fun) (&COND_EXEC_TEST (body), data);
2013 0 : note_uses (&COND_EXEC_CODE (body), fun, data);
2014 0 : return;
2015 :
2016 85001924 : case PARALLEL:
2017 262848429 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2018 177846505 : note_uses (&XVECEXP (body, 0, i), fun, data);
2019 : return;
2020 :
2021 0 : case SEQUENCE:
2022 0 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2023 0 : note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
2024 : return;
2025 :
2026 4849273 : case USE:
2027 4849273 : (*fun) (&XEXP (body, 0), data);
2028 4849273 : return;
2029 :
2030 267186 : case ASM_OPERANDS:
2031 374126 : for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
2032 106940 : (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
2033 : return;
2034 :
2035 138447 : case TRAP_IF:
2036 138447 : (*fun) (&TRAP_CONDITION (body), data);
2037 138447 : return;
2038 :
2039 10856 : case PREFETCH:
2040 10856 : (*fun) (&XEXP (body, 0), data);
2041 10856 : return;
2042 :
2043 3237525 : case UNSPEC:
2044 3237525 : case UNSPEC_VOLATILE:
2045 6495803 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2046 3258278 : (*fun) (&XVECEXP (body, 0, i), data);
2047 : return;
2048 :
2049 82996841 : case CLOBBER:
2050 82996841 : if (MEM_P (XEXP (body, 0)))
2051 3451247 : (*fun) (&XEXP (XEXP (body, 0), 0), data);
2052 : return;
2053 :
2054 545255295 : case SET:
2055 545255295 : {
2056 545255295 : rtx dest = SET_DEST (body);
2057 :
2059 : /* For sets we visit everything in the source, plus registers in the
2060 : store's memory address and the operands of a ZERO_EXTRACT. */
2060 545255295 : (*fun) (&SET_SRC (body), data);
2061 :
2062 545255295 : if (GET_CODE (dest) == ZERO_EXTRACT)
2063 : {
2064 32826 : (*fun) (&XEXP (dest, 1), data);
2065 32826 : (*fun) (&XEXP (dest, 2), data);
2066 : }
2067 :
2068 547413921 : while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
2069 2158626 : dest = XEXP (dest, 0);
2070 :
2071 545255295 : if (MEM_P (dest))
2072 94601608 : (*fun) (&XEXP (dest, 0), data);
2073 : }
2074 : return;
2075 :
2076 376891801 : default:
2077 : /* All the other possibilities never store. */
2078 376891801 : (*fun) (pbody, data);
2079 376891801 : return;
2080 : }
2081 : }
2082 :
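: /* A minimal usage sketch (illustrative; count_mem_uses and n_mems are
:    hypothetical names, not part of this file):
:
:      static void
:      count_mem_uses (rtx *loc, void *data)
:      {
:        subrtx_iterator::array_type array;
:        FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
:          if (MEM_P (*iter))
:            ++*(int *) data;
:      }
:
:      ...
:      note_uses (&PATTERN (insn), count_mem_uses, &n_mems);
:
:    Because FUN receives a pointer to each expression, callers may also
:    rewrite the expression in place. */
: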
2083 : /* Try to add a description of REG X to this object, stopping once
2084 : the REF_END limit has been reached. FLAGS is a bitmask of
2085 : rtx_obj_reference flags that describe the context. */
2086 :
2087 : void
2088 828444901 : rtx_properties::try_to_add_reg (const_rtx x, unsigned int flags)
2089 : {
2090 828444901 : if (REG_NREGS (x) != 1)
2091 2610513 : flags |= rtx_obj_flags::IS_MULTIREG;
2092 828444901 : machine_mode mode = GET_MODE (x);
2093 828444901 : unsigned int start_regno = REGNO (x);
2094 828444901 : unsigned int end_regno = END_REGNO (x);
2095 1659500315 : for (unsigned int regno = start_regno; regno < end_regno; ++regno)
2096 831055414 : if (ref_iter != ref_end)
2097 830900746 : *ref_iter++ = rtx_obj_reference (regno, flags, mode,
2098 830900746 : regno - start_regno);
2099 828444901 : }
2100 :
2101 : /* Add a description of destination X to this object. FLAGS is a bitmask
2102 : of rtx_obj_reference flags that describe the context.
2103 :
2104 : This routine accepts all rtxes that can legitimately appear in a
2105 : SET_DEST. */
2106 :
2107 : void
2108 426125915 : rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags)
2109 : {
2110 : /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
2111 : each of whose first operand is a register. */
2112 426125915 : if (UNLIKELY (GET_CODE (x) == PARALLEL))
2113 : {
2114 132508 : for (int i = XVECLEN (x, 0) - 1; i >= 0; --i)
2115 83609 : if (rtx dest = XEXP (XVECEXP (x, 0, i), 0))
2116 83609 : try_to_add_dest (dest, flags);
2117 : return;
2118 : }
2119 :
2120 426077016 : unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
2121 426077016 : flags |= rtx_obj_flags::IS_WRITE;
2122 427750617 : for (;;)
2123 427750617 : if (GET_CODE (x) == ZERO_EXTRACT)
2124 : {
2125 20473 : try_to_add_src (XEXP (x, 1), base_flags);
2126 20473 : try_to_add_src (XEXP (x, 2), base_flags);
2127 20473 : flags |= rtx_obj_flags::IS_READ;
2128 20473 : x = XEXP (x, 0);
2129 : }
2130 427730144 : else if (GET_CODE (x) == STRICT_LOW_PART)
2131 : {
2132 56098 : flags |= rtx_obj_flags::IS_READ;
2133 56098 : x = XEXP (x, 0);
2134 : }
2135 427674046 : else if (GET_CODE (x) == SUBREG)
2136 : {
2137 1597030 : flags |= rtx_obj_flags::IN_SUBREG;
2138 1597030 : if (read_modify_subreg_p (x))
2139 908120 : flags |= rtx_obj_flags::IS_READ;
2140 1597030 : x = SUBREG_REG (x);
2141 : }
2142 : else
2143 : break;
2144 :
2145 426077016 : if (MEM_P (x))
2146 : {
2147 59710507 : if (ref_iter != ref_end)
2148 59699851 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, GET_MODE (x));
2149 :
2150 59710507 : unsigned int addr_flags = base_flags | rtx_obj_flags::IN_MEM_STORE;
2151 59710507 : if (flags & rtx_obj_flags::IS_READ)
2152 3869 : addr_flags |= rtx_obj_flags::IN_MEM_LOAD;
2153 59710507 : try_to_add_src (XEXP (x, 0), addr_flags);
2154 59710507 : return;
2155 : }
2156 :
2157 366366509 : if (LIKELY (REG_P (x)))
2158 : {
2159 323943526 : if (REGNO (x) == STACK_POINTER_REGNUM)
2160 : {
2161 : /* Stack accesses are dependent on previous allocations and
2162 : anti-dependent on later deallocations, so both types of
2163 : stack operation are akin to a memory write. */
2164 23559446 : if (ref_iter != ref_end)
2165 23559446 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, BLKmode);
2166 :
2167 : /* We want to keep sp alive everywhere - by making all
2168 : writes to sp also use sp. */
2169 23559446 : flags |= rtx_obj_flags::IS_READ;
2170 : }
2171 323943526 : try_to_add_reg (x, flags);
2172 323943526 : return;
2173 : }
2174 : }
2175 :
2176 : /* Try to add a description of source X to this object, stopping once
2177 : the REF_END limit has been reached. FLAGS is a bitmask of
2178 : rtx_obj_reference flags that describe the context.
2179 :
2180 : This routine accepts all rtxes that can legitimately appear in a SET_SRC. */
2181 :
2182 : void
2183 921509829 : rtx_properties::try_to_add_src (const_rtx x, unsigned int flags)
2184 : {
2185 921509829 : unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
2186 921509829 : subrtx_iterator::array_type array;
2187 2916126965 : FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2188 : {
2189 1994617136 : const_rtx x = *iter;
2190 1994617136 : rtx_code code = GET_CODE (x);
2191 1994617136 : if (code == REG)
2192 504501375 : try_to_add_reg (x, flags | rtx_obj_flags::IS_READ);
2193 : else if (code == MEM)
2194 : {
2195 108005920 : if (MEM_VOLATILE_P (x))
2196 3178975 : has_volatile_refs = true;
2197 :
2198 108005920 : if (!MEM_READONLY_P (x) && ref_iter != ref_end)
2199 : {
2200 103208972 : auto mem_flags = flags | rtx_obj_flags::IS_READ;
2201 103208972 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags,
2202 103208972 : GET_MODE (x));
2203 : }
2204 :
2205 108005920 : try_to_add_src (XEXP (x, 0),
2206 : base_flags | rtx_obj_flags::IN_MEM_LOAD);
2207 108005920 : iter.skip_subrtxes ();
2208 : }
2209 : else if (code == SUBREG)
2210 : {
2211 8163654 : try_to_add_src (SUBREG_REG (x), flags | rtx_obj_flags::IN_SUBREG);
2212 8163654 : iter.skip_subrtxes ();
2213 : }
2214 : else if (code == UNSPEC_VOLATILE)
2215 2644673 : has_volatile_refs = true;
2216 : else if (code == ASM_INPUT || code == ASM_OPERANDS)
2217 : {
2218 1084211 : has_asm = true;
2219 1084211 : if (MEM_VOLATILE_P (x))
2220 347688 : has_volatile_refs = true;
2221 : }
2222 : else if (code == PRE_INC
2223 : || code == PRE_DEC
2224 : || code == POST_INC
2225 : || code == POST_DEC
2226 : || code == PRE_MODIFY
2227 : || code == POST_MODIFY)
2228 : {
2229 12250139 : has_pre_post_modify = true;
2230 :
2231 12250139 : unsigned int addr_flags = (flags
2232 : | rtx_obj_flags::IS_PRE_POST_MODIFY
2233 : | rtx_obj_flags::IS_READ);
2234 12250139 : try_to_add_dest (XEXP (x, 0), addr_flags);
2235 12250139 : if (code == PRE_MODIFY || code == POST_MODIFY)
2236 357684 : iter.substitute (XEXP (XEXP (x, 1), 1));
2237 : else
2238 11892455 : iter.skip_subrtxes ();
2239 : }
2240 : else if (code == CALL)
2241 26771036 : has_call = true;
2242 : }
2243 921509829 : }
2244 :
2245 : /* Try to add a description of instruction pattern PAT to this object,
2246 : stopping once the REF_END limit has been reached. */
2247 :
2248 : void
2249 722680033 : rtx_properties::try_to_add_pattern (const_rtx pat)
2250 : {
2251 774550493 : switch (GET_CODE (pat))
2252 : {
2253 0 : case COND_EXEC:
2254 0 : try_to_add_src (COND_EXEC_TEST (pat));
2255 0 : try_to_add_pattern (COND_EXEC_CODE (pat));
2256 0 : break;
2257 :
2258 51870460 : case PARALLEL:
2259 51870460 : {
2260 51870460 : int last = XVECLEN (pat, 0) - 1;
2261 107557823 : for (int i = 0; i < last; ++i)
2262 55687363 : try_to_add_pattern (XVECEXP (pat, 0, i));
2263 51870460 : try_to_add_pattern (XVECEXP (pat, 0, last));
2264 51870460 : break;
2265 : }
2266 :
2267 228812 : case ASM_OPERANDS:
2268 299172 : for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (pat); i < len; ++i)
2269 70360 : try_to_add_src (ASM_OPERANDS_INPUT (pat, i));
2270 : break;
2271 :
2272 51872365 : case CLOBBER:
2273 51872365 : try_to_add_dest (XEXP (pat, 0), rtx_obj_flags::IS_CLOBBER);
2274 51872365 : break;
2275 :
2276 359171606 : case SET:
2277 359171606 : try_to_add_dest (SET_DEST (pat));
2278 359171606 : try_to_add_src (SET_SRC (pat));
2279 359171606 : break;
2280 :
2281 311407250 : default:
2282 : /* All the other possibilities never store and can use a normal
2283 : rtx walk. This includes:
2284 :
2285 : - USE
2286 : - TRAP_IF
2287 : - PREFETCH
2288 : - UNSPEC
2289 : - UNSPEC_VOLATILE. */
2290 311407250 : try_to_add_src (pat);
2291 311407250 : break;
2292 : }
2293 722680033 : }
2294 :
2295 : /* Try to add a description of INSN to this object, stopping once
2296 : the REF_END limit has been reached. INCLUDE_NOTES is true if the
2297 : description should include REG_EQUAL and REG_EQUIV notes; all such
2298 : references will then be marked with rtx_obj_flags::IN_NOTE.
2299 :
2300 : For calls, this description includes all accesses in
2302 : CALL_INSN_FUNCTION_USAGE. It also includes all implicit accesses
2302 : to global registers by the target function. However, it does not
2303 : include clobbers performed by the target function; callers that want
2304 : this information should instead use the function_abi interface. */
2305 :
2306 : void
2307 649802612 : rtx_properties::try_to_add_insn (const rtx_insn *insn, bool include_notes)
2308 : {
2309 649802612 : if (CALL_P (insn))
2310 : {
2311 : /* Non-const functions can read from global registers. Impure
2312 : functions can also set them.
2313 :
2314 : Adding the global registers first removes a situation in which
2315 : a fixed-form clobber of register R could come before a real set
2316 : of register R. */
2317 26754576 : if (!hard_reg_set_empty_p (global_reg_set)
2318 26754576 : && !RTL_CONST_CALL_P (insn))
2319 : {
2320 514 : unsigned int flags = rtx_obj_flags::IS_READ;
2321 514 : if (!RTL_PURE_CALL_P (insn))
2322 470 : flags |= rtx_obj_flags::IS_WRITE;
2323 47802 : for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2324 : /* As a special case, the stack pointer is invariant across calls
2325 : even if it has been marked global; see the corresponding
2326 : handling in df_get_call_refs. */
2327 47288 : if (regno != STACK_POINTER_REGNUM
2328 46774 : && global_regs[regno]
2329 436 : && ref_iter != ref_end)
2330 436 : *ref_iter++ = rtx_obj_reference (regno, flags,
2331 436 : reg_raw_mode[regno], 0);
2332 : }
2333 : /* Untyped calls implicitly set all function value registers.
2334 : Again, we add them first in case the main pattern contains
2335 : a fixed-form clobber. */
2336 26754576 : if (find_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX))
2337 209994 : for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2338 207736 : if (targetm.calls.function_value_regno_p (regno)
2339 207736 : && ref_iter != ref_end)
2340 13548 : *ref_iter++ = rtx_obj_reference (regno, rtx_obj_flags::IS_WRITE,
2341 13548 : reg_raw_mode[regno], 0);
2342 26754576 : if (ref_iter != ref_end && !RTL_CONST_CALL_P (insn))
2343 : {
2344 25862675 : auto mem_flags = rtx_obj_flags::IS_READ;
2345 25862675 : if (!RTL_PURE_CALL_P (insn))
2346 24199943 : mem_flags |= rtx_obj_flags::IS_WRITE;
2347 25862675 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags, BLKmode);
2348 : }
2349 26754576 : try_to_add_pattern (PATTERN (insn));
2350 82566609 : for (rtx link = CALL_INSN_FUNCTION_USAGE (insn); link;
2351 55812033 : link = XEXP (link, 1))
2352 : {
2353 55812033 : rtx x = XEXP (link, 0);
2354 55812033 : if (GET_CODE (x) == CLOBBER)
2355 2748196 : try_to_add_dest (XEXP (x, 0), rtx_obj_flags::IS_CLOBBER);
2356 53063837 : else if (GET_CODE (x) == USE)
2357 52668346 : try_to_add_src (XEXP (x, 0));
2358 : }
2359 : }
2360 : else
2361 623048036 : try_to_add_pattern (PATTERN (insn));
2362 :
2363 649802612 : if (include_notes)
2364 985891716 : for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
2365 336089514 : if (REG_NOTE_KIND (note) == REG_EQUAL
2366 336089514 : || REG_NOTE_KIND (note) == REG_EQUIV)
2367 22271240 : try_to_add_note (XEXP (note, 0));
2368 649802612 : }
2369 :
2371 : /* Grow the storage (by the usual 1.5x vec heuristic) while keeping
2372 : the contents of the first START elements. */
2372 :
2373 : void
2374 28520 : vec_rtx_properties_base::grow (ptrdiff_t start)
2375 : {
2376 : /* The same heuristic that vec uses. */
2377 28520 : ptrdiff_t new_elems = (ref_end - ref_begin) * 3 / 2;
2378 28520 : if (ref_begin == m_storage)
2379 : {
2380 24416 : ref_begin = XNEWVEC (rtx_obj_reference, new_elems);
2381 24416 : if (start)
2382 0 : memcpy (ref_begin, m_storage, start * sizeof (rtx_obj_reference));
2383 : }
2384 : else
2385 4104 : ref_begin = reinterpret_cast<rtx_obj_reference *>
2386 4104 : (xrealloc (ref_begin, new_elems * sizeof (rtx_obj_reference)));
2387 28520 : ref_iter = ref_begin + start;
2388 28520 : ref_end = ref_begin + new_elems;
2389 28520 : }
2390 :
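: /* Putting the pieces above together, a typical consumer follows this
:    sketch (illustrative; it assumes the vec_rtx_properties interface
:    declared in rtlanal.h):
:
:      vec_rtx_properties properties;
:      properties.add_insn (insn, false);
:      for (rtx_obj_reference ref : properties.refs ())
:        if (ref.is_reg () && ref.is_write ())
:          ... record a definition of register ref.regno ...
:
:    The vector-backed storage grows on demand, so the add_* wrappers
:    can retry until REF_END is no longer reached. */
: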
2391 : /* Return true if X's old contents don't survive after INSN.
2393 : This will be true if X is a register and either X dies in INSN or
2394 : INSN entirely sets X.
2394 :
2395 : "Entirely set" means set directly and not through a SUBREG, or
2396 : ZERO_EXTRACT, so no trace of the old contents remains.
2397 : Likewise, REG_INC does not count.
2398 :
2399 : REG may be a hard or pseudo reg. Renumbering is not taken into account,
2400 : but for this use that makes no difference, since regs don't overlap
2401 : during their lifetimes. Therefore, this function may be used
2402 : at any time after deaths have been computed.
2403 :
2404 : If REG is a hard reg that occupies multiple machine registers, this
2405 : function will only return true if each of those registers will be replaced
2406 : by INSN. */
2407 :
2408 : bool
2409 115274786 : dead_or_set_p (const rtx_insn *insn, const_rtx x)
2410 : {
2411 115274786 : unsigned int regno, end_regno;
2412 115274786 : unsigned int i;
2413 :
2414 115274786 : gcc_assert (REG_P (x));
2415 :
2416 115274786 : regno = REGNO (x);
2417 115274786 : end_regno = END_REGNO (x);
2418 204080623 : for (i = regno; i < end_regno; i++)
2419 115275828 : if (! dead_or_set_regno_p (insn, i))
2420 : return false;
2421 :
2422 : return true;
2423 : }
2424 :
2425 : /* Return TRUE iff DEST is a register or subreg of a register, is a
2426 : complete rather than read-modify-write destination, and contains
2427 : register TEST_REGNO. */
2428 :
2429 : static bool
2430 205529987 : covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
2431 : {
2432 205529987 : unsigned int regno, endregno;
2433 :
2434 205529987 : if (GET_CODE (dest) == SUBREG && !read_modify_subreg_p (dest))
2435 498336 : dest = SUBREG_REG (dest);
2436 :
2437 205529987 : if (!REG_P (dest))
2438 : return false;
2439 :
2440 196823520 : regno = REGNO (dest);
2441 196823520 : endregno = END_REGNO (dest);
2442 196823520 : return (test_regno >= regno && test_regno < endregno);
2443 : }
2444 :
2445 : /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
2446 : any member matches the covers_regno_no_parallel_p criteria. */
2447 :
2448 : static bool
2449 95846420 : covers_regno_p (const_rtx dest, unsigned int test_regno)
2450 : {
2451 95846420 : if (GET_CODE (dest) == PARALLEL)
2452 : {
2453 : /* Some targets place small structures in registers for return
2454 : values of functions, and those registers are wrapped in
2455 : PARALLELs that we may see as the destination of a SET. */
2456 304 : int i;
2457 :
2458 822 : for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
2459 : {
2460 518 : rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
2461 518 : if (inner != NULL_RTX
2462 518 : && covers_regno_no_parallel_p (inner, test_regno))
2463 : return true;
2464 : }
2465 :
2466 : return false;
2467 : }
2468 : else
2469 95846116 : return covers_regno_no_parallel_p (dest, test_regno);
2470 : }
2471 :
2472 : /* Utility function for dead_or_set_p to check an individual register. */
2473 :
2474 : bool
2475 117496229 : dead_or_set_regno_p (const rtx_insn *insn, unsigned int test_regno)
2476 : {
2477 117496229 : const_rtx pattern;
2478 :
2479 : /* See if there is a death note for something that includes TEST_REGNO. */
2480 117496229 : if (find_regno_note (insn, REG_DEAD, test_regno))
2481 : return true;
2482 :
2483 76029233 : if (CALL_P (insn)
2484 76029233 : && find_regno_fusage (insn, CLOBBER, test_regno))
2485 : return true;
2486 :
2487 76015145 : pattern = PATTERN (insn);
2488 :
2489 : /* If a COND_EXEC is not executed, the value survives. */
2490 76015145 : if (GET_CODE (pattern) == COND_EXEC)
2491 : return false;
2492 :
2493 76015145 : if (GET_CODE (pattern) == SET || GET_CODE (pattern) == CLOBBER)
2494 55838344 : return covers_regno_p (SET_DEST (pattern), test_regno);
2495 20176801 : else if (GET_CODE (pattern) == PARALLEL)
2496 : {
2497 19967925 : int i;
2498 :
2499 46356976 : for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
2500 : {
2501 40252187 : rtx body = XVECEXP (pattern, 0, i);
2502 :
2503 40252187 : if (GET_CODE (body) == COND_EXEC)
2504 0 : body = COND_EXEC_CODE (body);
2505 :
2506 20233667 : if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
2507 60241743 : && covers_regno_p (SET_DEST (body), test_regno))
2508 : return true;
2509 : }
2510 : }
2511 :
2512 : return false;
2513 : }
2514 :
2515 : /* Return the reg-note of kind KIND in insn INSN, if there is one.
2516 : If DATUM is nonzero, look for one whose datum is DATUM. */
2517 :
2518 : rtx
2519 8145796263 : find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
2520 : {
2521 8145796263 : rtx link;
2522 :
2523 8145796263 : gcc_checking_assert (insn);
2524 :
2525 : /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2526 8145796263 : if (! INSN_P (insn))
2527 : return 0;
2528 8030938662 : if (datum == 0)
2529 : {
2530 16013289029 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2531 9552168920 : if (REG_NOTE_KIND (link) == kind)
2532 : return link;
2533 : return 0;
2534 : }
2535 :
2536 684949728 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2537 392042697 : if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
2538 : return link;
2539 : return 0;
2540 : }
2541 :
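: /* For example (an aside): a caller wanting the REG_EQUAL value of an
:    insn, if any, can write
:
:      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
:      if (note)
:        ... use XEXP (note, 0) ...
:
:    since the datum of a note is its first operand. */
: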
2542 : /* Return the reg-note of kind KIND in insn INSN which applies to register
2543 : number REGNO, if any. Return 0 if there is no such reg-note. Note that
2544 : the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
2545 : it might be the case that the note overlaps REGNO. */
2546 :
2547 : rtx
2548 388371190 : find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
2549 : {
2550 388371190 : rtx link;
2551 :
2552 : /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2553 388371190 : if (! INSN_P (insn))
2554 : return 0;
2555 :
2556 575470557 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2557 361553835 : if (REG_NOTE_KIND (link) == kind
2558 : /* Verify that it is a register, so that scratch and MEM won't cause a
2559 : problem here. */
2560 254270973 : && REG_P (XEXP (link, 0))
2561 254270973 : && REGNO (XEXP (link, 0)) <= regno
2562 565670950 : && END_REGNO (XEXP (link, 0)) > regno)
2563 : return link;
2564 : return 0;
2565 : }
2566 :
2567 : /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2568 : has such a note. */
2569 :
2570 : rtx
2571 1751075429 : find_reg_equal_equiv_note (const_rtx insn)
2572 : {
2573 1751075429 : rtx link;
2574 :
2575 1751075429 : if (!INSN_P (insn))
2576 : return 0;
2577 :
2578 3026720495 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2579 1373769294 : if (REG_NOTE_KIND (link) == REG_EQUAL
2580 1373769294 : || REG_NOTE_KIND (link) == REG_EQUIV)
2581 : {
2582 : /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
2583 : insns that have multiple sets. Checking single_set to
2584 : make sure of this is not the proper check, as explained
2585 : in the comment in set_unique_reg_note.
2586 :
2587 : This should be changed into an assert. */
2588 91886649 : if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
2589 : return 0;
2590 91886649 : return link;
2591 : }
2592 : return NULL;
2593 : }
2594 :
2595 : /* Check whether INSN is a single_set whose source is known to be
2596 : equivalent to a constant. Return that constant if so, otherwise
2597 : return null. */
2598 :
2599 : rtx
2600 2210290 : find_constant_src (const rtx_insn *insn)
2601 : {
2602 2210290 : rtx note, set, x;
2603 :
2604 2210290 : set = single_set (insn);
2605 2210290 : if (set)
2606 : {
2607 2210290 : x = avoid_constant_pool_reference (SET_SRC (set));
2608 2210290 : if (CONSTANT_P (x))
2609 : return x;
2610 : }
2611 :
2612 1542092 : note = find_reg_equal_equiv_note (insn);
2613 1542092 : if (note && CONSTANT_P (XEXP (note, 0)))
2614 610 : return XEXP (note, 0);
2615 :
2616 : return NULL_RTX;
2617 : }
2618 :
2619 : /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2620 : in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2621 :
2622 : bool
2623 87993118 : find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
2624 : {
2625 : /* If it's not a CALL_INSN, it can't possibly have a
2626 : CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
2627 87993118 : if (!CALL_P (insn))
2628 : return false;
2629 :
2630 87993118 : gcc_assert (datum);
2631 :
2632 87993118 : if (!REG_P (datum))
2633 : {
2634 33607 : rtx link;
2635 :
2636 33607 : for (link = CALL_INSN_FUNCTION_USAGE (insn);
2637 69509 : link;
2638 35902 : link = XEXP (link, 1))
2639 35902 : if (GET_CODE (XEXP (link, 0)) == code
2640 35902 : && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2641 : return true;
2642 : }
2643 : else
2644 : {
2645 87959511 : unsigned int regno = REGNO (datum);
2646 :
2647 : /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2648 : to pseudo registers, so don't bother checking. */
2649 :
2650 87959511 : if (regno < FIRST_PSEUDO_REGISTER)
2651 : {
2652 81017175 : unsigned int end_regno = END_REGNO (datum);
2653 81017175 : unsigned int i;
2654 :
2655 142156026 : for (i = regno; i < end_regno; i++)
2656 81017175 : if (find_regno_fusage (insn, code, i))
2657 : return true;
2658 : }
2659 : }
2660 :
2661 : return false;
2662 : }
2663 :
2664 : /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2665 : in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2666 :
2667 : bool
2668 81236705 : find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
2669 : {
2670 81236705 : rtx link;
2671 :
2672 : /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2673 : to pseudo registers, so don't bother checking. */
2674 :
2675 81236705 : if (regno >= FIRST_PSEUDO_REGISTER
2676 81183013 : || !CALL_P (insn) )
2677 : return false;
2678 :
2679 240982311 : for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2680 : {
2681 179693264 : rtx op, reg;
2682 :
2683 179693264 : if (GET_CODE (op = XEXP (link, 0)) == code
2684 51087694 : && REG_P (reg = XEXP (op, 0))
2685 51079049 : && REGNO (reg) <= regno
2686 213637242 : && END_REGNO (reg) > regno)
2687 : return true;
2688 : }
2689 :
2690 : return false;
2691 : }
2692 :
2693 :
2694 : /* Return true if KIND is an integer REG_NOTE. */
2695 :
2696 : static bool
2697 0 : int_reg_note_p (enum reg_note kind)
2698 : {
2699 0 : return kind == REG_BR_PROB;
2700 : }
2701 :
2702 : /* Allocate a register note with kind KIND and datum DATUM. LIST is
2703 : stored as the pointer to the next register note. */
2704 :
2705 : rtx
2706 751297323 : alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
2707 : {
2708 751297323 : rtx note;
2709 :
2710 751297323 : gcc_checking_assert (!int_reg_note_p (kind));
2711 751297323 : switch (kind)
2712 : {
2713 24569 : case REG_LABEL_TARGET:
2714 24569 : case REG_LABEL_OPERAND:
2715 24569 : case REG_TM:
2716 : /* These types of register notes use an INSN_LIST rather than an
2717 : EXPR_LIST, so that copying is done right and dumps look
2718 : better. */
2719 24569 : note = alloc_INSN_LIST (datum, list);
2720 24569 : PUT_REG_NOTE_KIND (note, kind);
2721 24569 : break;
2722 :
2723 751272754 : default:
2724 751272754 : note = alloc_EXPR_LIST (kind, datum, list);
2725 751272754 : break;
2726 : }
2727 :
2728 751297323 : return note;
2729 : }
2730 :
2731 : /* Add register note with kind KIND and datum DATUM to INSN. */
2732 :
2733 : void
2734 744506500 : add_reg_note (rtx insn, enum reg_note kind, rtx datum)
2735 : {
2736 744506500 : REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
2737 744506500 : }
2738 :
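: /* For instance (illustrative), recording that INSN computes a known
:    constant could be done with
:
:      add_reg_note (insn, REG_EQUAL, GEN_INT (42));
:
:    which prepends the new note to REG_NOTES (insn). */
: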
2739 : /* Add an integer register note with kind KIND and datum DATUM to INSN. */
2740 :
2741 : void
2742 5227630 : add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum)
2743 : {
2744 5227630 : gcc_checking_assert (int_reg_note_p (kind));
2745 5227630 : REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
2746 : datum, REG_NOTES (insn));
2747 5227630 : }
2748 :
2749 : /* Add a REG_ARGS_SIZE note to INSN with value VALUE. */
2750 :
2751 : void
2752 5484531 : add_args_size_note (rtx_insn *insn, poly_int64 value)
2753 : {
2754 5484531 : gcc_checking_assert (!find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX));
2755 8983627 : add_reg_note (insn, REG_ARGS_SIZE, gen_int_mode (value, Pmode));
2756 5484531 : }
2757 :
2758 : /* Add a register note like NOTE to INSN. */
2759 :
2760 : void
2761 0 : add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
2762 : {
2763 0 : if (GET_CODE (note) == INT_LIST)
2764 0 : add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
2765 : else
2766 0 : add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2767 0 : }
2768 :
2769 : /* Duplicate NOTE and return the copy. */
2770 : rtx
2771 2319693 : duplicate_reg_note (rtx note)
2772 : {
2773 2319693 : reg_note kind = REG_NOTE_KIND (note);
2774 :
2775 2319693 : if (GET_CODE (note) == INT_LIST)
2776 308899 : return gen_rtx_INT_LIST ((machine_mode) kind, XINT (note, 0), NULL_RTX);
2777 2010794 : else if (GET_CODE (note) == EXPR_LIST)
2778 2010794 : return alloc_reg_note (kind, copy_insn_1 (XEXP (note, 0)), NULL_RTX);
2779 : else
2780 0 : return alloc_reg_note (kind, XEXP (note, 0), NULL_RTX);
2781 : }
2782 :
2783 : /* Remove register note NOTE from the REG_NOTES of INSN. */
2784 :
2785 : void
2786 8217380 : remove_note (rtx_insn *insn, const_rtx note)
2787 : {
2788 8217380 : rtx link;
2789 :
2790 8217380 : if (note == NULL_RTX)
2791 : return;
2792 :
2793 7408166 : if (REG_NOTES (insn) == note)
2794 6909442 : REG_NOTES (insn) = XEXP (note, 1);
2795 : else
2796 988378 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2797 988378 : if (XEXP (link, 1) == note)
2798 : {
2799 498724 : XEXP (link, 1) = XEXP (note, 1);
2800 498724 : break;
2801 : }
2802 :
2803 7408166 : switch (REG_NOTE_KIND (note))
2804 : {
2805 2613997 : case REG_EQUAL:
2806 2613997 : case REG_EQUIV:
2807 2613997 : df_notes_rescan (insn);
2808 2613997 : break;
2809 : default:
2810 : break;
2811 : }
2812 : }
2813 :
2814 : /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.
2815 : If NO_RESCAN is false and any notes were removed, call
2816 : df_notes_rescan. Return true if any note has been removed. */
2817 :
2818 : bool
2819 34399 : remove_reg_equal_equiv_notes (rtx_insn *insn, bool no_rescan)
2820 : {
2821 34399 : rtx *loc;
2822 34399 : bool ret = false;
2823 :
2825 34399 : loc = &REG_NOTES (insn);
2825 42189 : while (*loc)
2826 : {
2827 7790 : enum reg_note kind = REG_NOTE_KIND (*loc);
2828 7790 : if (kind == REG_EQUAL || kind == REG_EQUIV)
2829 : {
2830 373 : *loc = XEXP (*loc, 1);
2831 373 : ret = true;
2832 : }
2833 : else
2834 7417 : loc = &XEXP (*loc, 1);
2835 : }
2836 34399 : if (ret && !no_rescan)
2837 373 : df_notes_rescan (insn);
2838 34399 : return ret;
2839 : }
2840 :
2841 : /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */
2842 :
2843 : void
2844 3917416 : remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2845 : {
2846 3917416 : df_ref eq_use;
2847 :
2848 3917416 : if (!df)
2849 : return;
2850 :
2851 : /* This loop is a little tricky. We cannot just go down the chain because
2852 : it is being modified by some actions in the loop. So we just iterate
2853 : over the head. We plan to drain the list anyway. */
2854 4053533 : while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2855 : {
2856 136117 : rtx_insn *insn = DF_REF_INSN (eq_use);
2857 136117 : rtx note = find_reg_equal_equiv_note (insn);
2858 :
2859 : /* This assert is generally triggered when someone deletes a REG_EQUAL
2860 : or REG_EQUIV note by hacking the list manually rather than calling
2861 : remove_note. */
2862 136117 : gcc_assert (note);
2863 :
2864 136117 : remove_note (insn, note);
2865 : }
2866 : }
2867 :
2868 : /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2869 : return 1 if it is found. A simple equality test is used to determine if
2870 : NODE matches. */
2871 :
2872 : bool
2873 26 : in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
2874 : {
2875 26 : const_rtx x;
2876 :
2877 26 : for (x = listp; x; x = XEXP (x, 1))
2878 0 : if (node == XEXP (x, 0))
2879 : return true;
2880 :
2881 : return false;
2882 : }
2883 :
2884 : /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2885 : remove that entry from the list if it is found.
2886 :
2887 : A simple equality test is used to determine if NODE matches. */
2888 :
2889 : void
2890 7609683 : remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
2891 : {
2892 7609683 : rtx_insn_list *temp = *listp;
2893 7609683 : rtx_insn_list *prev = NULL;
2894 :
2895 7626144 : while (temp)
2896 : {
2897 16487 : if (node == temp->insn ())
2898 : {
2899 : /* Splice the node out of the list. */
2900 26 : if (prev)
2901 0 : XEXP (prev, 1) = temp->next ();
2902 : else
2903 26 : *listp = temp->next ();
2904 :
2905 26 : gcc_checking_assert (!in_insn_list_p (temp->next (), node));
2906 : return;
2907 : }
2908 :
2909 16461 : prev = temp;
2910 16461 : temp = temp->next ();
2911 : }
2912 : }
2913 :
2914 : /* Return true if X contains any volatile instructions. These are instructions
2916 : which may cause unpredictable machine state, and thus no
2916 : instructions or register uses should be moved or combined across them.
2917 : This includes only volatile asms and UNSPEC_VOLATILE instructions. */
2918 :
2919 : bool
2920 687629405 : volatile_insn_p (const_rtx x)
2921 : {
2922 687629405 : const RTX_CODE code = GET_CODE (x);
2923 687629405 : switch (code)
2924 : {
2925 : case LABEL_REF:
2926 : case SYMBOL_REF:
2927 : case CONST:
2928 : CASE_CONST_ANY:
2929 : case PC:
2930 : case REG:
2931 : case SCRATCH:
2932 : case CLOBBER:
2933 : case ADDR_VEC:
2934 : case ADDR_DIFF_VEC:
2935 : case CALL:
2936 : case MEM:
2937 : return false;
2938 :
2939 : case UNSPEC_VOLATILE:
2940 : return true;
2941 :
2942 136691 : case ASM_INPUT:
2943 136691 : case ASM_OPERANDS:
2944 136691 : if (MEM_VOLATILE_P (x))
2945 : return true;
2946 :
2947 320372473 : default:
2948 320372473 : break;
2949 : }
2950 :
2951 : /* Recursively scan the operands of this expression. */
2952 :
2953 320372473 : {
2954 320372473 : const char *const fmt = GET_RTX_FORMAT (code);
2955 320372473 : int i;
2956 :
2957 878504653 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2958 : {
2959 558431514 : if (fmt[i] == 'e')
2960 : {
2961 436072071 : if (volatile_insn_p (XEXP (x, i)))
2962 : return true;
2963 : }
2964 122359443 : else if (fmt[i] == 'E')
2965 : {
2966 : int j;
2967 69817957 : for (j = 0; j < XVECLEN (x, i); j++)
2968 47991609 : if (volatile_insn_p (XVECEXP (x, i, j)))
2969 : return true;
2970 : }
2971 : }
2972 : }
2973 : return false;
2974 : }
2975 :
2977 : /* Return true if X contains any volatile memory references,
2977 : UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2978 :
2979 : bool
2980 5132716797 : volatile_refs_p (const_rtx x)
2981 : {
2982 5132716797 : const RTX_CODE code = GET_CODE (x);
2983 5132716797 : switch (code)
2984 : {
2985 : case LABEL_REF:
2986 : case SYMBOL_REF:
2987 : case CONST:
2988 : CASE_CONST_ANY:
2989 : case PC:
2990 : case REG:
2991 : case SCRATCH:
2992 : case CLOBBER:
2993 : case ADDR_VEC:
2994 : case ADDR_DIFF_VEC:
2995 : return false;
2996 :
2997 : case UNSPEC_VOLATILE:
2998 : return true;
2999 :
3000 391715015 : case MEM:
3001 391715015 : case ASM_INPUT:
3002 391715015 : case ASM_OPERANDS:
3003 391715015 : if (MEM_VOLATILE_P (x))
3004 : return true;
3005 :
3006 2187323874 : default:
3007 2187323874 : break;
3008 : }
3009 :
3010 : /* Recursively scan the operands of this expression. */
3011 :
3012 2187323874 : {
3013 2187323874 : const char *const fmt = GET_RTX_FORMAT (code);
3014 2187323874 : int i;
3015 :
3016 6430507750 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3017 : {
3018 4259695321 : if (fmt[i] == 'e')
3019 : {
3020 3787868904 : if (volatile_refs_p (XEXP (x, i)))
3021 : return true;
3022 : }
3023 471826417 : else if (fmt[i] == 'E')
3024 : {
3025 : int j;
3026 157956074 : for (j = 0; j < XVECLEN (x, i); j++)
3027 109213970 : if (volatile_refs_p (XVECEXP (x, i, j)))
3028 : return true;
3029 : }
3030 : }
3031 : }
3032 : return false;
3033 : }
3034 :
3035 : /* Similar to above, except that it also rejects register pre- and post-
3036 : incrementing. */
3037 :
3038 : bool
3039 5286656400 : side_effects_p (const_rtx x)
3040 : {
3041 5286656400 : const RTX_CODE code = GET_CODE (x);
3042 5286656400 : switch (code)
3043 : {
3044 : case LABEL_REF:
3045 : case SYMBOL_REF:
3046 : case CONST:
3047 : CASE_CONST_ANY:
3048 : case PC:
3049 : case REG:
3050 : case SCRATCH:
3051 : case ADDR_VEC:
3052 : case ADDR_DIFF_VEC:
3053 : case VAR_LOCATION:
3054 : return false;
3055 :
3056 61630157 : case CLOBBER:
3057 : /* Reject CLOBBER with a non-VOID mode. These are made by combine.cc
3058 : when some combination can't be done. If we see one, don't think
3059 : that we can simplify the expression. */
3060 61630157 : return (GET_MODE (x) != VOIDmode);
3061 :
3062 : case PRE_INC:
3063 : case PRE_DEC:
3064 : case POST_INC:
3065 : case POST_DEC:
3066 : case PRE_MODIFY:
3067 : case POST_MODIFY:
3068 : case CALL:
3069 : case UNSPEC_VOLATILE:
3070 : return true;
3071 :
3072 325727543 : case MEM:
3073 325727543 : case ASM_INPUT:
3074 325727543 : case ASM_OPERANDS:
3075 325727543 : if (MEM_VOLATILE_P (x))
3076 : return true;
3077 :
3078 1874802450 : default:
3079 1874802450 : break;
3080 : }
3081 :
3082 : /* Recursively scan the operands of this expression. */
3083 :
3084 1874802450 : {
3085 1874802450 : const char *fmt = GET_RTX_FORMAT (code);
3086 1874802450 : int i;
3087 :
3088 5576277440 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3089 : {
3090 3751316894 : if (fmt[i] == 'e')
3091 : {
3092 3317724478 : if (side_effects_p (XEXP (x, i)))
3093 : return true;
3094 : }
3095 433592416 : else if (fmt[i] == 'E')
3096 : {
3097 : int j;
3098 232434682 : for (j = 0; j < XVECLEN (x, i); j++)
3099 156757577 : if (side_effects_p (XVECEXP (x, i, j)))
3100 : return true;
3101 : }
3102 : }
3103 : }
3104 : return false;
3105 : }
3106 :
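: /* To illustrate how the three predicates above differ (an aside): a
:    volatile memory reference such as (mem/v:SI (reg:SI 100)) makes
:    volatile_refs_p and side_effects_p true but volatile_insn_p false,
:    while an auto-increment such as (post_inc:SI (reg:SI 100)) is
:    caught only by side_effects_p. */
: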
3107 : /* Return true if evaluating rtx X might cause a trap.
3108 : FLAGS controls how to consider MEMs. A true means the context
3109 : of the access may have changed from the original, such that the
3110 : address may have become invalid. */
3111 :
3112 : bool
3113 8965717768 : may_trap_p_1 (const_rtx x, unsigned flags)
3114 : {
3115 8965717768 : int i;
3116 8965717768 : enum rtx_code code;
3117 8965717768 : const char *fmt;
3118 :
3119 : /* We make no distinction currently, but this function is part of
3120 : the internal target-hooks ABI so we keep the parameter as
3121 : "unsigned flags". */
3122 8965717768 : bool code_changed = flags != 0;
3123 :
3124 8965717768 : if (x == 0)
3125 : return false;
3126 8965715882 : code = GET_CODE (x);
3127 8965715882 : switch (code)
3128 : {
3129 : /* Handle these cases quickly. */
3130 : CASE_CONST_ANY:
3131 : case SYMBOL_REF:
3132 : case LABEL_REF:
3133 : case CONST:
3134 : case PC:
3135 : case REG:
3136 : case SCRATCH:
3137 : return false;
3138 :
3139 5814932 : case UNSPEC:
3140 5814932 : return targetm.unspec_may_trap_p (x, flags);
3141 :
3142 : case UNSPEC_VOLATILE:
3143 : case ASM_INPUT:
3144 : case TRAP_IF:
3145 : return true;
3146 :
3147 188468 : case ASM_OPERANDS:
3148 188468 : return MEM_VOLATILE_P (x);
3149 :
3150 : /* Memory ref can trap unless it's a static var or a stack slot. */
3151 1068122953 : case MEM:
3152 : /* Recognize specific pattern of stack checking probes. */
3153 1068122953 : if (flag_stack_check
3154 7726 : && MEM_VOLATILE_P (x)
3155 1068123753 : && XEXP (x, 0) == stack_pointer_rtx)
3156 : return true;
3157 1068122154 : if (/* MEM_NOTRAP_P only relates to the actual position of the memory
3159 : reference; moving it out of context, such as when moving code
3160 : during optimization, might cause its address to become invalid. */
3160 : code_changed
3161 1068122154 : || !MEM_NOTRAP_P (x))
3162 : {
3163 526571883 : poly_int64 size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1;
3164 451243945 : return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
3165 451243945 : GET_MODE (x), code_changed);
3166 : }
3167 :
3168 : return false;
3169 :
3170 : /* Division by a non-constant might trap. */
3171 995691 : case DIV:
3172 995691 : case MOD:
3173 995691 : case UDIV:
3174 995691 : case UMOD:
3175 995691 : if (HONOR_SNANS (x))
3176 : return true;
3177 995024 : if (FLOAT_MODE_P (GET_MODE (x)))
3178 419958 : return flag_trapping_math;
3179 575066 : if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
3180 : return true;
3181 79186 : if (GET_CODE (XEXP (x, 1)) == CONST_VECTOR)
3182 : {
3184 : /* For CONST_VECTOR, return true if any element is or might be zero. */
3184 0 : unsigned int n_elts;
3185 0 : rtx op = XEXP (x, 1);
3186 0 : if (!GET_MODE_NUNITS (GET_MODE (op)).is_constant (&n_elts))
3187 : {
3188 : if (!CONST_VECTOR_DUPLICATE_P (op))
3189 293916720 : return true;
3190 : for (unsigned i = 0; i < (unsigned int) XVECLEN (op, 0); i++)
3191 : if (CONST_VECTOR_ENCODED_ELT (op, i) == const0_rtx)
3192 : return true;
3193 : }
3194 : else
3195 0 : for (unsigned i = 0; i < n_elts; i++)
3196 0 : if (CONST_VECTOR_ELT (op, i) == const0_rtx)
3197 : return true;
3198 : }
3199 : break;
3200 :
3201 : case EXPR_LIST:
3202 : /* An EXPR_LIST is used to represent a function call. This
3203 : certainly may trap. */
3204 : return true;
3205 :
3206 184563215 : case GE:
3207 184563215 : case GT:
3208 184563215 : case LE:
3209 184563215 : case LT:
3210 184563215 : case LTGT:
3211 184563215 : case COMPARE:
3212 : /* Treat min/max similar as comparisons. */
3213 184563215 : case SMIN:
3214 184563215 : case SMAX:
3215 : /* Some floating point comparisons may trap. */
3216 184563215 : if (!flag_trapping_math)
3217 : break;
3218 : /* ??? There is no machine independent way to check for tests that trap
3219 : when COMPARE is used, though many targets do make this distinction.
3220 : For instance, sparc uses CCFPE for compares which generate exceptions
3221 : and CCFP for compares which do not generate exceptions. */
3222 182961492 : if (HONOR_NANS (x))
3223 : return true;
3224 : /* But often the compare has some CC mode, so check operand
3225 : modes as well. */
3226 182935155 : if (HONOR_NANS (XEXP (x, 0))
3227 182935155 : || HONOR_NANS (XEXP (x, 1)))
3228 2180835 : return true;
3229 : break;
3230 :
3231 27924359 : case EQ:
3232 27924359 : case NE:
3233 27924359 : if (HONOR_SNANS (x))
3234 : return true;
3235 : /* Often comparison is CC mode, so check operand modes. */
3236 27924342 : if (HONOR_SNANS (XEXP (x, 0))
3237 27924342 : || HONOR_SNANS (XEXP (x, 1)))
3238 0 : return true;
3239 : break;
3240 :
3241 354281 : case FIX:
3242 354281 : case UNSIGNED_FIX:
3243 : /* Conversion of floating point might trap. */
3244 354281 : if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
3245 : return true;
3246 : break;
3247 :
3248 : case PARALLEL:
3249 : case NEG:
3250 : case ABS:
3251 : case SUBREG:
3252 : case VEC_MERGE:
3253 : case VEC_SELECT:
3254 : case VEC_CONCAT:
3255 : case VEC_DUPLICATE:
3256 : /* These operations don't trap even with floating point. */
3257 : break;
3258 :
3259 3051489996 : default:
3260 : /* Any floating arithmetic may trap. */
3261 3051489996 : if (FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
3262 : return true;
3263 : }
3264 :
3265 3674983959 : fmt = GET_RTX_FORMAT (code);
3266 9892309789 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3267 : {
3268 6467412657 : if (fmt[i] == 'e')
3269 : {
3270 6018947450 : if (may_trap_p_1 (XEXP (x, i), flags))
3271 : return true;
3272 : }
3273 448465207 : else if (fmt[i] == 'E')
3274 : {
3275 : int j;
3276 964565773 : for (j = 0; j < XVECLEN (x, i); j++)
3277 664727223 : if (may_trap_p_1 (XVECEXP (x, i, j), flags))
3278 : return true;
3279 : }
3280 : }
3281 : return false;
3282 : }
3283 :
3284 : /* Return true if evaluating rtx X might cause a trap. */
3285 :
3286 : bool
3287 2264264973 : may_trap_p (const_rtx x)
3288 : {
3289 2264264973 : return may_trap_p_1 (x, 0);
3290 : }
3291 :
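: /* Two illustrative data points (an aside): (div:SI (reg:SI 100)
:    (reg:SI 101)) may trap because the divisor is not a known-nonzero
:    constant, whereas (plus:SI (reg:SI 100) (const_int 1)) may not,
:    since integer addition cannot trap. */
: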
3293 : /* Same as may_trap_p, but additionally return true if evaluating rtx X might
3294 : cause a fault. We define a fault for the purpose of this function as an
3294 : erroneous execution condition that cannot be encountered during the normal
3295 : execution of a valid program; the typical example is an unaligned memory
3296 : access on a strict alignment machine. The compiler guarantees that it
3297 : doesn't generate code that will fault from a valid program, but this
3298 : guarantee doesn't mean anything for individual instructions. Consider
3299 : the following example:
3300 :
3301 : struct S { int d; union { char *cp; int *ip; }; };
3302 :
3303 : int foo(struct S *s)
3304 : {
3305 : if (s->d == 1)
3306 : return *s->ip;
3307 : else
3308 : return *s->cp;
3309 : }
3310 :
3311 : on a strict alignment machine. In a valid program, foo will never be
3312 : invoked on a structure for which d is equal to 1 and the underlying
3313 : unique field of the union not aligned on a 4-byte boundary, but the
3314 : expression *s->ip might cause a fault if considered individually.
3315 :
3316 : At the RTL level, potentially problematic expressions will almost always
3318 : satisfy may_trap_p; for example, the above dereference can be emitted as
3318 : (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
3319 : However, suppose that foo is inlined in a caller that causes s->cp to
3320 : point to a local character variable and guarantees that s->d is not set
3321 : to 1; foo may have been effectively translated into pseudo-RTL as:
3322 :
3323 : if ((reg:SI) == 1)
3324 : (set (reg:SI) (mem:SI (%fp - 7)))
3325 : else
3326 : (set (reg:QI) (mem:QI (%fp - 7)))
3327 :
3328 : Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
3329 : memory reference to a stack slot, but it will certainly cause a fault
3330 : on a strict alignment machine. */
3331 :
3332 : bool
3333 10444920 : may_trap_or_fault_p (const_rtx x)
3334 : {
3335 10444920 : return may_trap_p_1 (x, 1);
3336 : }
3337 :
3338 : /* Replace any occurrence of FROM in X with TO. The function does
3340 : not descend into CONST_DOUBLE when replacing.
3340 :
3341 : Note that copying is not done so X must not be shared unless all copies
3342 : are to be modified.
3343 :
3344 : ALL_REGS is true if we want to replace all REGs equal to FROM, not just
3345 : those pointer-equal ones. */
3346 :
3347 : rtx
3348 8724405 : replace_rtx (rtx x, rtx from, rtx to, bool all_regs)
3349 : {
3350 8724405 : int i, j;
3351 8724405 : const char *fmt;
3352 :
3353 8724405 : if (x == from)
3354 : return to;
3355 :
3356 : /* Allow this function to make replacements in EXPR_LISTs. */
3357 6312952 : if (x == 0)
3358 : return 0;
3359 :
3360 6312952 : if (all_regs
3361 0 : && REG_P (x)
3362 0 : && REG_P (from)
3363 6312952 : && REGNO (x) == REGNO (from))
3364 : {
3365 0 : gcc_assert (GET_MODE (x) == GET_MODE (from));
3366 : return to;
3367 : }
3368 6312952 : else if (GET_CODE (x) == SUBREG)
3369 : {
3370 46358 : rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to, all_regs);
3371 :
3372 46358 : if (CONST_SCALAR_INT_P (new_rtx))
3373 : {
3374 2 : x = simplify_subreg (GET_MODE (x), new_rtx,
3375 1 : GET_MODE (SUBREG_REG (x)),
3376 1 : SUBREG_BYTE (x));
3377 1 : gcc_assert (x);
3378 : }
3379 : else
3380 46357 : SUBREG_REG (x) = new_rtx;
3381 :
3382 46358 : return x;
3383 : }
3384 6266594 : else if (GET_CODE (x) == ZERO_EXTEND)
3385 : {
3386 181735 : rtx new_rtx = replace_rtx (XEXP (x, 0), from, to, all_regs);
3387 :
3388 181735 : if (CONST_SCALAR_INT_P (new_rtx))
3389 : {
3390 2 : x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3391 1 : new_rtx, GET_MODE (XEXP (x, 0)));
3392 1 : gcc_assert (x);
3393 : }
3394 : else
3395 181734 : XEXP (x, 0) = new_rtx;
3396 :
3397 181735 : return x;
3398 : }
3399 :
3400 6084859 : fmt = GET_RTX_FORMAT (GET_CODE (x));
3401 15100919 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3402 : {
3403 9016060 : if (fmt[i] == 'e')
3404 5891361 : XEXP (x, i) = replace_rtx (XEXP (x, i), from, to, all_regs);
3405 3124699 : else if (fmt[i] == 'E')
3406 135676 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3407 112419 : XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j),
3408 : from, to, all_regs);
3409 : }
3410 :
3411 : return x;
3412 : }
3413 :
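: /* A typical (illustrative) use is substituting one register for
:    another in a copied pattern:
:
:      rtx pat = copy_rtx (PATTERN (insn));
:      pat = replace_rtx (pat, old_reg, new_reg);
:
:    The copy matters: as noted above, replace_rtx modifies X in place,
:    so shared RTL must not be passed directly. */
: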
3415 : /* Replace occurrences of OLD_LABEL in *LOC with NEW_LABEL. Also track
3415 : the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */
3416 :
3417 : void
3418 9978 : replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
3419 : {
3420 : /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */
3421 9978 : rtx x = *loc;
3422 9978 : if (JUMP_TABLE_DATA_P (x))
3423 : {
3424 12 : x = PATTERN (x);
3425 12 : rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
3426 12 : int len = GET_NUM_ELEM (vec);
3427 138 : for (int i = 0; i < len; ++i)
3428 : {
3429 126 : rtx ref = RTVEC_ELT (vec, i);
3430 126 : if (XEXP (ref, 0) == old_label)
3431 : {
3432 0 : XEXP (ref, 0) = new_label;
3433 0 : if (update_label_nuses)
3434 : {
3435 0 : ++LABEL_NUSES (new_label);
3436 0 : --LABEL_NUSES (old_label);
3437 : }
3438 : }
3439 : }
3440 12 : return;
3441 : }
3442 :
3443 : /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
3444 : field. This is not handled by the iterator because it doesn't
3445 : handle unprinted ('0') fields. */
3446 9966 : if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
3447 740 : JUMP_LABEL (x) = new_label;
3448 :
3449 9966 : subrtx_ptr_iterator::array_type array;
3450 88598 : FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
3451 : {
3452 78632 : rtx *loc = *iter;
3453 78632 : if (rtx x = *loc)
3454 : {
3455 69287 : if (GET_CODE (x) == SYMBOL_REF
3456 69287 : && CONSTANT_POOL_ADDRESS_P (x))
3457 : {
3458 339 : rtx c = get_pool_constant (x);
3459 339 : if (rtx_referenced_p (old_label, c))
3460 : {
3461 : /* Create a copy of constant C; replace the label inside
3462 : but do not update LABEL_NUSES because uses in constant pool
3463 : are not counted. */
3464 0 : rtx new_c = copy_rtx (c);
3465 0 : replace_label (&new_c, old_label, new_label, false);
3466 :
3467 : /* Add the new constant NEW_C to the constant pool and replace
3468 : the old reference to the constant with the new one. */
3469 0 : rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
3470 0 : *loc = replace_rtx (x, x, XEXP (new_mem, 0));
3471 : }
3472 : }
3473 :
3474 69287 : if ((GET_CODE (x) == LABEL_REF
3475 67589 : || GET_CODE (x) == INSN_LIST)
3476 2352 : && XEXP (x, 0) == old_label)
3477 : {
3478 2032 : XEXP (x, 0) = new_label;
3479 2032 : if (update_label_nuses)
3480 : {
3481 0 : ++LABEL_NUSES (new_label);
3482 0 : --LABEL_NUSES (old_label);
3483 : }
3484 : }
3485 : }
3486 : }
3487 9966 : }
3488 :
3489 : void
3490 9978 : replace_label_in_insn (rtx_insn *insn, rtx_insn *old_label,
3491 : rtx_insn *new_label, bool update_label_nuses)
3492 : {
3493 9978 : rtx insn_as_rtx = insn;
3494 9978 : replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
3495 9978 : gcc_checking_assert (insn_as_rtx == insn);
3496 9978 : }
3497 :
3498 : /* Return true if X is referenced in BODY. */
3499 :
3500 : bool
3501 300859 : rtx_referenced_p (const_rtx x, const_rtx body)
3502 : {
3503 300859 : subrtx_iterator::array_type array;
3504 1428199 : FOR_EACH_SUBRTX (iter, array, body, ALL)
3505 1148668 : if (const_rtx y = *iter)
3506 : {
3507 : /* Check if a label_ref Y refers to label X. */
3508 1146318 : if (GET_CODE (y) == LABEL_REF
3509 4291 : && LABEL_P (x)
3510 1150606 : && label_ref_label (y) == x)
3511 21328 : return true;
3512 :
3513 1146318 : if (rtx_equal_p (x, y))
3514 : return true;
3515 :
3516 : /* If Y is a reference to a pool constant, traverse the constant. */
3517 1124990 : if (GET_CODE (y) == SYMBOL_REF
3518 1124990 : && CONSTANT_POOL_ADDRESS_P (y))
3519 7138 : iter.substitute (get_pool_constant (y));
3520 : }
3521 279531 : return false;
3522 300859 : }
3523 :
3524 : /* If INSN is a tablejump, return true and store the label (which precedes the
3525 : jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
3526 :
3527 : bool
3528 106611856 : tablejump_p (const rtx_insn *insn, rtx_insn **labelp,
3529 : rtx_jump_table_data **tablep)
3530 : {
3531 106611856 : if (!JUMP_P (insn))
3532 : return false;
3533 :
3534 80189555 : rtx target = JUMP_LABEL (insn);
3535 80189555 : if (target == NULL_RTX || ANY_RETURN_P (target))
3536 : return false;
3537 :
3538 76618099 : rtx_insn *label = as_a<rtx_insn *> (target);
3539 76618099 : rtx_insn *table = next_insn (label);
3540 76618099 : if (table == NULL_RTX || !JUMP_TABLE_DATA_P (table))
3541 : return false;
3542 :
3543 91991 : if (labelp)
3544 59019 : *labelp = label;
3545 91991 : if (tablep)
3546 88281 : *tablep = as_a <rtx_jump_table_data *> (table);
3547 : return true;
3548 : }
3549 :
3550 : /* For INSN known to satisfy tablejump_p, determine if it actually is a
3551 : CASESI. Return the insn pattern if so, NULL_RTX otherwise. */
3552 :
3553 : rtx
3554 16886 : tablejump_casesi_pattern (const rtx_insn *insn)
3555 : {
3556 16886 : rtx tmp;
3557 :
3558 16886 : if ((tmp = single_set (insn)) != NULL
3559 16886 : && SET_DEST (tmp) == pc_rtx
3560 16886 : && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
3561 16886 : && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
3562 0 : return tmp;
3563 :
3564 : return NULL_RTX;
3565 : }
3566 :
3567 : /* A subroutine of computed_jump_p. Return true if X contains a REG, or a
3568 : MEM or constant that is not in the constant pool, ignoring the condition
3569 : of an IF_THEN_ELSE. */
3570 :
3571 : static bool
3572 1920 : computed_jump_p_1 (const_rtx x)
3573 : {
3574 1920 : const enum rtx_code code = GET_CODE (x);
3575 1920 : int i, j;
3576 1920 : const char *fmt;
3577 :
3578 1920 : switch (code)
3579 : {
3580 : case LABEL_REF:
3581 : case PC:
3582 : return false;
3583 :
3584 : case CONST:
3585 : CASE_CONST_ANY:
3586 : case SYMBOL_REF:
3587 : case REG:
3588 : return true;
3589 :
3590 323 : case MEM:
3591 323 : return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3592 14 : && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
3593 :
3594 0 : case IF_THEN_ELSE:
3595 0 : return (computed_jump_p_1 (XEXP (x, 1))
3596 0 : || computed_jump_p_1 (XEXP (x, 2)));
3597 :
3598 0 : default:
3599 0 : break;
3600 : }
3601 :
3602 0 : fmt = GET_RTX_FORMAT (code);
3603 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3604 : {
3605 0 : if (fmt[i] == 'e'
3606 0 : && computed_jump_p_1 (XEXP (x, i)))
3607 : return true;
3608 :
3609 0 : else if (fmt[i] == 'E')
3610 0 : for (j = 0; j < XVECLEN (x, i); j++)
3611 0 : if (computed_jump_p_1 (XVECEXP (x, i, j)))
3612 : return true;
3613 : }
3614 :
3615 : return false;
3616 : }
3617 :
3618 : /* Return true if INSN is an indirect jump (aka computed jump).
3619 :
3620 : Tablejumps and casesi insns are not considered indirect jumps;
3621 : we can recognize them by a (use (label_ref)). */
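 : /* Illustrative note (not part of the original source): a typical
 : computed jump is (set (pc) (reg:DI 100)), which computed_jump_p_1
 : accepts because it reaches a REG; a MEM source is likewise accepted
 : unless its address is a constant-pool SYMBOL_REF. */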
3622 :
3623 : bool
3624 45843481 : computed_jump_p (const rtx_insn *insn)
3625 : {
3626 45843481 : int i;
3627 45843481 : if (JUMP_P (insn))
3628 : {
3629 41846979 : rtx pat = PATTERN (insn);
3630 :
3631 : /* If we have a JUMP_LABEL set, we're not a computed jump. */
3632 41846979 : if (JUMP_LABEL (insn) != NULL)
3633 : return false;
3634 :
3635 2440 : if (GET_CODE (pat) == PARALLEL)
3636 : {
3637 489 : int len = XVECLEN (pat, 0);
3638 489 : bool has_use_labelref = false;
3639 :
3640 1467 : for (i = len - 1; i >= 0; i--)
3641 978 : if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3642 0 : && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3643 : == LABEL_REF))
3644 : {
3645 : has_use_labelref = true;
3646 : break;
3647 : }
3648 :
3649 489 : if (! has_use_labelref)
3650 1467 : for (i = len - 1; i >= 0; i--)
3651 978 : if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3652 0 : && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
3653 978 : && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
3654 : return true;
3655 : }
3656 1951 : else if (GET_CODE (pat) == SET
3657 1920 : && SET_DEST (pat) == pc_rtx
3658 3871 : && computed_jump_p_1 (SET_SRC (pat)))
3659 : return true;
3660 : }
3661 : return false;
3662 : }
3663 :
3664 :
3665 :
3666 : /* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of
3667 : the equivalent add insn and pass the result to FN, using DATA as the
3668 : final argument. */
3669 :
3670 : static int
3671 19705531 : for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
3672 : {
3673 19705531 : rtx x = XEXP (mem, 0);
3674 19705531 : switch (GET_CODE (x))
3675 : {
3676 2305790 : case PRE_INC:
3677 2305790 : case POST_INC:
3678 2305790 : {
3679 4611580 : poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
3680 2305790 : rtx r1 = XEXP (x, 0);
3681 2305790 : rtx c = gen_int_mode (size, GET_MODE (r1));
3682 2305790 : return fn (mem, x, r1, r1, c, data);
3683 : }
3684 :
3685 17005357 : case PRE_DEC:
3686 17005357 : case POST_DEC:
3687 17005357 : {
3688 34010714 : poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
3689 17005357 : rtx r1 = XEXP (x, 0);
3690 17005357 : rtx c = gen_int_mode (-size, GET_MODE (r1));
3691 17005357 : return fn (mem, x, r1, r1, c, data);
3692 : }
3693 :
3694 394384 : case PRE_MODIFY:
3695 394384 : case POST_MODIFY:
3696 394384 : {
3697 394384 : rtx r1 = XEXP (x, 0);
3698 394384 : rtx add = XEXP (x, 1);
3699 394384 : return fn (mem, x, r1, add, NULL, data);
3700 : }
3701 :
3702 0 : default:
3703 0 : gcc_unreachable ();
3704 : }
3705 : }
3706 :
3707 : /* Traverse X looking for MEMs that have autoinc addresses.
3708 : For each such autoinc operation found, call FN, passing it
3709 : the innermost enclosing MEM, the operation itself, the RTX modified
3710 : by the operation, two RTXs (the second may be NULL) whose sum
3711 : represents the value to be held by the modified RTX afterwards,
3712 : and DATA. FN is to return 0 to continue the traversal, or any
3713 : other value to have that value returned to the caller of
3714 : for_each_inc_dec. */
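 : /* Worked example (illustrative, not part of the original source): for
 : MEM = (mem:SI (post_inc:SI (reg:SI R))) on a target where SImode is
 : 4 bytes wide, the helper above calls
 : FN (MEM, (post_inc:SI (reg:SI R)), R, R, (const_int 4), DATA).
 : A PRE_DEC address passes (const_int -4) instead, and for
 : {PRE,POST}_MODIFY the first addend is the (plus ...) expression
 : itself and the second is NULL. */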
3715 :
3716 : int
3717 1011196995 : for_each_inc_dec (rtx x,
3718 : for_each_inc_dec_fn fn,
3719 : void *data)
3720 : {
3721 1011196995 : subrtx_var_iterator::array_type array;
3722 5367976402 : FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
3723 : {
3724 4356779407 : rtx mem = *iter;
3725 4356779407 : if (mem
3726 4356779407 : && MEM_P (mem)
3727 253241357 : && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
3728 : {
3729 19705531 : int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
3730 19705531 : if (res != 0)
3731 0 : return res;
3732 19705531 : iter.skip_subrtxes ();
3733 : }
3734 : }
3735 1011196995 : return 0;
3736 1011196995 : }
3737 :
3738 :
3739 : /* Search X for any reference to REGNO. Return the rtx of the
3740 : reference if one is found, otherwise NULL_RTX. */
3741 :
3742 : rtx
3743 0 : regno_use_in (unsigned int regno, rtx x)
3744 : {
3745 0 : const char *fmt;
3746 0 : int i, j;
3747 0 : rtx tem;
3748 :
3749 0 : if (REG_P (x) && REGNO (x) == regno)
3750 : return x;
3751 :
3752 0 : fmt = GET_RTX_FORMAT (GET_CODE (x));
3753 0 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3754 : {
3755 0 : if (fmt[i] == 'e')
3756 : {
3757 0 : if ((tem = regno_use_in (regno, XEXP (x, i))))
3758 : return tem;
3759 : }
3760 0 : else if (fmt[i] == 'E')
3761 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3762 0 : if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3763 : return tem;
3764 : }
3765 :
3766 : return NULL_RTX;
3767 : }
3768 :
3769 : /* Return a value indicating whether OP, an operand of a commutative
3770 : operation, is preferred as the first or second operand. The more
3771 : positive the value, the stronger the preference for being the first
3772 : operand. */
3773 :
3774 : int
3775 2118437696 : commutative_operand_precedence (rtx op)
3776 : {
3777 2118437696 : enum rtx_code code = GET_CODE (op);
3778 :
3779 : /* Constants always become the second operand. Prefer "nice" constants. */
3780 2118437696 : if (code == CONST_INT)
3781 : return -10;
3782 : if (code == CONST_WIDE_INT)
3783 : return -9;
3784 : if (code == CONST_POLY_INT)
3785 : return -8;
3786 : if (code == CONST_DOUBLE)
3787 : return -8;
3788 : if (code == CONST_FIXED)
3789 : return -8;
3790 1257706526 : op = avoid_constant_pool_reference (op);
3791 1257706526 : code = GET_CODE (op);
3792 :
3793 1257706526 : switch (GET_RTX_CLASS (code))
3794 : {
3795 25321437 : case RTX_CONST_OBJ:
3796 25321437 : if (code == CONST_INT)
3797 : return -7;
3798 : if (code == CONST_WIDE_INT)
3799 : return -6;
3800 : if (code == CONST_POLY_INT)
3801 : return -5;
3802 : if (code == CONST_DOUBLE)
3803 : return -5;
3804 : if (code == CONST_FIXED)
3805 : return -5;
3806 : return -4;
3807 :
3808 40378109 : case RTX_EXTRA:
3809 : /* SUBREGs of objects should come second. */
3810 40378109 : if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3811 : return -3;
3812 : return 0;
3813 :
3814 932436512 : case RTX_OBJ:
3815 : /* Complex expressions should come first, so decrease the priority
3816 : of objects. Prefer pointer objects over non-pointer objects. */
3817 840204748 : if ((REG_P (op) && REG_POINTER (op))
3818 1402518073 : || (MEM_P (op) && MEM_POINTER (op)))
3819 387219216 : return -1;
3820 : return -2;
3821 :
3822 : case RTX_COMM_ARITH:
3823 : /* Prefer operands that are themselves commutative to be first.
3824 : This helps to make things linear. In particular,
3825 : (and (and (reg) (reg)) (not (reg))) is canonical. */
3826 : return 4;
3827 :
3828 76977530 : case RTX_BIN_ARITH:
3829 : /* If only one operand is a binary expression, it will be the first
3830 : operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3831 : is canonical, although it will usually be further simplified. */
3832 76977530 : return 3;
3833 :
3834 8520405 : case RTX_COMM_COMPARE:
3835 8520405 : case RTX_COMPARE:
3836 : /* Give comparisons a cost between the unary expressions below
3837 : and the other binary expressions above, so that we don't have
3838 : a situation where the canonical order is binary, unary, binary. */
3839 8520405 : return 2;
3840 :
3841 26295025 : case RTX_UNARY:
3842 : /* Then prefer NEG and NOT. */
3843 26295025 : if (code == NEG || code == NOT)
3844 : return 1;
3845 : /* FALLTHRU */
3846 :
3847 : default:
3848 : return 0;
3849 : }
3850 : }
3851 :
3852 : /* Return true iff it is necessary to swap the operands of a commutative
3853 : operation in order to canonicalize the expression. */
3854 :
3855 : bool
3856 931254598 : swap_commutative_operands_p (rtx x, rtx y)
3857 : {
3858 931254598 : return (commutative_operand_precedence (x)
3859 931254598 : < commutative_operand_precedence (y));
3860 : }
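 :
 : /* Illustrative note (not part of the original source): constants have
 : the most negative precedence, so swap_commutative_operands_p returns
 : true for ((const_int 4), (reg)), and canonicalization rewrites
 : (plus:SI (const_int 4) (reg:SI R)) as
 : (plus:SI (reg:SI R) (const_int 4)), with the constant second. */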
3861 :
3862 : /* Return true if X is an autoincrement side effect and the register is
3863 : not the stack pointer. */
3864 : bool
3865 0 : auto_inc_p (const_rtx x)
3866 : {
3867 0 : switch (GET_CODE (x))
3868 : {
3869 0 : case PRE_INC:
3870 0 : case POST_INC:
3871 0 : case PRE_DEC:
3872 0 : case POST_DEC:
3873 0 : case PRE_MODIFY:
3874 0 : case POST_MODIFY:
3875 : /* There are no REG_INC notes for SP. */
3876 0 : if (XEXP (x, 0) != stack_pointer_rtx)
3877 0 : return true;
3878 : default:
3879 : break;
3880 : }
3881 : return false;
3882 : }
3883 :
3884 : /* Return true if IN contains a piece of rtl that has the address LOC. */
3885 : bool
3886 1054812 : loc_mentioned_in_p (rtx *loc, const_rtx in)
3887 : {
3888 1054812 : enum rtx_code code;
3889 1054812 : const char *fmt;
3890 1054812 : int i, j;
3891 :
3892 1054812 : if (!in)
3893 : return false;
3894 :
3895 1054812 : code = GET_CODE (in);
3896 1054812 : fmt = GET_RTX_FORMAT (code);
3897 2107500 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3898 : {
3899 1674879 : if (fmt[i] == 'e')
3900 : {
3901 1034545 : if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3902 621479 : return true;
3903 : }
3904 640334 : else if (fmt[i] == 'E')
3905 27133 : for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3906 18790 : if (loc == &XVECEXP (in, i, j)
3907 18790 : || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3908 712 : return true;
3909 : }
3910 : return false;
3911 : }
3912 :
3913 : /* Reinterpret a subreg as a bit extraction from an integer and return
3914 : the position of the least significant bit of the extracted value.
3915 : In other words, if the extraction were performed as a shift right
3916 : and mask, return the number of bits to shift right.
3917 :
3918 : The outer value of the subreg has OUTER_BYTES bytes and starts at
3919 : byte offset SUBREG_BYTE within an inner value of INNER_BYTES bytes. */
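 : /* Worked example (illustrative, not part of the original source): with
 : OUTER_BYTES = 4, INNER_BYTES = 8 and SUBREG_BYTE = 4, a target with
 : neither BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN set yields byte_pos = 4
 : and hence an lsb of 32 bits, while a target with both set yields
 : byte_pos = 8 - (4 + 4) = 0 and hence an lsb of 0. */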
3920 :
3921 : poly_uint64
3922 47378894 : subreg_size_lsb (poly_uint64 outer_bytes,
3923 : poly_uint64 inner_bytes,
3924 : poly_uint64 subreg_byte)
3925 : {
3926 47378894 : poly_uint64 subreg_end, trailing_bytes, byte_pos;
3927 :
3928 : /* A paradoxical subreg begins at bit position 0. */
3929 47378894 : gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
3930 47378894 : if (maybe_gt (outer_bytes, inner_bytes))
3931 : {
3932 43277 : gcc_checking_assert (known_eq (subreg_byte, 0U));
3933 43277 : return 0;
3934 : }
3935 :
3936 47335617 : subreg_end = subreg_byte + outer_bytes;
3937 47335617 : trailing_bytes = inner_bytes - subreg_end;
3938 47335617 : if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
3939 : byte_pos = trailing_bytes;
3940 47335617 : else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
3941 47335617 : byte_pos = subreg_byte;
3942 : else
3943 : {
3944 : /* When bytes and words have opposite endianness, we must be able
3945 : to split offsets into words and bytes at compile time. */
3946 : poly_uint64 leading_word_part
3947 : = force_align_down (subreg_byte, UNITS_PER_WORD);
3948 : poly_uint64 trailing_word_part
3949 : = force_align_down (trailing_bytes, UNITS_PER_WORD);
3950 : /* If the subreg crosses a word boundary ensure that
3951 : it also begins and ends on a word boundary. */
3952 : gcc_assert (known_le (subreg_end - leading_word_part,
3953 : (unsigned int) UNITS_PER_WORD)
3954 : || (known_eq (leading_word_part, subreg_byte)
3955 : && known_eq (trailing_word_part, trailing_bytes)));
3956 : if (WORDS_BIG_ENDIAN)
3957 : byte_pos = trailing_word_part + (subreg_byte - leading_word_part);
3958 : else
3959 : byte_pos = leading_word_part + (trailing_bytes - trailing_word_part);
3960 : }
3961 :
3962 47335617 : return byte_pos * BITS_PER_UNIT;
3963 : }
3964 :
3965 : /* Given a subreg X, return the bit offset where the subreg begins
3966 : (counting from the least significant bit of the reg). */
3967 :
3968 : poly_uint64
3969 2944507 : subreg_lsb (const_rtx x)
3970 : {
3971 5889014 : return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3972 2944507 : SUBREG_BYTE (x));
3973 : }
3974 :
3975 : /* Return the subreg byte offset for a subreg whose outer value has
3976 : OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where
3977 : there are LSB_SHIFT *bits* between the lsb of the outer value and the
3978 : lsb of the inner value. This is the inverse of the calculation
3979 : performed by subreg_lsb_1 (which converts byte offsets to bit shifts). */
3980 :
3981 : poly_uint64
3982 40122849 : subreg_size_offset_from_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes,
3983 : poly_uint64 lsb_shift)
3984 : {
3985 : /* A paradoxical subreg begins at bit position 0. */
3986 40122849 : gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
3987 40122849 : if (maybe_gt (outer_bytes, inner_bytes))
3988 : {
3989 0 : gcc_checking_assert (known_eq (lsb_shift, 0U));
3990 0 : return 0;
3991 : }
3992 :
3993 40122849 : poly_uint64 lower_bytes = exact_div (lsb_shift, BITS_PER_UNIT);
3994 40122849 : poly_uint64 upper_bytes = inner_bytes - (lower_bytes + outer_bytes);
3995 40122849 : if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
3996 : return upper_bytes;
3997 40122849 : else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
3998 40122849 : return lower_bytes;
3999 : else
4000 : {
4001 : /* When bytes and words have opposite endianness, we must be able
4002 : to split offsets into words and bytes at compile time. */
4003 : poly_uint64 lower_word_part = force_align_down (lower_bytes,
4004 : UNITS_PER_WORD);
4005 : poly_uint64 upper_word_part = force_align_down (upper_bytes,
4006 : UNITS_PER_WORD);
4007 : if (WORDS_BIG_ENDIAN)
4008 : return upper_word_part + (lower_bytes - lower_word_part);
4009 : else
4010 : return lower_word_part + (upper_bytes - upper_word_part);
4011 : }
4012 : }
4013 :
4014 : /* Fill in information about a subreg of a hard register.
4015 : xregno - A regno of an inner hard subreg_reg (or what will become one).
4016 : xmode - The mode of xregno.
4017 : offset - The byte offset.
4018 : ymode - The mode of a top level SUBREG (or what may become one).
4019 : info - Pointer to structure to fill in.
4020 :
4021 : Rather than considering one particular inner register (and thus one
4022 : particular "outer" register) in isolation, this function really uses
4023 : XREGNO as a model for a sequence of isomorphic hard registers. Thus the
4024 : function does not check whether adding INFO->offset to XREGNO gives
4025 : a valid hard register; even if INFO->offset + XREGNO is out of range,
4026 : there might be another register of the same type that is in range.
4027 : Likewise it doesn't check whether targetm.hard_regno_mode_ok accepts
4028 : the new register, since that can depend on things like whether the final
4029 : register number is even or odd. Callers that want to check whether
4030 : this particular subreg can be replaced by a simple (reg ...) should
4031 : use simplify_subreg_regno. */
4032 :
4033 : void
4034 33445425 : subreg_get_info (unsigned int xregno, machine_mode xmode,
4035 : poly_uint64 offset, machine_mode ymode,
4036 : struct subreg_info *info)
4037 : {
4038 33445425 : unsigned int nregs_xmode, nregs_ymode;
4039 :
4040 33445425 : gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
4041 :
4042 66890850 : poly_uint64 xsize = GET_MODE_SIZE (xmode);
4043 66890850 : poly_uint64 ysize = GET_MODE_SIZE (ymode);
4044 :
4045 33445425 : bool rknown = false;
4046 :
4047 : /* If the register representation of a non-scalar mode has holes in it,
4048 : we expect the scalar units to be concatenated together, with the holes
4049 : distributed evenly among the scalar units. Each scalar unit must occupy
4050 : at least one register. */
4051 33445425 : if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
4052 : {
4053 : /* As a consequence, we must be dealing with a constant number of
4054 : scalars, and thus a constant offset and number of units. */
4055 0 : HOST_WIDE_INT coffset = offset.to_constant ();
4056 0 : HOST_WIDE_INT cysize = ysize.to_constant ();
4057 0 : nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
4058 0 : unsigned int nunits = GET_MODE_NUNITS (xmode).to_constant ();
4059 0 : scalar_mode xmode_unit = GET_MODE_INNER (xmode);
4060 0 : gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
4061 0 : gcc_assert (nregs_xmode
4062 : == (nunits
4063 : * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
4064 0 : gcc_assert (hard_regno_nregs (xregno, xmode)
4065 : == hard_regno_nregs (xregno, xmode_unit) * nunits);
4066 :
4067 : /* You can only ask for a SUBREG of a value with holes in the middle
4068 : if you don't cross the holes. (Such a SUBREG should be done by
4069 : picking a different register class, or doing it in memory if
4070 : necessary.) An example of a value with holes is XCmode on 32-bit
4071 : x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
4072 : 3 for each part, but in memory it's two 128-bit parts.
4073 : Padding is assumed to be at the end (not necessarily the 'high part')
4074 : of each unit. */
4075 0 : if ((coffset / GET_MODE_SIZE (xmode_unit) + 1 < nunits)
4076 0 : && (coffset / GET_MODE_SIZE (xmode_unit)
4077 0 : != ((coffset + cysize - 1) / GET_MODE_SIZE (xmode_unit))))
4078 : {
4079 0 : info->representable_p = false;
4080 0 : rknown = true;
4081 : }
4082 : }
4083 : else
4084 33445425 : nregs_xmode = hard_regno_nregs (xregno, xmode);
4085 :
4086 33445425 : nregs_ymode = hard_regno_nregs (xregno, ymode);
4087 :
4088 : /* Subreg sizes must be ordered, so that we can tell whether they are
4089 : partial, paradoxical or complete. */
4090 33445425 : gcc_checking_assert (ordered_p (xsize, ysize));
4091 :
4092 : /* Paradoxical subregs are otherwise valid. */
4093 33445425 : if (!rknown && known_eq (offset, 0U) && maybe_gt (ysize, xsize))
4094 : {
4095 12638484 : info->representable_p = true;
4096 : /* If this is a big endian paradoxical subreg, which uses more
4097 : actual hard registers than the original register, we must
4098 : return a negative offset so that we find the proper highpart
4099 : of the register.
4100 :
4101 : We assume that the ordering of registers within a multi-register
4102 : value has a consistent endianness: if bytes and register words
4103 : have different endianness, the hard registers that make up a
4104 : multi-register value must be at least word-sized. */
4105 12638484 : if (REG_WORDS_BIG_ENDIAN)
4106 : info->offset = (int) nregs_xmode - (int) nregs_ymode;
4107 : else
4108 12638484 : info->offset = 0;
4109 12638484 : info->nregs = nregs_ymode;
4110 12638484 : return;
4111 : }
4112 :
4113 : /* If registers store different numbers of bits in the different
4114 : modes, we cannot generally form this subreg. */
4115 20806941 : poly_uint64 regsize_xmode, regsize_ymode;
4116 17945649 : if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
4117 0 : && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
4118 20806941 : && multiple_p (xsize, nregs_xmode, ®size_xmode)
4119 20806941 : && multiple_p (ysize, nregs_ymode, ®size_ymode))
4120 : {
4121 20806941 : if (!rknown
4122 20806941 : && ((nregs_ymode > 1 && maybe_gt (regsize_xmode, regsize_ymode))
4123 20806929 : || (nregs_xmode > 1 && maybe_gt (regsize_ymode, regsize_xmode))))
4124 : {
4125 119 : info->representable_p = false;
4126 119 : if (!can_div_away_from_zero_p (ysize, regsize_xmode, &info->nregs)
4127 119 : || !can_div_trunc_p (offset, regsize_xmode, &info->offset))
4128 : /* Checked by validate_subreg. We must know at compile time
4129 : which inner registers are being accessed. */
4130 : gcc_unreachable ();
4131 32931494 : return;
4132 : }
4133 : /* It's not valid to extract a subreg of mode YMODE at OFFSET that
4134 : would go outside of XMODE. */
4135 20806822 : if (!rknown && maybe_gt (ysize + offset, xsize))
4136 : {
4137 0 : info->representable_p = false;
4138 0 : info->nregs = nregs_ymode;
4139 0 : if (!can_div_trunc_p (offset, regsize_xmode, &info->offset))
4140 : /* Checked by validate_subreg. We must know at compile time
4141 : which inner registers are being accessed. */
4142 : gcc_unreachable ();
4143 0 : return;
4144 : }
4145 : /* Quick exit for the simple and common case of extracting whole
4146 : subregisters from a multiregister value. */
4147 : /* ??? It would be better to integrate this into the code below,
4148 : if we can generalize the concept enough and figure out how
4149 : odd-sized modes can coexist with the other weird cases we support. */
4150 20806822 : HOST_WIDE_INT count;
4151 20806822 : if (!rknown
4152 : && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
4153 20806822 : && known_eq (regsize_xmode, regsize_ymode)
4154 20806822 : && constant_multiple_p (offset, regsize_ymode, &count))
4155 : {
4156 13479395 : info->representable_p = true;
4157 13479395 : info->nregs = nregs_ymode;
4158 13479395 : info->offset = count;
4159 13479395 : gcc_assert (info->offset + info->nregs <= (int) nregs_xmode);
4160 : return;
4161 : }
4162 : }
4163 :
4164 : /* Lowpart subregs are otherwise valid. */
4165 7327427 : if (!rknown && known_eq (offset, subreg_lowpart_offset (ymode, xmode)))
4166 : {
4167 6813496 : info->representable_p = true;
4168 6813496 : rknown = true;
4169 :
4170 6813496 : if (known_eq (offset, 0U) || nregs_xmode == nregs_ymode)
4171 : {
4172 6813496 : info->offset = 0;
4173 6813496 : info->nregs = nregs_ymode;
4174 6813496 : return;
4175 : }
4176 : }
4177 :
4178 : /* Set NUM_BLOCKS to the number of independently-representable YMODE
4179 : values there are in (reg:XMODE XREGNO). We can view the register
4180 : as consisting of this number of independent "blocks", where each
4181 : block occupies NREGS_YMODE registers and contains exactly one
4182 : representable YMODE value. */
4183 513931 : gcc_assert ((nregs_xmode % nregs_ymode) == 0);
4184 513931 : unsigned int num_blocks = nregs_xmode / nregs_ymode;
4185 :
4186 : /* Calculate the number of bytes in each block. This must always
4187 : be exact, otherwise we don't know how to verify the constraint.
4188 : These conditions may be relaxed but subreg_regno_offset would
4189 : need to be redesigned. */
4190 513931 : poly_uint64 bytes_per_block = exact_div (xsize, num_blocks);
4191 :
4192 : /* Get the number of the first block that contains the subreg and the byte
4193 : offset of the subreg from the start of that block. */
4194 513931 : unsigned int block_number;
4195 513931 : poly_uint64 subblock_offset;
4196 513931 : if (!can_div_trunc_p (offset, bytes_per_block, &block_number,
4197 : &subblock_offset))
4198 : /* Checked by validate_subreg. We must know at compile time which
4199 : inner registers are being accessed. */
4200 : gcc_unreachable ();
4201 :
4202 513931 : if (!rknown)
4203 : {
4204 : /* Only the lowpart of each block is representable. */
4205 513931 : info->representable_p
4206 513931 : = known_eq (subblock_offset,
4207 : subreg_size_lowpart_offset (ysize, bytes_per_block));
4208 513931 : rknown = true;
4209 : }
4210 :
4211 : /* We assume that the ordering of registers within a multi-register
4212 : value has a consistent endianness: if bytes and register words
4213 : have different endianness, the hard registers that make up a
4214 : multi-register value must be at least word-sized. */
4215 513931 : if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN)
4216 : /* The block number we calculated above followed memory endianness.
4217 : Convert it to register endianness by counting back from the end.
4218 : (Note that, because of the assumption above, each block must be
4219 : at least word-sized.) */
4220 : info->offset = (num_blocks - block_number - 1) * nregs_ymode;
4221 : else
4222 513931 : info->offset = block_number * nregs_ymode;
4223 513931 : info->nregs = nregs_ymode;
4224 : }
4225 :
4226 : /* This function returns the regno offset of a subreg expression.
4227 : xregno - A regno of an inner hard subreg_reg (or what will become one).
4228 : xmode - The mode of xregno.
4229 : offset - The byte offset.
4230 : ymode - The mode of a top level SUBREG (or what may become one).
4231 : RETURN - The regno offset which would be used. */
4232 : unsigned int
4233 5411657 : subreg_regno_offset (unsigned int xregno, machine_mode xmode,
4234 : poly_uint64 offset, machine_mode ymode)
4235 : {
4236 5411657 : struct subreg_info info;
4237 5411657 : subreg_get_info (xregno, xmode, offset, ymode, &info);
4238 5411657 : return info.offset;
4239 : }
4240 :
4241 : /* This function returns true when the offset is representable via
4242 : subreg_offset in the given regno.
4243 : xregno - A regno of an inner hard subreg_reg (or what will become one).
4244 : xmode - The mode of xregno.
4245 : offset - The byte offset.
4246 : ymode - The mode of a top level SUBREG (or what may become one).
4247 : RETURN - Whether the offset is representable. */
4248 : bool
4249 0 : subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
4250 : poly_uint64 offset, machine_mode ymode)
4251 : {
4252 0 : struct subreg_info info;
4253 0 : subreg_get_info (xregno, xmode, offset, ymode, &info);
4254 0 : return info.representable_p;
4255 : }
4256 :
4257 : /* Return the number of a YMODE register to which
4258 :
4259 : (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
4260 :
4261 : can be simplified. Return -1 if the subreg can't be simplified.
4262 :
4263 : XREGNO is a hard register number. ALLOW_STACK_REGS is true if
4264 : we should allow subregs of stack_pointer_rtx, frame_pointer_rtx
4265 : and arg_pointer_rtx (which are normally expected to be the unique
4266 : way of referring to their respective registers). */
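 : /* Worked example (illustrative, not part of the original source): on a
 : little-endian target whose hard registers are SImode-sized, the
 : highpart access (subreg:SI (reg:DI k) 4) simplifies to hard register
 : k + 1, provided REG_CAN_CHANGE_MODE_P allows the DImode-to-SImode
 : change for k and SImode is OK for k + 1; otherwise -1 is returned. */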
4267 :
4268 :
4269 : int
4270 28850367 : simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
4271 : poly_uint64 offset, machine_mode ymode,
4272 : bool allow_stack_regs)
4273 : {
4274 28850367 : struct subreg_info info;
4275 28850367 : unsigned int yregno;
4276 :
4277 : /* Give the backend a chance to disallow the mode change. */
4278 28850367 : if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
4279 28850367 : && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
4280 28850367 : && !REG_CAN_CHANGE_MODE_P (xregno, xmode, ymode))
4281 : return -1;
4282 :
4283 28202269 : if (!allow_stack_regs)
4284 : {
4285 : /* We shouldn't simplify stack-related registers. */
4286 27852496 : if ((!reload_completed || frame_pointer_needed)
4287 24480469 : && xregno == FRAME_POINTER_REGNUM)
4288 : return -1;
4289 :
4290 27734926 : if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4291 : && xregno == ARG_POINTER_REGNUM)
4292 : return -1;
4293 :
4294 27621220 : if (xregno == STACK_POINTER_REGNUM
4295 : /* We should convert the hard stack register in LRA if it is
4296 : possible. */
4297 114160 : && ! lra_in_progress)
4298 : return -1;
4299 : }
4300 :
4301 : /* Try to get the register offset. */
4302 27857944 : subreg_get_info (xregno, xmode, offset, ymode, &info);
4303 27857944 : if (!info.representable_p)
4304 : return -1;
4305 :
4306 : /* Make sure that the offsetted register value is in range. */
4307 27403786 : yregno = xregno + info.offset;
4308 27403786 : if (!HARD_REGISTER_NUM_P (yregno))
4309 : return -1;
4310 :
4311 : /* See whether (reg:YMODE YREGNO) is valid.
4312 :
4313 : ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
4314 : This is a kludge to work around how complex FP arguments are passed
4315 : on IA-64 and should be fixed. See PR target/49226. */
4316 27391742 : if (!targetm.hard_regno_mode_ok (yregno, ymode)
4317 27391742 : && targetm.hard_regno_mode_ok (xregno, xmode))
4318 : return -1;
4319 :
4320 27150263 : return (int) yregno;
4321 : }
4322 :
4323 : /* A wrapper around simplify_subreg_regno that uses subreg_lowpart_offset
4324 : (xmode, ymode) as the offset. */
4325 :
4326 : int
4327 0 : lowpart_subreg_regno (unsigned int regno, machine_mode xmode,
4328 : machine_mode ymode)
4329 : {
4330 0 : poly_uint64 offset = subreg_lowpart_offset (xmode, ymode);
4331 0 : return simplify_subreg_regno (regno, xmode, offset, ymode);
4332 : }
4333 :
4334 : /* Return the final regno that a subreg expression refers to. */
4335 : unsigned int
4336 11691 : subreg_regno (const_rtx x)
4337 : {
4338 11691 : unsigned int ret;
4339 11691 : rtx subreg = SUBREG_REG (x);
4340 11691 : int regno = REGNO (subreg);
4341 :
4342 23382 : ret = regno + subreg_regno_offset (regno,
4343 11691 : GET_MODE (subreg),
4344 11691 : SUBREG_BYTE (x),
4345 11691 : GET_MODE (x));
4346 11691 : return ret;
4347 :
4348 : }
4349 :
4350 : /* Return the number of registers that a subreg expression refers
4351 : to. */
4352 : unsigned int
4353 169209 : subreg_nregs (const_rtx x)
4354 : {
4355 169209 : return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
4356 : }
4357 :
4358 : /* Return the number of registers that the subreg expression X, whose
4359 : inner register has number REGNO, refers to. This is a variant of
4360 : subreg_nregs above in which the regno is passed in explicitly. */
4361 :
4362 : unsigned int
4363 169209 : subreg_nregs_with_regno (unsigned int regno, const_rtx x)
4364 : {
4365 169209 : struct subreg_info info;
4366 169209 : rtx subreg = SUBREG_REG (x);
4367 :
4368 169209 : subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
4369 : &info);
4370 169209 : return info.nregs;
4371 : }
4372 :
4373 : struct parms_set_data
4374 : {
4375 : int nregs;
4376 : HARD_REG_SET regs;
4377 : };
4378 :
4379 : /* Helper function for noticing stores to parameter registers. */
4380 : static void
4381 63063 : parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
4382 : {
4383 63063 : struct parms_set_data *const d = (struct parms_set_data *) data;
4384 63061 : if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4385 126124 : && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
4386 : {
4387 62724 : CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
4388 62724 : d->nregs--;
4389 : }
4390 63063 : }
4391 :
4392 : /* Look backward for the first parameter to be loaded.
4393 : Note that loads of all parameters will not necessarily be
4394 : found if CSE has eliminated some of them (e.g., an argument
4395 : to the outer function is passed down as a parameter).
4396 : Do not skip BOUNDARY. */
4397 : rtx_insn *
4398 40148 : find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
4399 : {
4400 40148 : struct parms_set_data parm;
4401 40148 : rtx p;
4402 40148 : rtx_insn *before, *first_set;
4403 :
4404 : /* Since different machines initialize their parameter registers
4405 : in different orders, assume nothing. Collect the set of all
4406 : parameter registers. */
4407 40148 : CLEAR_HARD_REG_SET (parm.regs);
4408 40148 : parm.nregs = 0;
4409 118297 : for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
4410 78149 : if (GET_CODE (XEXP (p, 0)) == USE
4411 77987 : && REG_P (XEXP (XEXP (p, 0), 0))
4412 143345 : && !STATIC_CHAIN_REG_P (XEXP (XEXP (p, 0), 0)))
4413 : {
4414 64921 : gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
4415 :
4416 : /* We only care about registers which can hold function
4417 : arguments. */
4418 64921 : if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
4419 1821 : continue;
4420 :
4421 63100 : SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
4422 63100 : parm.nregs++;
4423 : }
4424 : before = call_insn;
4425 : first_set = call_insn;
4426 :
4427 : /* Search backward for the first set of a register in this set. */
4428 102872 : while (parm.nregs && before != boundary)
4429 : {
4430 63063 : before = PREV_INSN (before);
4431 :
4432 : /* It is possible that some loads got CSEed from one call to
4433 : another. Stop in that case. */
4434 63063 : if (CALL_P (before))
4435 : break;
4436 :
4437 : /* Our caller must either ensure that we will find all sets
4438 : (in case the code has not been optimized yet), or guard
4439 : against possible labels by setting BOUNDARY to the preceding
4440 : CODE_LABEL. */
4441 63063 : if (LABEL_P (before))
4442 : {
4443 0 : gcc_assert (before == boundary);
4444 : break;
4445 : }
4446 :
4447 63063 : if (INSN_P (before))
4448 : {
4449 63063 : int nregs_old = parm.nregs;
4450 63063 : note_stores (before, parms_set, &parm);
4451 : /* If we found something that did not set a parameter reg,
4452 : we're done. Do not keep going, as that might result
4453 : in hoisting an insn before the setting of a pseudo
4454 : that is used by the hoisted insn. */
4455 63063 : if (nregs_old != parm.nregs)
4456 : first_set = before;
4457 : else
4458 : break;
4459 : }
4460 : }
4461 40148 : return first_set;
4462 : }
4463 :
4464 : /* Return true if we should avoid inserting code between INSN and the preceding
4465 : call instruction. */
4466 :
4467 : bool
4468 11087535 : keep_with_call_p (const rtx_insn *insn)
4469 : {
4470 11087535 : rtx set;
4471 :
4472 11087535 : if (INSN_P (insn) && (set = single_set (insn)) != NULL)
4473 : {
4474 7483684 : if (REG_P (SET_DEST (set))
4475 2005697 : && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
4476 2005697 : && fixed_regs[REGNO (SET_DEST (set))]
4477 7650936 : && general_operand (SET_SRC (set), VOIDmode))
4478 : return true;
4479 7483333 : if (REG_P (SET_SRC (set))
4480 786182 : && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
4481 452509 : && REG_P (SET_DEST (set))
4482 7617081 : && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
4483 : return true;
4484 : /* There may be a stack pop just after the call and before the store
4485 : of the return register. Search for the actual store when deciding
4486 : if we can break or not. */
4487 7483333 : if (SET_DEST (set) == stack_pointer_rtx)
4488 : {
4489 : /* This CONST_CAST is okay because next_nonnote_insn just
4490 : returns its argument and we assign it to a const_rtx
4491 : variable. */
4492 165689 : const rtx_insn *i2
4493 165689 : = next_nonnote_insn (const_cast<rtx_insn *> (insn));
4494 165689 : if (i2 && keep_with_call_p (i2))
4495 : return true;
4496 : }
4497 : }
4498 : return false;
4499 : }
4500 :
4501 : /* Return true if LABEL is a target of JUMP_INSN. This applies only
4502 : to non-complex jumps. That is, direct unconditional, conditional,
4503 : and tablejumps, but not computed jumps or returns. It also does
4504 : not apply to the fallthru case of a conditional jump. */
4505 :
4506 : bool
4507 23968497 : label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
4508 : {
4509 23968497 : rtx tmp = JUMP_LABEL (jump_insn);
4510 23968497 : rtx_jump_table_data *table;
4511 :
4512 23968497 : if (label == tmp)
4513 : return true;
4514 :
4515 3995791 : if (tablejump_p (jump_insn, NULL, &table))
4516 : {
4517 0 : rtvec vec = table->get_labels ();
4518 0 : int i, veclen = GET_NUM_ELEM (vec);
4519 :
4520 0 : for (i = 0; i < veclen; ++i)
4521 0 : if (XEXP (RTVEC_ELT (vec, i), 0) == label)
4522 : return true;
4523 : }
4524 :
4525 3995791 : if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
4526 : return true;
4527 :
4528 : return false;
4529 : }
4530 :
4531 :
4532 : /* Return an estimate of the cost of computing rtx X.
4533 : One use is in cse, to decide which expression to keep in the hash table.
4534 : Another is in rtl generation, to pick the cheapest way to multiply.
4535 : Other uses like the latter are expected in the future.
4536 :
4537 : X appears as operand OPNO in an expression with code OUTER_CODE.
4538 : SPEED specifies whether costs optimized for speed or size should
4539 : be returned. */
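 : /* Worked example (illustrative, not part of the original source):
 : assuming UNITS_PER_WORD is 8, a TImode MULT has mode_size 16, so
 : factor = 16 / 8 = 2 and the default cost computed below is
 : 2 * 2 * COSTS_N_INSNS (5). The targetm.rtx_costs hook may override
 : this; otherwise the costs of the sub-rtxes are summed on top. */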
4540 :
4541 : int
4542 12555418361 : rtx_cost (rtx x, machine_mode mode, enum rtx_code outer_code,
4543 : int opno, bool speed)
4544 : {
4545 12555418361 : int i, j;
4546 12555418361 : enum rtx_code code;
4547 12555418361 : const char *fmt;
4548 12555418361 : int total;
4549 12555418361 : int factor;
4550 12555418361 : unsigned mode_size;
4551 :
4552 12555418361 : if (x == 0)
4553 : return 0;
4554 :
4555 12555418361 : if (GET_CODE (x) == SET)
4556 : /* A SET doesn't have a mode, so let's look at the SET_DEST to get
4557 : the mode for the factor. */
4558 47468905 : mode = GET_MODE (SET_DEST (x));
4559 12507949456 : else if (GET_MODE (x) != VOIDmode)
4560 9641910639 : mode = GET_MODE (x);
4561 :
4562 25110836722 : mode_size = estimated_poly_value (GET_MODE_SIZE (mode));
4563 :
4564 : /* A size N times larger than UNITS_PER_WORD likely needs N times as
4565 : many insns, taking N times as long. */
4566 13062785431 : factor = mode_size > UNITS_PER_WORD ? mode_size / UNITS_PER_WORD : 1;
4567 :
4568 : /* Compute the default costs of certain things.
4569 : Note that targetm.rtx_costs can override the defaults. */
4570 :
4571 12555418361 : code = GET_CODE (x);
4572 12555418361 : switch (code)
4573 : {
4574 1754426710 : case MULT:
4575 1754426710 : case FMA:
4576 1754426710 : case SS_MULT:
4577 1754426710 : case US_MULT:
4578 1754426710 : case SMUL_HIGHPART:
4579 1754426710 : case UMUL_HIGHPART:
4580 : /* Multiplication has time-complexity O(N*N), where N is the
4581 : number of units (translated from digits) when using
4582 : schoolbook long multiplication. */
4583 1754426710 : total = factor * factor * COSTS_N_INSNS (5);
4584 1754426710 : break;
4585 72322975 : case DIV:
4586 72322975 : case UDIV:
4587 72322975 : case MOD:
4588 72322975 : case UMOD:
4589 72322975 : case SS_DIV:
4590 72322975 : case US_DIV:
4591 : /* Similarly, complexity for schoolbook long division. */
4592 72322975 : total = factor * factor * COSTS_N_INSNS (7);
4593 72322975 : break;
4594 0 : case USE:
4595 : /* Used in combine.cc as a marker. */
4596 0 : total = 0;
4597 0 : break;
4598 10728668676 : default:
4599 10728668676 : total = factor * COSTS_N_INSNS (1);
4600 : }
4601 :
4602 12555418361 : switch (code)
4603 : {
4604 : case REG:
4605 : return 0;
4606 :
4607 11580341 : case SUBREG:
4608 11580341 : total = 0;
4609 : /* If we can't tie these modes, make this expensive. The larger
4610 : the mode, the more expensive it is. */
4611 11580341 : if (!targetm.modes_tieable_p (mode, GET_MODE (SUBREG_REG (x))))
4612 4194075 : return COSTS_N_INSNS (2 + factor);
4613 : break;
4614 :
4615 17238522 : case TRUNCATE:
4616 17238522 : if (targetm.modes_tieable_p (mode, GET_MODE (XEXP (x, 0))))
4617 : {
4618 3843532 : total = 0;
4619 3843532 : break;
4620 : }
4621 : /* FALLTHRU */
4622 7680980804 : default:
4623 7680980804 : if (targetm.rtx_costs (x, mode, outer_code, opno, &total, speed))
4624 3521946629 : return total;
4625 : break;
4626 : }
4627 :
4628 : /* Sum the costs of the sub-rtx's, plus cost of this operation,
4629 : which is already in total. */
4630 :
4631 4170263973 : fmt = GET_RTX_FORMAT (code);
4632 12420403570 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4633 8250139597 : if (fmt[i] == 'e')
4634 8181298834 : total += rtx_cost (XEXP (x, i), mode, code, i, speed);
4635 68840763 : else if (fmt[i] == 'E')
4636 11623575 : for (j = 0; j < XVECLEN (x, i); j++)
4637 6609164 : total += rtx_cost (XVECEXP (x, i, j), mode, code, i, speed);
4638 :
4639 4170263973 : return total;
4640 : }
4641 :
4642 : /* Fill in the structure C with information about both speed and size rtx
4643 : costs for X, which is operand OPNO in an expression with code OUTER. */
4644 :
4645 : void
4646 2219228 : get_full_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno,
4647 : struct full_rtx_costs *c)
4648 : {
4649 2219228 : c->speed = rtx_cost (x, mode, outer, opno, true);
4650 2219228 : c->size = rtx_cost (x, mode, outer, opno, false);
4651 2219228 : }
4652 :
4653 :
4654 : /* Return the cost of address expression X.
4655 : X is expected to be a properly formed address reference.
4656 :
4657 : The SPEED parameter specifies whether costs optimized for speed or size
4658 : should be returned. */
4659 :
4660 : int
4661 10895960 : address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
4662 : {
4663 : /* We may be asked for the cost of various unusual addresses, such as the
4664 : operands of a push instruction. It is not worthwhile to complicate the
4665 : target hook for such cases. */
4666 :
4667 10895960 : if (!memory_address_addr_space_p (mode, x, as))
4668 : return 1000;
4669 :
4670 10812491 : return targetm.address_cost (x, mode, as, speed);
4671 : }
4672 :
4673 : /* If the target doesn't override, compute the cost as with arithmetic. */
4674 :
4675 : int
4676 0 : default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
4677 : {
4678 0 : return rtx_cost (x, Pmode, MEM, 0, speed);
4679 : }
4680 :
4681 :
4682 : unsigned HOST_WIDE_INT
4683 683592590 : nonzero_bits (const_rtx x, machine_mode mode)
4684 : {
4685 683592590 : if (mode == VOIDmode)
4686 0 : mode = GET_MODE (x);
4687 683592590 : scalar_int_mode int_mode;
4688 683592590 : if (!is_a <scalar_int_mode> (mode, &int_mode))
4689 20252540 : return GET_MODE_MASK (mode);
4690 663340050 : return cached_nonzero_bits (x, int_mode, NULL_RTX, VOIDmode, 0);
4691 : }
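 :
 : /* Illustrative note (not part of the original source): by the CONST_INT
 : and AND cases of nonzero_bits1 below,
 : nonzero_bits ((and:SI (reg:SI R) (const_int 255)), SImode) is at most
 : 0xff, which is how zero-extensions of masked values are often proved
 : redundant. */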
4692 :
4693 : unsigned int
4694 242572290 : num_sign_bit_copies (const_rtx x, machine_mode mode)
4695 : {
4696 242572290 : if (mode == VOIDmode)
4697 1 : mode = GET_MODE (x);
4698 242572290 : scalar_int_mode int_mode;
4699 242572290 : if (!is_a <scalar_int_mode> (mode, &int_mode))
4700 : return 1;
4701 222632614 : return cached_num_sign_bit_copies (x, int_mode, NULL_RTX, VOIDmode, 0);
4702 : }
4703 :
4704 : /* Return true if nonzero_bits1 might recurse into both operands
4705 : of X. */
4706 :
4707 : static inline bool
4708 1398597890 : nonzero_bits_binary_arith_p (const_rtx x)
4709 : {
4710 1398597890 : if (!ARITHMETIC_P (x))
4711 : return false;
4712 243794623 : switch (GET_CODE (x))
4713 : {
4714 : case AND:
4715 : case XOR:
4716 : case IOR:
4717 : case UMIN:
4718 : case UMAX:
4719 : case SMIN:
4720 : case SMAX:
4721 : case PLUS:
4722 : case MINUS:
4723 : case MULT:
4724 : case DIV:
4725 : case UDIV:
4726 : case MOD:
4727 : case UMOD:
4728 : return true;
4729 : default:
4730 : return false;
4731 : }
4732 : }
4733 :
4734 : /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
4735 : It avoids exponential behavior in nonzero_bits1 when X has
4736 : identical subexpressions on the first or the second level. */
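 : /* Illustrative note (not part of the original source): for
 : X = (plus:SI (reg:SI R) (reg:SI R)), the two operands are
 : pointer-equal, so the operand's nonzero bits are computed once and
 : threaded through as KNOWN_X/KNOWN_RET rather than recomputed in each
 : branch of the recursion. */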
4737 :
4738 : static unsigned HOST_WIDE_INT
4739 1126992421 : cached_nonzero_bits (const_rtx x, scalar_int_mode mode, const_rtx known_x,
4740 : machine_mode known_mode,
4741 : unsigned HOST_WIDE_INT known_ret)
4742 : {
4743 1126992421 : if (x == known_x && mode == known_mode)
4744 : return known_ret;
4745 :
4746 : /* Try to find identical subexpressions. If found call
4747 : nonzero_bits1 on X with the subexpressions as KNOWN_X and the
4748 : precomputed value for the subexpression as KNOWN_RET. */
4749 :
4750 1124692295 : if (nonzero_bits_binary_arith_p (x))
4751 : {
4752 137562844 : rtx x0 = XEXP (x, 0);
4753 137562844 : rtx x1 = XEXP (x, 1);
4754 :
4755 : /* Check the first level. */
4756 137562844 : if (x0 == x1)
4757 59952 : return nonzero_bits1 (x, mode, x0, mode,
4758 : cached_nonzero_bits (x0, mode, known_x,
4759 59952 : known_mode, known_ret));
4760 :
4761 : /* Check the second level. */
4762 137502892 : if (nonzero_bits_binary_arith_p (x0)
4763 137502892 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4764 1100189 : return nonzero_bits1 (x, mode, x1, mode,
4765 : cached_nonzero_bits (x1, mode, known_x,
4766 1100189 : known_mode, known_ret));
4767 :
4768 136402703 : if (nonzero_bits_binary_arith_p (x1)
4769 136402703 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4770 6329 : return nonzero_bits1 (x, mode, x0, mode,
4771 : cached_nonzero_bits (x0, mode, known_x,
4772 6329 : known_mode, known_ret));
4773 : }
4774 :
4775 1123525825 : return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
4776 : }
4777 :
4778 : /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
4779 : We don't let nonzero_bits recur into num_sign_bit_copies, because that
4780 : is less useful. We can't allow both, because that results in exponential
4781 : run time recursion. There is a nullstone testcase that triggered
4782 : this. This macro avoids accidental uses of num_sign_bit_copies. */
4783 : #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
4784 :
4785 : /* Given an expression, X, compute which bits in X can be nonzero.
4786 : We don't care about bits outside of those defined in MODE.
4787 :
4788 : For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
4789 : an arithmetic operation, we can do better. */
4790 :
4791 : static unsigned HOST_WIDE_INT
4792 1124692295 : nonzero_bits1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
4793 : machine_mode known_mode,
4794 : unsigned HOST_WIDE_INT known_ret)
4795 : {
4796 1124692295 : unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4797 1124692295 : unsigned HOST_WIDE_INT inner_nz;
4798 1124692295 : enum rtx_code code = GET_CODE (x);
4799 1124692295 : machine_mode inner_mode;
4800 1124692295 : unsigned int inner_width;
4801 1124692295 : scalar_int_mode xmode;
4802 :
4803 1124692295 : unsigned int mode_width = GET_MODE_PRECISION (mode);
4804 :
4805 : /* For unary ops like ffs or popcount we want to determine the number of
4806 : nonzero bits from the operand. This only matters with very large
4807 : vector modes. An expression such as
4808 : (popcount:DI (reg:V128BI ...))
4809 : should not get a nonzero-bit mask of (1 << 7) - 1 as that could
4810 : lead to incorrect optimizations based on it, see PR123501. */
4811 1124692295 : unsigned int op_mode_width = mode_width;
4812 1124692295 : machine_mode op_mode = mode;
4813 1124692295 : if (UNARY_P (x))
4814 : {
4815 15579907 : const_rtx op = XEXP (x, 0);
4816 15579907 : if (GET_MODE_PRECISION (GET_MODE (op)).is_constant ())
4817 : {
4818 15579907 : op_mode = GET_MODE (op);
4819 15579907 : op_mode_width = GET_MODE_PRECISION (op_mode).to_constant ();
4820 : }
4821 : }
4822 :
4823 1124692295 : if (CONST_INT_P (x))
4824 : {
4825 115440256 : if (SHORT_IMMEDIATES_SIGN_EXTEND
4826 : && INTVAL (x) > 0
4827 : && mode_width < BITS_PER_WORD
4828 : && (UINTVAL (x) & (HOST_WIDE_INT_1U << (mode_width - 1))) != 0)
4829 : return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
4830 :
4831 115440256 : return UINTVAL (x);
4832 : }
4833 :
4834 1009252039 : if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
4835 : return nonzero;
4836 1008919165 : unsigned int xmode_width = GET_MODE_PRECISION (xmode);
4837 :
4838 : /* If X is wider than MODE, use its mode instead. */
4839 1008919165 : if (xmode_width > mode_width)
4840 : {
4841 18079252 : mode = xmode;
4842 18079252 : nonzero = GET_MODE_MASK (mode);
4843 18079252 : mode_width = xmode_width;
4844 : }
4845 :
4846 1008919165 : if (mode_width > HOST_BITS_PER_WIDE_INT)
4847 : /* Our only callers in this case look for single bit values. So
4848 : just return the mode mask. Those tests will then be false. */
4849 : return nonzero;
4850 :
4851 : /* If MODE is wider than X, but both are a single word for both the host
4852 : and target machines, we can compute this from which bits of the object
4853 : might be nonzero in its own mode, taking into account the fact that, on
4854 : CISC machines, accessing an object in a wider mode generally causes the
4855 : high-order bits to become undefined, so they are not known to be zero.
4856 : We extend this reasoning to RISC machines for operations that might not
4857 : operate on the full registers. */
4858 1007593130 : if (mode_width > xmode_width
4859 110225368 : && xmode_width <= BITS_PER_WORD
4860 : && xmode_width <= HOST_BITS_PER_WIDE_INT
4861 : && !(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
4862 : {
4863 93726768 : nonzero &= cached_nonzero_bits (x, xmode,
4864 : known_x, known_mode, known_ret);
4865 93726768 : nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode);
4866 93726768 : return nonzero;
4867 : }
4868 :
4869 : /* Please keep nonzero_bits_binary_arith_p above in sync with
4870 : the code in the switch below. */
4871 913866362 : switch (code)
4872 : {
4873 505101519 : case REG:
4874 : #if defined(POINTERS_EXTEND_UNSIGNED)
4875 : /* If pointers extend unsigned and this is a pointer in Pmode, say that
4876 : all the bits above ptr_mode are known to be zero. */
4877 : /* As we do not know which address space the pointer is referring to,
4878 : we can do this only if the target does not support different pointer
4879 : or address modes depending on the address space. */
4880 505101519 : if (target_default_pointer_address_modes_p ()
4881 : && POINTERS_EXTEND_UNSIGNED
4882 567936997 : && xmode == Pmode
4883 323593831 : && REG_POINTER (x)
4884 591035310 : && !targetm.have_ptr_extend ())
4885 85933791 : nonzero &= GET_MODE_MASK (ptr_mode);
4886 : #endif
4887 :
4888 : /* Include declared information about alignment of pointers. */
4889 : /* ??? We don't properly preserve REG_POINTER changes across
4890 : pointer-to-integer casts, so we can't trust it except for
4891 : things that we know must be pointers. See execute/960116-1.c. */
4892 505101519 : if ((x == stack_pointer_rtx
4893 504106439 : || x == frame_pointer_rtx
4894 490129332 : || x == arg_pointer_rtx)
4895 519643732 : && REGNO_POINTER_ALIGN (REGNO (x)))
4896 : {
4897 15537293 : unsigned HOST_WIDE_INT alignment
4898 15537293 : = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4899 :
4900 : #ifdef PUSH_ROUNDING
4901 : /* If PUSH_ROUNDING is defined, it is possible for the
4902 : stack to be momentarily aligned only to that amount,
4903 : so we pick the least alignment. */
4904 15537293 : if (x == stack_pointer_rtx && targetm.calls.push_argument (0))
4905 : {
4906 770613 : poly_uint64 rounded_1 = PUSH_ROUNDING (poly_int64 (1));
4907 770613 : alignment = MIN (known_alignment (rounded_1), alignment);
4908 : }
4909 : #endif
4910 :
4911 15537293 : nonzero &= ~(alignment - 1);
4912 : }
4913 :
4914 505101519 : {
4915 505101519 : unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4916 505101519 : rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, xmode, mode,
4917 : &nonzero_for_hook);
4918 :
4919 505101519 : if (new_rtx)
4920 6 : nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4921 : known_mode, known_ret);
4922 :
4923 505101519 : return nonzero_for_hook;
4924 : }
4925 :
4926 : case MEM:
4927 : /* In many, if not most, RISC machines, reading a byte from memory
4928 : zeros the rest of the register. Noticing that fact saves a lot
4929 : of extra zero-extends. */
4930 : if (load_extend_op (xmode) == ZERO_EXTEND)
4931 : nonzero &= GET_MODE_MASK (xmode);
4932 : break;
4933 :
4934 9124054 : case EQ: case NE:
4935 9124054 : case UNEQ: case LTGT:
4936 9124054 : case GT: case GTU: case UNGT:
4937 9124054 : case LT: case LTU: case UNLT:
4938 9124054 : case GE: case GEU: case UNGE:
4939 9124054 : case LE: case LEU: case UNLE:
4940 9124054 : case UNORDERED: case ORDERED:
4941 : /* If this produces an integer result, we know which bits are set.
4942 : Code here used to clear bits outside the mode of X, but that is
4943 : now done above. */
4944 : /* Mind that MODE is the mode the caller wants to look at this
4945 : operation in, and not the actual operation mode. We can wind
4946 : up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4947 : that describes the results of a vector compare. */
4948 9124054 : if (GET_MODE_CLASS (xmode) == MODE_INT
4949 9124054 : && mode_width <= HOST_BITS_PER_WIDE_INT)
4950 1124692295 : nonzero = STORE_FLAG_VALUE;
4951 : break;
4952 :
4953 973740 : case NEG:
4954 : #if 0
4955 : /* Disabled to avoid exponential mutual recursion between nonzero_bits
4956 : and num_sign_bit_copies. */
4957 : if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
4958 : nonzero = 1;
4959 : #endif
4960 :
4961 973740 : if (xmode_width < mode_width)
4962 0 : nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode));
4963 : break;
4964 :
4965 : case ABS:
4966 : #if 0
4967 : /* Disabled to avoid exponential mutual recursion between nonzero_bits
4968 : and num_sign_bit_copies. */
4969 : if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
4970 : nonzero = 1;
4971 : #endif
4972 : break;
4973 :
4974 9819 : case TRUNCATE:
4975 9819 : nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4976 : known_x, known_mode, known_ret)
4977 9819 : & GET_MODE_MASK (mode));
4978 9819 : break;
4979 :
4980 6715918 : case ZERO_EXTEND:
4981 6715918 : nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4982 : known_x, known_mode, known_ret);
4983 6715918 : if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4984 6715918 : nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4985 : break;
4986 :
4987 1736243 : case SIGN_EXTEND:
4988 : /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4989 : Otherwise, show all the bits in the outer mode but not the inner
4990 : may be nonzero. */
4991 1736243 : inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4992 : known_x, known_mode, known_ret);
4993 1736243 : if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4994 : {
4995 1736243 : inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4996 1736243 : if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4997 1716659 : inner_nz |= (GET_MODE_MASK (mode)
4998 1716659 : & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4999 : }
5000 :
5001 1736243 : nonzero &= inner_nz;
5002 1736243 : break;
5003 :
5004 15802326 : case AND:
5005 15802326 : nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
5006 : known_x, known_mode, known_ret)
5007 15802326 : & cached_nonzero_bits (XEXP (x, 1), mode,
5008 : known_x, known_mode, known_ret);
5009 15802326 : break;
5010 :
5011 9923253 : case XOR: case IOR:
5012 9923253 : case UMIN: case UMAX: case SMIN: case SMAX:
5013 9923253 : {
5014 9923253 : unsigned HOST_WIDE_INT nonzero0
5015 9923253 : = cached_nonzero_bits (XEXP (x, 0), mode,
5016 : known_x, known_mode, known_ret);
5017 :
5018 : /* Don't call nonzero_bits for the second time if it cannot change
5019 : anything. */
5020 9923253 : if ((nonzero & nonzero0) != nonzero)
5021 9296958 : nonzero &= nonzero0
5022 4648479 : | cached_nonzero_bits (XEXP (x, 1), mode,
5023 : known_x, known_mode, known_ret);
5024 : }
5025 : break;
5026 :
5027 92826887 : case PLUS: case MINUS:
5028 92826887 : case MULT:
5029 92826887 : case DIV: case UDIV:
5030 92826887 : case MOD: case UMOD:
5031 : /* We can apply the rules of arithmetic to compute the number of
5032 : high- and low-order zero bits of these operations. We start by
5033 : computing the width (position of the highest-order nonzero bit)
5034 : and the number of low-order zero bits for each value. */
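: /* Worked example (illustrative, not from the original source): for
:    (plus:SI A B) with nz0 == 0xff (width0 == 8, low0 == 0) and
:    nz1 == 0xf0 (width1 == 8, low1 == 4), result_width ==
:    MAX (8, 8) + 1 == 9 and result_low == MIN (0, 4) == 0, so the
:    sum is known to fit in the low nine bits.  */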
5035 92826887 : {
5036 92826887 : unsigned HOST_WIDE_INT nz0
5037 92826887 : = cached_nonzero_bits (XEXP (x, 0), mode,
5038 : known_x, known_mode, known_ret);
5039 92826887 : unsigned HOST_WIDE_INT nz1
5040 92826887 : = cached_nonzero_bits (XEXP (x, 1), mode,
5041 : known_x, known_mode, known_ret);
5042 92826887 : int sign_index = xmode_width - 1;
5043 92826887 : int width0 = floor_log2 (nz0) + 1;
5044 92826887 : int width1 = floor_log2 (nz1) + 1;
5045 92826887 : int low0 = ctz_or_zero (nz0);
5046 92826887 : int low1 = ctz_or_zero (nz1);
5047 92826887 : unsigned HOST_WIDE_INT op0_maybe_minusp
5048 92826887 : = nz0 & (HOST_WIDE_INT_1U << sign_index);
5049 92826887 : unsigned HOST_WIDE_INT op1_maybe_minusp
5050 : = nz1 & (HOST_WIDE_INT_1U << sign_index);
5051 92826887 : unsigned int result_width = mode_width;
5052 92826887 : int result_low = 0;
5053 :
5054 92826887 : switch (code)
5055 : {
5056 69082915 : case PLUS:
5057 69082915 : result_width = MAX (width0, width1) + 1;
5058 69082915 : result_low = MIN (low0, low1);
5059 : break;
5060 14712717 : case MINUS:
5061 14712717 : result_low = MIN (low0, low1);
5062 : break;
5063 7245002 : case MULT:
5064 7245002 : result_width = width0 + width1;
5065 7245002 : result_low = low0 + low1;
5066 7245002 : break;
5067 685285 : case DIV:
5068 685285 : if (width1 == 0)
5069 : break;
5070 672378 : if (!op0_maybe_minusp && !op1_maybe_minusp)
5071 23145 : result_width = width0;
5072 : break;
5073 283522 : case UDIV:
5074 283522 : if (width1 == 0)
5075 : break;
5076 282654 : result_width = width0;
5077 282654 : break;
5078 419889 : case MOD:
5079 419889 : if (width1 == 0)
5080 : break;
5081 408993 : if (!op0_maybe_minusp && !op1_maybe_minusp)
5082 21417 : result_width = MIN (width0, width1);
5083 408993 : result_low = MIN (low0, low1);
5084 : break;
5085 397557 : case UMOD:
5086 397557 : if (width1 == 0)
5087 : break;
5088 397453 : result_width = MIN (width0, width1);
5089 397453 : result_low = MIN (low0, low1);
5090 : break;
5091 0 : default:
5092 0 : gcc_unreachable ();
5093 : }
5094 :
5095 : /* Note that mode_width <= HOST_BITS_PER_WIDE_INT, see above. */
5096 92826887 : if (result_width < mode_width)
5097 4091330 : nonzero &= (HOST_WIDE_INT_1U << result_width) - 1;
5098 :
5099 92826887 : if (result_low > 0)
5100 : {
5101 6788441 : if (result_low < HOST_BITS_PER_WIDE_INT)
5102 6788429 : nonzero &= ~((HOST_WIDE_INT_1U << result_low) - 1);
5103 : else
5104 : nonzero = 0;
5105 : }
5106 : }
5107 : break;
5108 :
5109 1161778 : case ZERO_EXTRACT:
5110 1161778 : if (CONST_INT_P (XEXP (x, 1))
5111 1161401 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5112 1161220 : nonzero &= (HOST_WIDE_INT_1U << INTVAL (XEXP (x, 1))) - 1;
5113 : break;
5114 :
5115 73594080 : case SUBREG:
5116 : /* If this is a SUBREG formed for a promoted variable that has
5117 : been zero-extended, we know that at least the high-order bits
5118 : are zero, though others might be too. */
5119 73594080 : if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
5120 39093 : nonzero = GET_MODE_MASK (xmode)
5121 39093 : & cached_nonzero_bits (SUBREG_REG (x), xmode,
5122 : known_x, known_mode, known_ret);
5123 :
5124 : /* If the inner mode is a single word for both the host and target
5125 : machines, we can compute this from which bits of the inner
5126 : object might be nonzero. */
5127 73594080 : inner_mode = GET_MODE (SUBREG_REG (x));
5128 73594080 : if (GET_MODE_PRECISION (inner_mode).is_constant (&inner_width)
5129 78810977 : && inner_width <= BITS_PER_WORD
5130 : && inner_width <= HOST_BITS_PER_WIDE_INT)
5131 : {
5132 69651807 : nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
5133 : known_x, known_mode, known_ret);
5134 :
5135 : /* On a typical CISC machine, accessing an object in a wider mode
5136 : causes the high-order bits to become undefined. So they are
5137 : not known to be zero.
5138 :
5139 : On a typical RISC machine, we only have to worry about the way
5140 : loads are extended. Otherwise, if we get a reload for the inner
5141 : part, it may be loaded from the stack, and then we may lose all
5142 : the zero bits that existed before the store to the stack. */
5143 69651807 : rtx_code extend_op;
5144 69651807 : if ((!WORD_REGISTER_OPERATIONS
5145 : || ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND
5146 : ? val_signbit_known_set_p (inner_mode, nonzero)
5147 : : extend_op != ZERO_EXTEND)
5148 : || !MEM_P (SUBREG_REG (x)))
5149 : && xmode_width > inner_width)
5150 53145319 : nonzero
5151 53145319 : |= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode));
5152 : }
5153 : break;
5154 :
5155 55045736 : case ASHIFT:
5156 55045736 : case ASHIFTRT:
5157 55045736 : case LSHIFTRT:
5158 55045736 : case ROTATE:
5159 55045736 : case ROTATERT:
5160 : /* The nonzero bits are in two classes: any bits within MODE
5161 : that aren't in xmode are always significant. The rest of the
5162 : nonzero bits are those that are significant in the operand of
5163 : the shift when shifted the appropriate number of bits. This
5164 : shows that high-order bits are cleared by the right shift and
5165 : low-order bits by left shifts. */
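: /* Illustrative example: for (ashift:SI X (const_int 4)) with
:    op_nonzero == 0x0000ffff, INNER below becomes 0x000ffff0, so
:    the four low-order bits are known to be zero.  */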
5166 55045736 : if (CONST_INT_P (XEXP (x, 1))
5167 53514407 : && INTVAL (XEXP (x, 1)) >= 0
5168 53514261 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5169 53514173 : && INTVAL (XEXP (x, 1)) < xmode_width)
5170 : {
5171 53514102 : int count = INTVAL (XEXP (x, 1));
5172 53514102 : unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (xmode);
5173 53514102 : unsigned HOST_WIDE_INT op_nonzero
5174 53514102 : = cached_nonzero_bits (XEXP (x, 0), mode,
5175 : known_x, known_mode, known_ret);
5176 53514102 : unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5177 53514102 : unsigned HOST_WIDE_INT outer = 0;
5178 :
5179 53514102 : if (mode_width > xmode_width)
5180 0 : outer = (op_nonzero & nonzero & ~mode_mask);
5181 :
5182 53514102 : switch (code)
5183 : {
5184 31075065 : case ASHIFT:
5185 31075065 : inner <<= count;
5186 31075065 : break;
5187 :
5188 14227336 : case LSHIFTRT:
5189 14227336 : inner >>= count;
5190 14227336 : break;
5191 :
5192 8097841 : case ASHIFTRT:
5193 8097841 : inner >>= count;
5194 :
5195 : /* If the sign bit may have been nonzero before the shift, we
5196 : need to mark all the places it could have been copied to
5197 : by the shift as possibly nonzero. */
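: /* E.g. (illustrative): with xmode_width == 32 and count == 4, if
:    bit 27 of INNER is still set after the shift, bits 28-31 are
:    ORed in as possibly nonzero too.  */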
5198 8097841 : if (inner & (HOST_WIDE_INT_1U << (xmode_width - 1 - count)))
5199 8083887 : inner |= (((HOST_WIDE_INT_1U << count) - 1)
5200 8083887 : << (xmode_width - count));
5201 : break;
5202 :
5203 72500 : case ROTATE:
5204 72500 : inner = (inner << (count % xmode_width)
5205 72500 : | (inner >> (xmode_width - (count % xmode_width))))
5206 : & mode_mask;
5207 72500 : break;
5208 :
5209 41360 : case ROTATERT:
5210 41360 : inner = (inner >> (count % xmode_width)
5211 41360 : | (inner << (xmode_width - (count % xmode_width))))
5212 : & mode_mask;
5213 41360 : break;
5214 :
5215 : default:
5216 : gcc_unreachable ();
5217 : }
5218 :
5219 53514102 : nonzero &= (outer | inner);
5220 : }
5221 : break;
5222 :
5223 5084 : case FFS:
5224 5084 : case POPCOUNT:
5225 : /* This is at most the number of bits in the mode. */
5226 5084 : nonzero = (HOST_WIDE_INT_UC (2) << (floor_log2 (op_mode_width))) - 1;
5227 5084 : break;
5228 :
5229 527158 : case CLZ:
5230 : /* If CLZ has a known value at zero, then the nonzero bits are
5231 : that value, plus the number of bits in the mode minus one.
5232 : If we have a different operand mode, don't try to get nonzero
5233 : bits as currently nonzero is not a poly_int. */
5234 527158 : if (op_mode == mode
5235 1054304 : && CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
5236 1232 : nonzero
5237 2464 : |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
5238 : else
5239 : nonzero = -1;
5240 : break;
5241 :
5242 47134 : case CTZ:
5243 : /* If CTZ has a known value at zero, then the nonzero bits are
5244 : that value, plus the number of bits in the mode minus one.
5245 : See above for op_mode != mode. */
5246 47134 : if (op_mode == mode
5247 94268 : && CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
5248 1363 : nonzero
5249 2726 : |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
5250 : else
5251 : nonzero = -1;
5252 : break;
5253 :
5254 8 : case CLRSB:
5255 : /* This is at most the number of bits in the mode minus 1. */
5256 8 : nonzero = (HOST_WIDE_INT_1U << (floor_log2 (op_mode_width))) - 1;
5257 8 : break;
5258 :
5259 : case PARITY:
5260 1124692295 : nonzero = 1;
5261 : break;
5262 :
5263 3714349 : case IF_THEN_ELSE:
5264 3714349 : {
5265 3714349 : unsigned HOST_WIDE_INT nonzero_true
5266 3714349 : = cached_nonzero_bits (XEXP (x, 1), mode,
5267 : known_x, known_mode, known_ret);
5268 :
5269 : /* Don't call nonzero_bits for the second time if it cannot change
5270 : anything. */
5271 3714349 : if ((nonzero & nonzero_true) != nonzero)
5272 3095276 : nonzero &= nonzero_true
5273 1547638 : | cached_nonzero_bits (XEXP (x, 2), mode,
5274 : known_x, known_mode, known_ret);
5275 : }
5276 : break;
5277 :
5278 : default:
5279 : break;
5280 : }
5281 :
5282 : return nonzero;
5283 : }
5284 :
5285 : /* See the macro definition above. */
5286 : #undef cached_num_sign_bit_copies
5287 :
5288 :
5289 : /* Return true if num_sign_bit_copies1 might recurse into both operands
5290 : of X. */
5291 :
5292 : static inline bool
5293 436608062 : num_sign_bit_copies_binary_arith_p (const_rtx x)
5294 : {
5295 436608062 : if (!ARITHMETIC_P (x))
5296 : return false;
5297 79460373 : switch (GET_CODE (x))
5298 : {
5299 : case IOR:
5300 : case AND:
5301 : case XOR:
5302 : case SMIN:
5303 : case SMAX:
5304 : case UMIN:
5305 : case UMAX:
5306 : case PLUS:
5307 : case MINUS:
5308 : case MULT:
5309 : return true;
5310 : default:
5311 : return false;
5312 : }
5313 : }
5314 :
5315 : /* The function cached_num_sign_bit_copies is a wrapper around
5316 : num_sign_bit_copies1. It avoids exponential behavior in
5317 : num_sign_bit_copies1 when X has identical subexpressions on the
5318 : first or the second level. */
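: /* E.g. (illustrative): for (plus (reg A) (reg A)), the value for
:    (reg A) is computed once and passed down via KNOWN_X/KNOWN_RET
:    instead of being recomputed in both recursive branches.  */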
5319 :
5320 : static unsigned int
5321 341695932 : cached_num_sign_bit_copies (const_rtx x, scalar_int_mode mode,
5322 : const_rtx known_x, machine_mode known_mode,
5323 : unsigned int known_ret)
5324 : {
5325 341695932 : if (x == known_x && mode == known_mode)
5326 : return known_ret;
5327 :
5328 : /* Try to find identical subexpressions. If found call
5329 : num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
5330 : the precomputed value for the subexpression as KNOWN_RET. */
5331 :
5332 339866155 : if (num_sign_bit_copies_binary_arith_p (x))
5333 : {
5334 48779594 : rtx x0 = XEXP (x, 0);
5335 48779594 : rtx x1 = XEXP (x, 1);
5336 :
5337 : /* Check the first level. */
5338 48779594 : if (x0 == x1)
5339 17263 : return
5340 17263 : num_sign_bit_copies1 (x, mode, x0, mode,
5341 : cached_num_sign_bit_copies (x0, mode, known_x,
5342 : known_mode,
5343 17263 : known_ret));
5344 :
5345 : /* Check the second level. */
5346 48762331 : if (num_sign_bit_copies_binary_arith_p (x0)
5347 48762331 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
5348 782755 : return
5349 782755 : num_sign_bit_copies1 (x, mode, x1, mode,
5350 : cached_num_sign_bit_copies (x1, mode, known_x,
5351 : known_mode,
5352 782755 : known_ret));
5353 :
5354 47979576 : if (num_sign_bit_copies_binary_arith_p (x1)
5355 47979576 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
5356 400 : return
5357 400 : num_sign_bit_copies1 (x, mode, x0, mode,
5358 : cached_num_sign_bit_copies (x0, mode, known_x,
5359 : known_mode,
5360 400 : known_ret));
5361 : }
5362 :
5363 339065737 : return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
5364 : }
5365 :
5366 : /* Return the number of bits at the high-order end of X that are known to
5367 : be equal to the sign bit. X will be used in mode MODE. The returned
5368 : value will always be between 1 and the number of bits in MODE. */
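: /* Worked example (illustrative, not from the original source): in
:    SImode, (const_int -4) is ...11111100 in binary, so bits 2
:    through 31 all equal the sign bit and the function returns 30:
:    the complemented, remasked value is 3, and 32 - floor_log2 (3)
:    - 1 == 30.  */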
5369 :
5370 : static unsigned int
5371 339866155 : num_sign_bit_copies1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
5372 : machine_mode known_mode,
5373 : unsigned int known_ret)
5374 : {
5375 339866155 : enum rtx_code code = GET_CODE (x);
5376 339866155 : unsigned int bitwidth = GET_MODE_PRECISION (mode);
5377 339866155 : int num0, num1, result;
5378 339866155 : unsigned HOST_WIDE_INT nonzero;
5379 :
5380 339866155 : if (CONST_INT_P (x))
5381 : {
5382 : /* If the constant is negative, take its 1's complement and remask.
5383 : Then see how many zero bits we have. */
5384 43639086 : nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
5385 43639086 : if (bitwidth <= HOST_BITS_PER_WIDE_INT
5386 43344325 : && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5387 19276993 : nonzero = (~nonzero) & GET_MODE_MASK (mode);
5388 :
5389 43639086 : return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
5390 : }
5391 :
5392 296227069 : scalar_int_mode xmode, inner_mode;
5393 498769135 : if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
5394 : return 1;
5395 :
5396 295902235 : unsigned int xmode_width = GET_MODE_PRECISION (xmode);
5397 :
5398 : /* For a smaller mode, just ignore the high bits. */
5399 295902235 : if (bitwidth < xmode_width)
5400 : {
5401 35612 : num0 = cached_num_sign_bit_copies (x, xmode,
5402 : known_x, known_mode, known_ret);
5403 35612 : return MAX (1, num0 - (int) (xmode_width - bitwidth));
5404 : }
5405 :
5406 295866623 : if (bitwidth > xmode_width)
5407 : {
5408 : /* If this machine does not do all register operations on the entire
5409 : register and MODE is wider than the mode of X, we can say nothing
5410 : at all about the high-order bits. We extend this reasoning to RISC
5411 : machines for operations that might not operate on full registers. */
5412 : if (!(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
5413 : return 1;
5414 :
5415 : /* Likewise on machines that do, if the mode of the object is smaller
5416 : than a word and loads of that size don't sign extend, we can say
5417 : nothing about the high-order bits. */
5418 : if (xmode_width < BITS_PER_WORD
5419 : && load_extend_op (xmode) != SIGN_EXTEND)
5420 : return 1;
5421 : }
5422 :
5423 : /* Please keep num_sign_bit_copies_binary_arith_p above in sync with
5424 : the code in the switch below. */
5425 295866613 : switch (code)
5426 : {
5427 155054607 : case REG:
5428 :
5429 : #if defined(POINTERS_EXTEND_UNSIGNED)
5430 : /* If pointers extend signed and this is a pointer in Pmode, say that
5431 : all the bits above ptr_mode are known to be sign bit copies. */
5432 : /* As we do not know which address space the pointer is referring to,
5433 : we can do this only if the target does not support different pointer
5434 : or address modes depending on the address space. */
5435 155054607 : if (target_default_pointer_address_modes_p ()
5436 : && ! POINTERS_EXTEND_UNSIGNED && xmode == Pmode
5437 : && mode == Pmode && REG_POINTER (x)
5438 : && !targetm.have_ptr_extend ())
5439 : return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
5440 : #endif
5441 :
5442 155054607 : {
5443 155054607 : unsigned int copies_for_hook = 1, copies = 1;
5444 155054607 : rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, xmode, mode,
5445 : &copies_for_hook);
5446 :
5447 155054607 : if (new_rtx)
5448 5 : copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
5449 : known_mode, known_ret);
5450 :
5451 155054607 : if (copies > 1 || copies_for_hook > 1)
5452 22146890 : return MAX (copies, copies_for_hook);
5453 :
5454 : /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
5455 : }
5456 132907717 : break;
5457 :
5458 : case MEM:
5459 : /* Some RISC machines sign-extend all loads smaller than a word. */
5460 : if (load_extend_op (xmode) == SIGN_EXTEND)
5461 : return MAX (1, ((int) bitwidth - (int) xmode_width + 1));
5462 : break;
5463 :
5464 19329284 : case SUBREG:
5465 : /* If this is a SUBREG for a promoted object that is sign-extended
5466 : and we are looking at it in a wider mode, we know that at least the
5467 : high-order bits are known to be sign bit copies. */
5468 :
5469 19329284 : if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
5470 : {
5471 0 : num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
5472 : known_x, known_mode, known_ret);
5473 0 : return MAX ((int) bitwidth - (int) xmode_width + 1, num0);
5474 : }
5475 :
5476 19329284 : if (is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)), &inner_mode))
5477 : {
5478 : /* For a smaller object, just ignore the high bits. */
5479 19125237 : if (bitwidth <= GET_MODE_PRECISION (inner_mode))
5480 : {
5481 5997561 : num0 = cached_num_sign_bit_copies (SUBREG_REG (x), inner_mode,
5482 : known_x, known_mode,
5483 : known_ret);
5484 5997561 : return MAX (1, num0 - (int) (GET_MODE_PRECISION (inner_mode)
5485 : - bitwidth));
5486 : }
5487 :
5488 : /* For paradoxical SUBREGs on machines where all register operations
5489 : affect the entire register, just look inside. Note that we are
5490 : passing MODE to the recursive call, so the number of sign bit
5491 : copies will remain relative to that mode, not the inner mode.
5492 :
5493 : This works only if loads sign extend. Otherwise, if we get a
5494 : reload for the inner part, it may be loaded from the stack, and
5495 : then we lose all sign bit copies that existed before the store
5496 : to the stack. */
5497 : if (WORD_REGISTER_OPERATIONS
5498 : && load_extend_op (inner_mode) == SIGN_EXTEND
5499 : && paradoxical_subreg_p (x)
5500 : && MEM_P (SUBREG_REG (x)))
5501 : return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
5502 : known_x, known_mode, known_ret);
5503 : }
5504 : break;
5505 :
5506 2927 : case SIGN_EXTRACT:
5507 2927 : if (CONST_INT_P (XEXP (x, 1)))
5508 2927 : return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
5509 : break;
5510 :
5511 1740343 : case SIGN_EXTEND:
5512 1740343 : if (is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
5513 1740343 : return (bitwidth - GET_MODE_PRECISION (inner_mode)
5514 1740343 : + cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
5515 1740343 : known_x, known_mode, known_ret));
5516 : break;
5517 :
5518 86 : case TRUNCATE:
5519 : /* For a smaller object, just ignore the high bits. */
5520 86 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
5521 86 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
5522 : known_x, known_mode, known_ret);
5523 86 : return MAX (1, (num0 - (int) (GET_MODE_PRECISION (inner_mode)
5524 : - bitwidth)));
5525 :
5526 1054211 : case NOT:
5527 1054211 : return cached_num_sign_bit_copies (XEXP (x, 0), mode,
5528 1054211 : known_x, known_mode, known_ret);
5529 :
5530 21165 : case ROTATE: case ROTATERT:
5531 : /* If we are rotating left by a number of bits less than the number
5532 : of sign bit copies, we can just subtract that amount from the
5533 : number. */
5534 21165 : if (CONST_INT_P (XEXP (x, 1))
5535 11798 : && INTVAL (XEXP (x, 1)) >= 0
5536 11795 : && INTVAL (XEXP (x, 1)) < (int) bitwidth)
5537 : {
5538 11795 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5539 : known_x, known_mode, known_ret);
5540 11795 : return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
5541 : : (int) bitwidth - INTVAL (XEXP (x, 1))));
5542 : }
5543 : break;
5544 :
5545 626117 : case NEG:
5546 : /* In general, this subtracts one sign bit copy. But if the value
5547 : is known to be positive, the number of sign bit copies is the
5548 : same as that of the input. Finally, if the input has just one bit
5549 : that might be nonzero, all the bits are copies of the sign bit. */
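: /* E.g. (illustrative): if the operand is known to be 0 or 1
:    (NONZERO == 1 below), the negation is 0 or -1, every bit of
:    which equals the sign bit, so BITWIDTH is returned.  */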
5550 626117 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5551 : known_x, known_mode, known_ret);
5552 626117 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5553 16538 : return num0 > 1 ? num0 - 1 : 1;
5554 :
5555 609579 : nonzero = nonzero_bits (XEXP (x, 0), mode);
5556 609579 : if (nonzero == 1)
5557 : return bitwidth;
5558 :
5559 327319 : if (num0 > 1
5560 83965 : && ((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero))
5561 46254 : num0--;
5562 :
5563 327319 : return num0;
5564 :
5565 5563198 : case IOR: case AND: case XOR:
5566 5563198 : case SMIN: case SMAX: case UMIN: case UMAX:
5567 : /* Logical operations will preserve the number of sign-bit copies.
5568 : MIN and MAX operations always return one of the operands. */
5569 5563198 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5570 : known_x, known_mode, known_ret);
5571 5563198 : num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5572 : known_x, known_mode, known_ret);
5573 :
5574 : /* If num1 is clearing some of the top bits then regardless of
5575 : the other term, we are guaranteed to have at least that many
5576 : high-order zero bits. */
5577 5563198 : if (code == AND
5578 5563198 : && num1 > 1
5579 2181289 : && bitwidth <= HOST_BITS_PER_WIDE_INT
5580 2174805 : && CONST_INT_P (XEXP (x, 1))
5581 1979769 : && (UINTVAL (XEXP (x, 1))
5582 1979769 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) == 0)
5583 : return num1;
5584 :
5585 : /* Similarly for IOR when setting high-order bits. */
5586 4138471 : if (code == IOR
5587 4138471 : && num1 > 1
5588 457761 : && bitwidth <= HOST_BITS_PER_WIDE_INT
5589 456214 : && CONST_INT_P (XEXP (x, 1))
5590 137142 : && (UINTVAL (XEXP (x, 1))
5591 137142 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5592 : return num1;
5593 :
5594 4134363 : return MIN (num0, num1);
5595 :
5596 41958578 : case PLUS: case MINUS:
5597 : /* For addition and subtraction, we can have a 1-bit carry. However,
5598 : if we are subtracting 1 from a positive number, there will not
5599 : be such a carry. Furthermore, if the positive number is known to
5600 : be 0 or 1, we know the result is either -1 or 0. */
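: /* E.g. (illustrative): if both operands of a PLUS have 12
:    sign-bit copies in SImode, the result still has at least
:    MIN (12, 12) - 1 == 11, since the carry can invalidate at most
:    one copy.  */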
5601 :
5602 41958578 : if (code == PLUS && XEXP (x, 1) == constm1_rtx
5603 1288619 : && bitwidth <= HOST_BITS_PER_WIDE_INT)
5604 : {
5605 1283769 : nonzero = nonzero_bits (XEXP (x, 0), mode);
5606 1283769 : if (((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero) == 0)
5607 88939 : return (nonzero == 1 || nonzero == 0 ? bitwidth
5608 83117 : : bitwidth - floor_log2 (nonzero) - 1);
5609 : }
5610 :
5611 41869639 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5612 : known_x, known_mode, known_ret);
5613 41869639 : num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5614 : known_x, known_mode, known_ret);
5615 41869639 : result = MAX (1, MIN (num0, num1) - 1);
5616 :
5617 41869639 : return result;
5618 :
5619 1257692 : case MULT:
5620 : /* The number of bits of the product is the sum of the number of
5621 : bits of both terms. However, unless one of the terms is known
5622 : to be positive, we must allow for an additional bit since negating
5623 : a negative number can remove one sign bit copy. */
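: /* E.g. (illustrative): with bitwidth == 32 and num0 == num1 == 20,
:    each factor needs at most 13 significant bits (value plus sign),
:    so RESULT below starts at 32 - 12 - 12 == 8 and drops to 7 if
:    both factors might be negative.  */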
5624 :
5625 1257692 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5626 : known_x, known_mode, known_ret);
5627 1257692 : num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5628 : known_x, known_mode, known_ret);
5629 :
5630 1257692 : result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
5631 1257692 : if (result > 0
5632 1257692 : && (bitwidth > HOST_BITS_PER_WIDE_INT
5633 328649 : || (((nonzero_bits (XEXP (x, 0), mode)
5634 328649 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5635 181460 : && ((nonzero_bits (XEXP (x, 1), mode)
5636 : & (HOST_WIDE_INT_1U << (bitwidth - 1)))
5637 181460 : != 0))))
5638 31085 : result--;
5639 :
5640 1257692 : return MAX (1, result);
5641 :
5642 121456 : case UDIV:
5643 : /* The result must be <= the first operand. If the first operand
5644 : has the high bit set, we know nothing about the number of sign
5645 : bit copies. */
5646 121456 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5647 : return 1;
5648 121456 : else if ((nonzero_bits (XEXP (x, 0), mode)
5649 121456 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5650 : return 1;
5651 : else
5652 22848 : return cached_num_sign_bit_copies (XEXP (x, 0), mode,
5653 22848 : known_x, known_mode, known_ret);
5654 :
5655 118883 : case UMOD:
5656 : /* The result must be <= the second operand. If the second operand
5657 : has (or just might have) the high bit set, we know nothing about
5658 : the number of sign bit copies. */
5659 118883 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5660 : return 1;
5661 118883 : else if ((nonzero_bits (XEXP (x, 1), mode)
5662 118883 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5663 : return 1;
5664 : else
5665 30807 : return cached_num_sign_bit_copies (XEXP (x, 1), mode,
5666 30807 : known_x, known_mode, known_ret);
5667 :
5668 209372 : case DIV:
5669 : /* Similar to unsigned division, except that we have to worry about
5670 : the case where the divisor is negative, in which case we have
5671 : to add 1. */
5672 209372 : result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5673 : known_x, known_mode, known_ret);
5674 209372 : if (result > 1
5675 209372 : && (bitwidth > HOST_BITS_PER_WIDE_INT
5676 18009 : || (nonzero_bits (XEXP (x, 1), mode)
5677 18009 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
5678 15345 : result--;
5679 :
5680 209372 : return result;
5681 :
5682 134017 : case MOD:
5683 134017 : result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5684 : known_x, known_mode, known_ret);
5685 134017 : if (result > 1
5686 134017 : && (bitwidth > HOST_BITS_PER_WIDE_INT
5687 23090 : || (nonzero_bits (XEXP (x, 1), mode)
5688 23090 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
5689 10154 : result--;
5690 :
5691 134017 : return result;
5692 :
5693 937056 : case ASHIFTRT:
5694 : /* Shifts by a constant add to the number of bits equal to the
5695 : sign bit. */
5696 937056 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5697 : known_x, known_mode, known_ret);
5698 937056 : if (CONST_INT_P (XEXP (x, 1))
5699 897616 : && INTVAL (XEXP (x, 1)) > 0
5700 897616 : && INTVAL (XEXP (x, 1)) < xmode_width)
5701 897616 : num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
5702 :
5703 937056 : return num0;
5704 :
5705 8790182 : case ASHIFT:
5706 : /* Left shifts destroy copies. */
5707 8790182 : if (!CONST_INT_P (XEXP (x, 1))
5708 8594486 : || INTVAL (XEXP (x, 1)) < 0
5709 8594342 : || INTVAL (XEXP (x, 1)) >= (int) bitwidth
5710 8594300 : || INTVAL (XEXP (x, 1)) >= xmode_width)
5711 : return 1;
5712 :
5713 8594300 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5714 : known_x, known_mode, known_ret);
5715 8594300 : return MAX (1, num0 - INTVAL (XEXP (x, 1)));
5716 :
5717 743856 : case IF_THEN_ELSE:
5718 743856 : num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5719 : known_x, known_mode, known_ret);
5720 743856 : num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
5721 : known_x, known_mode, known_ret);
5722 743856 : return MIN (num0, num1);
5723 :
5724 2228979 : case EQ: case NE: case GE: case GT: case LE: case LT:
5725 2228979 : case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
5726 2228979 : case GEU: case GTU: case LEU: case LTU:
5727 2228979 : case UNORDERED: case ORDERED:
5728 : /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
5729 : Then see how many zero bits we have. */
5730 2228979 : nonzero = STORE_FLAG_VALUE;
5731 2228979 : if (bitwidth <= HOST_BITS_PER_WIDE_INT
5732 2228979 : && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5733 0 : nonzero = (~nonzero) & GET_MODE_MASK (mode);
5734 :
5735 2228979 : return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
5736 :
5737 : default:
5738 : break;
5739 : }
5740 :
5741 : /* If we haven't been able to figure it out by one of the above rules,
5742 : see if some of the high-order bits are known to be zero. If so,
5743 : count those bits and return one less than that amount. If we can't
5744 : safely compute the mask for this mode, always return BITWIDTH. */
5745 :
5746 202223414 : bitwidth = GET_MODE_PRECISION (mode);
5747 202223414 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5748 : return 1;
5749 :
5750 196666352 : nonzero = nonzero_bits (x, mode);
5751 196666352 : return nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))
5752 201763418 : ? 1 : bitwidth - floor_log2 (nonzero) - 1;
5753 : }
5754 :
5755 : /* Calculate the rtx_cost of a single instruction pattern. A return value of
5756 : zero indicates an instruction pattern without a known cost. */
5757 :
5758 : int
5759 149654144 : pattern_cost (rtx pat, bool speed)
5760 : {
5761 149654144 : int i, cost;
5762 149654144 : rtx set;
5763 :
5764 : /* Extract the single set rtx from the instruction pattern. We
5765 : can't use single_set since we only have the pattern. We also
5766 : consider PARALLELs of a normal set and a single comparison. In
5767 : that case we use the cost of the non-comparison SET operation,
5768 : which is most likely to be the real cost of this operation. */
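: /* E.g. (illustrative, register numbers hypothetical): a pattern like
:    (parallel [(set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
:               (set (reg:CC 17) (compare:CC ...))])
:    is costed from the PLUS set alone.  */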
5769 149654144 : if (GET_CODE (pat) == SET)
5770 : set = pat;
5771 66577788 : else if (GET_CODE (pat) == PARALLEL)
5772 : {
5773 : set = NULL_RTX;
5774 : rtx comparison = NULL_RTX;
5775 :
5776 45781846 : for (i = 0; i < XVECLEN (pat, 0); i++)
5777 : {
5778 30803212 : rtx x = XVECEXP (pat, 0, i);
5779 30803212 : if (GET_CODE (x) == SET)
5780 : {
5781 15580492 : if (GET_CODE (SET_SRC (x)) == COMPARE
5782 15308032 : || GET_MODE_CLASS (GET_MODE (SET_DEST (x))) == MODE_CC)
5783 : {
5784 338736 : if (comparison)
5785 : return 0;
5786 : comparison = x;
5787 : }
5788 : else
5789 : {
5790 15241756 : if (set)
5791 : return 0;
5792 : set = x;
5793 : }
5794 : }
5795 : }
5796 :
5797 14978634 : if (!set && comparison)
5798 : set = comparison;
5799 :
5800 14832288 : if (!set)
5801 : return 0;
5802 : }
5803 : else
5804 : return 0;
5805 :
5806 97982332 : cost = set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)), speed);
5807 97982332 : return MAX (COSTS_N_INSNS (1), cost);
5808 : }
5809 :
5810 : /* Calculate the cost of a single instruction. A return value of zero
5811 : indicates an instruction pattern without a known cost. */
5812 :
5813 : int
5814 147578485 : insn_cost (rtx_insn *insn, bool speed)
5815 : {
5816 147578485 : if (targetm.insn_cost)
5817 147578485 : return targetm.insn_cost (insn, speed);
5818 :
5819 0 : return pattern_cost (PATTERN (insn), speed);
5820 : }
5821 :
5822 : /* Returns estimate on cost of computing SEQ. */
5823 :
5824 : unsigned
5825 2048239 : seq_cost (const rtx_insn *seq, bool speed)
5826 : {
5827 2048239 : unsigned cost = 0;
5828 2048239 : rtx set;
5829 :
5830 5337518 : for (; seq; seq = NEXT_INSN (seq))
5831 : {
5832 3289279 : set = single_set (seq);
5833 3289279 : if (set)
5834 3280881 : cost += set_rtx_cost (set, speed);
5835 8398 : else if (NONDEBUG_INSN_P (seq))
5836 : {
5837 8065 : int this_cost = insn_cost (const_cast<struct rtx_insn *> (seq),
5838 : speed);
5839 8065 : if (this_cost > 0)
5840 698 : cost += this_cost;
5841 : else
5842 7367 : cost++;
5843 : }
5844 : }
5845 :
5846 2048239 : return cost;
5847 : }
5848 :
5849 : /* Given an insn INSN and condition COND, return the condition in a
5850 : canonical form to simplify testing by callers. Specifically:
5851 :
5852 : (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
5853 : (2) Both operands will be machine operands.
5854 : (3) If an operand is a constant, it will be the second operand.
5855 : (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
5856 : for GE, GEU, and LEU.
5857 :
5858 : If the condition cannot be understood, or is an inequality floating-point
5859 : comparison which needs to be reversed, 0 will be returned.
5860 :
5861 : If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
5862 :
5863 : If EARLIEST is nonzero, it is a pointer to a place where the earliest
5864 : insn used in locating the condition was found. If a replacement test
5865 : of the condition is desired, it should be placed in front of that
5866 : insn and we will be sure that the inputs are still valid.
5867 :
5868 : If WANT_REG is nonzero, we wish the condition to be relative to that
5869 : register, if possible. Therefore, do not canonicalize the condition
5870 : further. If ALLOW_CC_MODE is nonzero, allow the condition returned
5871 : to be a compare to a CC mode register.
5872 :
5873 : If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
5874 : and at INSN. */
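: /* E.g. (illustrative): under rule (4), (leu x (const_int 4)) is
:    canonicalized to (ltu x (const_int 5)); a constant first operand
:    is swapped into the second position with the comparison code
:    adjusted via swap_condition.  */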
5875 :
5876 : rtx
5877 37777648 : canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
5878 : rtx_insn **earliest,
5879 : rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
5880 : {
5881 37777648 : enum rtx_code code;
5882 37777648 : rtx_insn *prev = insn;
5883 37777648 : const_rtx set;
5884 37777648 : rtx tem;
5885 37777648 : rtx op0, op1;
5886 37777648 : int reverse_code = 0;
5887 37777648 : machine_mode mode;
5888 37777648 : basic_block bb = BLOCK_FOR_INSN (insn);
5889 :
5890 37777648 : code = GET_CODE (cond);
5891 37777648 : mode = GET_MODE (cond);
5892 37777648 : op0 = XEXP (cond, 0);
5893 37777648 : op1 = XEXP (cond, 1);
5894 :
5895 37777648 : if (reverse)
5896 1873193 : code = reversed_comparison_code (cond, insn);
5897 37777648 : if (code == UNKNOWN)
5898 : return 0;
5899 :
5900 37777648 : if (earliest)
5901 18216061 : *earliest = insn;
5902 :
5903 : /* If we are comparing a register with zero, see if the register is set
5904 : in the previous insn to a COMPARE or a comparison operation. Perform
5905 : the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
5906 : in cse.cc */
5907 :
5908 80416964 : while ((GET_RTX_CLASS (code) == RTX_COMPARE
5909 : || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
5910 80416964 : && op1 == CONST0_RTX (GET_MODE (op0))
5911 141195573 : && op0 != want_reg)
5912 : {
5913 : /* Set nonzero when we find something of interest. */
5914 60778609 : rtx x = 0;
5915 :
5916 : /* If this is a COMPARE, pick up the two things being compared. */
5917 60778609 : if (GET_CODE (op0) == COMPARE)
5918 : {
5919 0 : op1 = XEXP (op0, 1);
5920 0 : op0 = XEXP (op0, 0);
5921 0 : continue;
5922 : }
5923 60778609 : else if (!REG_P (op0))
5924 : break;
5925 :
5926 : /* Go back to the previous insn. Stop if it is not an INSN. We also
5927 : stop if it isn't a single set or if it has a REG_INC note because
5928 : we don't want to bother dealing with it. */
5929 :
5930 55688682 : prev = prev_nonnote_nondebug_insn (prev);
5931 :
5932 55688682 : if (prev == 0
5933 55586016 : || !NONJUMP_INSN_P (prev)
5934 : || FIND_REG_INC_NOTE (prev, NULL_RTX)
5935 : /* In cfglayout mode, there do not have to be labels at the
5936 : beginning of a block, or jumps at the end, so the previous
5937 : conditions would not stop us when we reach bb boundary. */
5938 106147932 : || BLOCK_FOR_INSN (prev) != bb)
5939 : break;
5940 :
5941 50359329 : set = set_of (op0, prev);
5942 :
5943 50359329 : if (set
5944 50359329 : && (GET_CODE (set) != SET
5945 43650560 : || !rtx_equal_p (SET_DEST (set), op0)))
5946 : break;
5947 :
5948 : /* If this is setting OP0, get what it sets it to if it looks
5949 : relevant. */
5950 50258282 : if (set)
5951 : {
5952 43549513 : machine_mode inner_mode = GET_MODE (SET_DEST (set));
5953 : #ifdef FLOAT_STORE_FLAG_VALUE
5954 : REAL_VALUE_TYPE fsfv;
5955 : #endif
5956 :
5957 : /* ??? We may not combine comparisons done in a CCmode with
5958 : comparisons not done in a CCmode. This is to aid targets
5959 : like Alpha that have an IEEE compliant EQ instruction, and
5960 : a non-IEEE compliant BEQ instruction. The use of CCmode is
5961 : actually artificial, simply to prevent the combination, but
5962 : should not affect other platforms.
5963 :
5964 : However, we must allow VOIDmode comparisons to match either
5965 : CCmode or non-CCmode comparison, because some ports have
5966 : modeless comparisons inside branch patterns.
5967 :
5968 : ??? This mode check should perhaps look more like the mode check
5969 : in simplify_comparison in combine. */
5970 43549513 : if (((GET_MODE_CLASS (mode) == MODE_CC)
5971 43549513 : != (GET_MODE_CLASS (inner_mode) == MODE_CC))
5972 37120073 : && mode != VOIDmode
5973 0 : && inner_mode != VOIDmode)
5974 : break;
5975 43549513 : if (GET_CODE (SET_SRC (set)) == COMPARE
5976 43549513 : || (((code == NE
5977 4715986 : || (code == LT
5978 150994 : && val_signbit_known_set_p (inner_mode,
5979 : STORE_FLAG_VALUE))
5980 : #ifdef FLOAT_STORE_FLAG_VALUE
5981 : || (code == LT
5982 : && SCALAR_FLOAT_MODE_P (inner_mode)
5983 : && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5984 : REAL_VALUE_NEGATIVE (fsfv)))
5985 : #endif
5986 : ))
5987 2822667 : && COMPARISON_P (SET_SRC (set))))
5988 36319834 : x = SET_SRC (set);
5989 7229679 : else if (((code == EQ
5990 3975385 : || (code == GE
5991 129639 : && val_signbit_known_set_p (inner_mode,
5992 : STORE_FLAG_VALUE))
5993 : #ifdef FLOAT_STORE_FLAG_VALUE
5994 : || (code == GE
5995 : && SCALAR_FLOAT_MODE_P (inner_mode)
5996 : && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5997 : REAL_VALUE_NEGATIVE (fsfv)))
5998 : #endif
5999 : ))
6000 7229679 : && COMPARISON_P (SET_SRC (set)))
6001 : {
6002 : reverse_code = 1;
6003 : x = SET_SRC (set);
6004 : }
6005 6905221 : else if ((code == EQ || code == NE)
6006 5443529 : && GET_CODE (SET_SRC (set)) == XOR)
6007 : /* Handle sequences like:
6008 :
6009 : (set op0 (xor X Y))
6010 : ...(eq|ne op0 (const_int 0))...
6011 :
6012 : in which case:
6013 :
6014 : (eq op0 (const_int 0)) reduces to (eq X Y)
6015 : (ne op0 (const_int 0)) reduces to (ne X Y)
6016 :
6017 : This is the form used by MIPS16, for example. */
6018 : x = SET_SRC (set);
6019 : else
6020 : break;
6021 : }
6022 :
6023 6708769 : else if (reg_set_p (op0, prev))
6024 : /* If this sets OP0, but not directly, we have to give up. */
6025 : break;
6026 :
6027 43361240 : if (x)
6028 : {
6029 : /* If the caller is expecting the condition to be valid at INSN,
6030 : make sure X doesn't change before INSN. */
6031 36652471 : if (valid_at_insn_p)
6032 23173676 : if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
6033 : break;
6034 35930547 : if (COMPARISON_P (x))
6035 131281 : code = GET_CODE (x);
6036 35930547 : if (reverse_code)
6037 : {
6038 84893 : code = reversed_comparison_code (x, prev);
6039 84893 : if (code == UNKNOWN)
6040 : return 0;
6041 : reverse_code = 0;
6042 : }
6043 :
6044 35930547 : op0 = XEXP (x, 0), op1 = XEXP (x, 1);
6045 35930547 : if (earliest)
6046 17576009 : *earliest = prev;
6047 : }
6048 : }
6049 :
6050 : /* If constant is first, put it last. */
6051 37777648 : if (CONSTANT_P (op0))
6052 20866 : code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
6053 :
6054 : /* If OP0 is the result of a comparison, we weren't able to find what
6055 : was really being compared, so fail. */
6056 37777648 : if (!allow_cc_mode
6057 20318890 : && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
6058 : return 0;
6059 :
6060 : /* Canonicalize any ordered comparison with integers involving equality
6061 : if we can do computations in the relevant mode and we do not
6062 : overflow. */
6063 :
6064 36522484 : scalar_int_mode op0_mode;
6065 36522484 : if (CONST_INT_P (op1)
6066 24605625 : && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
6067 60494303 : && GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT)
6068 : {
6069 23944381 : HOST_WIDE_INT const_val = INTVAL (op1);
6070 23944381 : unsigned HOST_WIDE_INT uconst_val = const_val;
6071 23944381 : unsigned HOST_WIDE_INT max_val
6072 23944381 : = (unsigned HOST_WIDE_INT) GET_MODE_MASK (op0_mode);
6073 :
6074 23944381 : switch (code)
6075 : {
6076 904188 : case LE:
6077 904188 : if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
6078 904188 : code = LT, op1 = gen_int_mode (const_val + 1, op0_mode);
6079 : break;
6080 :
6081 : /* When cross-compiling, const_val might be sign-extended from
6082 : BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
6083 298978 : case GE:
6084 298978 : if ((const_val & max_val)
6085 298978 : != (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (op0_mode) - 1)))
6086 298978 : code = GT, op1 = gen_int_mode (const_val - 1, op0_mode);
6087 : break;
6088 :
6089 683224 : case LEU:
6090 683224 : if (uconst_val < max_val)
6091 673610 : code = LTU, op1 = gen_int_mode (uconst_val + 1, op0_mode);
6092 : break;
6093 :
6094 122579 : case GEU:
6095 122579 : if (uconst_val != 0)
6096 122574 : code = GTU, op1 = gen_int_mode (uconst_val - 1, op0_mode);
6097 : break;
6098 :
6099 : default:
6100 : break;
6101 : }
6102 : }
6103 :
6104 : /* We promised to return a comparison. */
6105 36522484 : rtx ret = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
6106 36522484 : if (COMPARISON_P (ret))
6107 : return ret;
6108 : return 0;
6109 : }
6110 :
6111 : /* Given a jump insn JUMP, return the condition that will cause it to branch
6112 : to its JUMP_LABEL. If the condition cannot be understood, or is an
6113 : inequality floating-point comparison which needs to be reversed, 0 will
6114 : be returned.
6115 :
6116 : If EARLIEST is nonzero, it is a pointer to a place where the earliest
6117 : insn used in locating the condition was found. If a replacement test
6118 : of the condition is desired, it should be placed in front of that
6119 : insn and we will be sure that the inputs are still valid. If EARLIEST
6120 : is null, the returned condition will be valid at INSN.
6121 :
6122 : If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
6123 : compare CC mode register.
6124 :
6125 : VALID_AT_INSN_P is the same as for canonicalize_condition. */
6126 :
6127 : rtx
6128 37189693 : get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
6129 : int valid_at_insn_p)
6130 : {
6131 37189693 : rtx cond;
6132 37189693 : int reverse;
6133 37189693 : rtx set;
6134 :
6135 : /* If this is not a standard conditional jump, we can't parse it. */
6136 37189693 : if (!JUMP_P (jump)
6137 37189693 : || ! any_condjump_p (jump))
6138 3783178 : return 0;
6139 33406515 : set = pc_set (jump);
6140 :
6141 33406515 : cond = XEXP (SET_SRC (set), 0);
6142 :
6143 : /* If this branches to JUMP_LABEL when the condition is false, reverse
6144 : the condition. */
6145 33406515 : reverse
6146 66813030 : = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
6147 33406515 : && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
6148 :
6149 33406515 : return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
6150 33406515 : allow_cc_mode, valid_at_insn_p);
6151 : }
6152 :
6153 : /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
6154 : TARGET_MODE_REP_EXTENDED.
6155 :
6156 : Note that we assume that the property of
6157 : TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
6158 : narrower than mode B. I.e., if A is a mode narrower than B then in
6159 : order to be able to operate on it in mode B, mode A needs to
6160 : satisfy the requirements set by the representation of mode B. */
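: /* E.g. (hypothetical target): if TARGET_MODE_REP_EXTENDED
:    (SImode, DImode) is SIGN_EXTEND, then
:    num_sign_bit_copies_in_rep[DImode][SImode] ends up as 32, the
:    number of DImode bits beyond SImode.  */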
6161 :
6162 : static void
6163 280958 : init_num_sign_bit_copies_in_rep (void)
6164 : {
6165 280958 : opt_scalar_int_mode in_mode_iter;
6166 280958 : scalar_int_mode mode;
6167 :
6168 2247664 : FOR_EACH_MODE_IN_CLASS (in_mode_iter, MODE_INT)
6169 7866824 : FOR_EACH_MODE_UNTIL (mode, in_mode_iter.require ())
6170 : {
6171 5900118 : scalar_int_mode in_mode = in_mode_iter.require ();
6172 5900118 : scalar_int_mode i;
6173 :
6174 : /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
6175 : extends to the next widest mode. */
6176 5900118 : gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
6177 : || GET_MODE_WIDER_MODE (mode).require () == in_mode);
6178 :
6179 : /* We are in in_mode. Count how many bits outside of mode
6180 : have to be copies of the sign-bit. */
6181 21633766 : FOR_EACH_MODE (i, mode, in_mode)
6182 : {
6183 : /* This must always exist (for the last iteration it will be
6184 : IN_MODE). */
6185 15733648 : scalar_int_mode wider = GET_MODE_WIDER_MODE (i).require ();
6186 :
6187 15733648 : if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
6188 : /* We can only check sign-bit copies starting from the
6189 : top-bit. In order to be able to check the bits we
6190 : have already seen we pretend that subsequent bits
6191 : have to be sign-bit copies too. */
6192 15733648 : || num_sign_bit_copies_in_rep [in_mode][mode])
6193 0 : num_sign_bit_copies_in_rep [in_mode][mode]
6194 0 : += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
6195 : }
6196 : }
6197 280958 : }
6198 :
6199 : /* Suppose that truncation from the machine mode of X to MODE is not a
6200 : no-op. See if there is anything special about X so that we can
6201 : assume it already contains a truncated value of MODE. */
6202 :
6203 : bool
6204 0 : truncated_to_mode (machine_mode mode, const_rtx x)
6205 : {
6206 : /* This register has already been used in MODE without explicit
6207 : truncation. */
6208 0 : if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
6209 : return true;
6210 :
6211 : /* See if we already satisfy the requirements of MODE. If yes we
6212 : can just switch to MODE. */
6213 0 : if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
6214 0 : && (num_sign_bit_copies (x, GET_MODE (x))
6215 0 : >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
6216 : return true;
6217 :
6218 : return false;
6219 : }
6220 :
6221 : /* Return true if RTX code CODE has a single sequence of zero or more
6222 : "e" operands and no rtvec operands. Initialize its rtx_all_subrtx_bounds
6223 : entry in that case. */
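: /* E.g. (illustrative): PLUS has format "ee", so its entry gets
:    start == 0 and count == 2; a code whose format contains 'E' or
:    'V' (an rtvec) is rejected here and its count is later set to
:    UCHAR_MAX by init_rtlanal.  */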
6224 :
6225 : static bool
6226 43267532 : setup_reg_subrtx_bounds (unsigned int code)
6227 : {
6228 43267532 : const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
6229 43267532 : unsigned int i = 0;
6230 60967886 : for (; format[i] != 'e'; ++i)
6231 : {
6232 27252926 : if (!format[i])
6233 : /* No subrtxes. Leave start and count as 0. */
6234 : return true;
6235 19948018 : if (format[i] == 'E' || format[i] == 'V')
6236 : return false;
6237 : }
6238 :
6239 : /* Record the sequence of 'e's. */
6240 33714960 : rtx_all_subrtx_bounds[code].start = i;
6241 54786810 : do
6242 54786810 : ++i;
6243 54786810 : while (format[i] == 'e');
6244 33714960 : rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
6245 : /* rtl-iter.h relies on this. */
6246 33714960 : gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
6247 :
6248 37648372 : for (; format[i]; ++i)
6249 5338202 : if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
6250 : return false;
6251 :
6252 : return true;
6253 : }
6254 :
6255 : /* Initialize rtx_all_subrtx_bounds. */
6256 : void
6257 280958 : init_rtlanal (void)
6258 : {
6259 280958 : int i;
6260 43548490 : for (i = 0; i < NUM_RTX_CODE; i++)
6261 : {
6262 43267532 : if (!setup_reg_subrtx_bounds (i))
6263 3652454 : rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
6264 43267532 : if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
6265 40457952 : rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
6266 : }
6267 :
6268 280958 : init_num_sign_bit_copies_in_rep ();
6269 280958 : }
6270 :
6271 : /* Check whether this is a constant pool constant. */
6272 : bool
6273 11628 : constant_pool_constant_p (rtx x)
6274 : {
6275 11628 : x = avoid_constant_pool_reference (x);
6276 11628 : return CONST_DOUBLE_P (x);
6277 : }
6278 :
6279 : /* If M is a bitmask that selects a field of low-order bits within an item but
6280 : not the entire word, return the length of the field. Return -1 otherwise.
6281 : M is used in machine mode MODE. */
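: /* Worked examples (illustrative): in SImode, m == 0x3f yields
:    exact_log2 (0x40) == 6, the width of the low-order field;
:    m == 0x30 is not a low-order mask, so the result is -1.  */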
6282 :
6283 : int
6284 8392 : low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
6285 : {
6286 8392 : if (mode != VOIDmode)
6287 : {
6288 8392 : if (!HWI_COMPUTABLE_MODE_P (mode))
6289 : return -1;
6290 8392 : m &= GET_MODE_MASK (mode);
6291 : }
6292 :
6293 8392 : return exact_log2 (m + 1);
6294 : }
6295 :
6296 : /* Return the mode of MEM's address. */
6297 :
6298 : scalar_int_mode
6299 179113215 : get_address_mode (rtx mem)
6300 : {
6301 179113215 : machine_mode mode;
6302 :
6303 179113215 : gcc_assert (MEM_P (mem));
6304 179113215 : mode = GET_MODE (XEXP (mem, 0));
6305 179113215 : if (mode != VOIDmode)
6306 178606729 : return as_a <scalar_int_mode> (mode);
6307 529524 : return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
6308 : }
6309 :
6310 : /* Split up a CONST_DOUBLE or integer constant rtx
6311 : into two rtx's for single words,
6312 : storing in *FIRST the word that comes first in memory in the target
6313 : and in *SECOND the other.
6314 :
6315 : TODO: This function needs to be rewritten to work on any size
6316 : integer. */
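: /* E.g. (illustrative): on a 32-bit little-endian target, splitting
:    the DImode constant 0x0000000100000002 stores (const_int 2) in
:    *FIRST and (const_int 1) in *SECOND.  */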
6317 :
6318 : void
6319 0 : split_double (rtx value, rtx *first, rtx *second)
6320 : {
6321 0 : if (CONST_INT_P (value))
6322 : {
6323 0 : if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
6324 : {
6325 : /* In this case the CONST_INT holds both target words.
6326 : Extract the bits from it into two word-sized pieces.
6327 : Sign extend each half to HOST_WIDE_INT. */
6328 0 : unsigned HOST_WIDE_INT low, high;
6329 0 : unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
6330 0 : unsigned bits_per_word = BITS_PER_WORD;
6331 :
6332 : /* Set sign_bit to the most significant bit of a word. */
6333 0 : sign_bit = 1;
6334 0 : sign_bit <<= bits_per_word - 1;
6335 :
6336 : /* Set mask so that all bits of the word are set. We could
6337 : have used 1 << BITS_PER_WORD instead of basing the
6338 : calculation on sign_bit. However, on machines where
6339 : HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
6340 : compiler warning, even though the code would never be
6341 : executed. */
6342 0 : mask = sign_bit << 1;
6343 0 : mask--;
6344 :
6345 : /* Set sign_extend as any remaining bits. */
6346 0 : sign_extend = ~mask;
6347 :
6348 : /* Pick the lower word and sign-extend it. */
6349 0 : low = INTVAL (value);
6350 0 : low &= mask;
6351 0 : if (low & sign_bit)
6352 0 : low |= sign_extend;
6353 :
6354 : /* Pick the higher word, shifted to the least significant
6355 : bits, and sign-extend it. */
6356 0 : high = INTVAL (value);
6357 0 : high >>= bits_per_word - 1;
6358 0 : high >>= 1;
6359 0 : high &= mask;
6360 0 : if (high & sign_bit)
6361 0 : high |= sign_extend;
6362 :
6363 : /* Store the words in the target machine order. */
6364 0 : if (WORDS_BIG_ENDIAN)
6365 : {
6366 : *first = GEN_INT (high);
6367 : *second = GEN_INT (low);
6368 : }
6369 : else
6370 : {
6371 0 : *first = GEN_INT (low);
6372 0 : *second = GEN_INT (high);
6373 : }
6374 : }
6375 : else
6376 : {
6377 : /* The rule for using CONST_INT for a wider mode
6378 : is that we regard the value as signed.
6379 : So sign-extend it. */
6380 0 : rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
6381 0 : if (WORDS_BIG_ENDIAN)
6382 : {
6383 : *first = high;
6384 : *second = value;
6385 : }
6386 : else
6387 : {
6388 0 : *first = value;
6389 0 : *second = high;
6390 : }
6391 : }
6392 : }
6393 0 : else if (GET_CODE (value) == CONST_WIDE_INT)
6394 : {
6395 : /* All of this is scary code and needs to be converted to
6396 : properly work with any size integer. */
6397 0 : gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
6398 0 : if (WORDS_BIG_ENDIAN)
6399 : {
6400 : *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
6401 : *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
6402 : }
6403 : else
6404 : {
6405 0 : *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
6406 0 : *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
6407 : }
6408 : }
6409 0 : else if (!CONST_DOUBLE_P (value))
6410 : {
6411 0 : if (WORDS_BIG_ENDIAN)
6412 : {
6413 : *first = const0_rtx;
6414 : *second = value;
6415 : }
6416 : else
6417 : {
6418 0 : *first = value;
6419 0 : *second = const0_rtx;
6420 : }
6421 : }
6422 0 : else if (GET_MODE (value) == VOIDmode
6423 : /* This is the old way we did CONST_DOUBLE integers. */
6424 0 : || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
6425 : {
6426 : /* In an integer, the words are defined as most and least significant.
6427 : So order them by the target's convention. */
6428 0 : if (WORDS_BIG_ENDIAN)
6429 : {
6430 : *first = GEN_INT (CONST_DOUBLE_HIGH (value));
6431 : *second = GEN_INT (CONST_DOUBLE_LOW (value));
6432 : }
6433 : else
6434 : {
6435 0 : *first = GEN_INT (CONST_DOUBLE_LOW (value));
6436 0 : *second = GEN_INT (CONST_DOUBLE_HIGH (value));
6437 : }
6438 : }
6439 : else
6440 : {
6441 0 : long l[2];
6442 :
6443 : /* Note, this converts the REAL_VALUE_TYPE to the target's
6444 : format, splits up the floating point double and outputs
6445 : exactly 32 bits of it into each of l[0] and l[1] --
6446 : not necessarily BITS_PER_WORD bits. */
6447 0 : REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (value), l);
6448 :
6449 : /* If 32 bits is an entire word for the target, but not for the host,
6450 : then sign-extend on the host so that the number will look the same
6451 : way on the host that it would on the target. See for instance
6452 : simplify_unary_operation. The #if is needed to avoid compiler
6453 : warnings. */
6454 :
6455 : #if HOST_BITS_PER_LONG > 32
6456 0 : if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
6457 : {
6458 0 : if (l[0] & ((long) 1 << 31))
6459 0 : l[0] |= ((unsigned long) (-1) << 32);
6460 0 : if (l[1] & ((long) 1 << 31))
6461 0 : l[1] |= ((unsigned long) (-1) << 32);
6462 : }
6463 : #endif
6464 :
6465 0 : *first = GEN_INT (l[0]);
6466 0 : *second = GEN_INT (l[1]);
6467 : }
6468 0 : }
6469 :
6470 : /* Return true if X is a sign_extract or zero_extract from the least
6471 : significant bit. */
6472 :
6473 : static bool
6474 210286975 : lsb_bitfield_op_p (rtx x)
6475 : {
6476 0 : if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
6477 : {
6478 0 : machine_mode mode = GET_MODE (XEXP (x, 0));
6479 0 : HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
6480 0 : HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
6481 0 : poly_int64 remaining_bits = GET_MODE_PRECISION (mode) - len;
6482 :
6483 0 : return known_eq (pos, BITS_BIG_ENDIAN ? remaining_bits : 0);
6484 : }
6485 : return false;
6486 : }
6487 :
6488 : /* Strip outer address "mutations" from LOC and return a pointer to the
6489 : inner value. If OUTER_CODE is nonnull, store the code of the innermost
6490 : stripped expression there.
6491 :
6492 : "Mutations" either convert between modes or apply some kind of
6493 : extension, truncation or alignment. */
6494 :
6495 : rtx *
6496 210284819 : strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
6497 : {
6498 210354306 : for (;;)
6499 : {
6500 210354306 : enum rtx_code code = GET_CODE (*loc);
6501 210354306 : if (GET_RTX_CLASS (code) == RTX_UNARY)
6502 : /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
6503 : used to convert between pointer sizes. */
6504 67331 : loc = &XEXP (*loc, 0);
6505 210286975 : else if (lsb_bitfield_op_p (*loc))
6506 : /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
6507 : acts as a combined truncation and extension. */
6508 0 : loc = &XEXP (*loc, 0);
6509 210286975 : else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
6510 : /* (and ... (const_int -X)) is used to align to X bytes. */
6511 2144 : loc = &XEXP (*loc, 0);
6512 210284831 : else if (code == SUBREG
6513 27603 : && (!OBJECT_P (SUBREG_REG (*loc))
6514 27601 : || CONSTANT_P (SUBREG_REG (*loc)))
6515 210284843 : && subreg_lowpart_p (*loc))
6516 : /* A lowpart (subreg (operator ...) ...) is used for mode
6517 : conversion too. It is also used for load-address operations
6518 : in which an extension can be done for free, such as:
6519 :
6520 : (zero_extend:DI
6521 : (subreg:SI (plus:DI (reg:DI R) (symbol_ref:DI "foo")) 0))
6522 :
6523 : The latter usage also covers subregs of plain "displacements",
6524 : such as:
6525 :
6526 : (zero_extend:DI (subreg:SI (symbol_ref:DI "foo") 0))
6527 :
6528 : The inner address should then be the symbol_ref, not the subreg,
6529 : similarly to the plus case above.
6530 :
6531 : In contrast, the subreg in:
6532 :
6533 : (zero_extend:DI (subreg:SI (reg:DI R) 0))
6534 :
6535 : should be treated as the base, since it should be replaced by
6536 : an SImode hard register during register allocation. */
6537 12 : loc = &SUBREG_REG (*loc);
6538 : else
6539 210284819 : return loc;
6540 69487 : if (outer_code)
6541 69487 : *outer_code = code;
6542 : }
6543 : }
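
/* A sketch of stripping one "mutation" layer.  The returned pointer
   refers to the register inside the extension, and OUTER records the
   innermost stripped code.  The helper name and pseudo-register
   number are illustrative assumptions.  */

static void
strip_address_mutations_example (void)
{
  rtx reg = gen_rtx_REG (SImode, LAST_VIRTUAL_REGISTER + 1);
  rtx addr = gen_rtx_ZERO_EXTEND (DImode, reg);
  enum rtx_code outer = UNKNOWN;
  rtx *inner = strip_address_mutations (&addr, &outer);
  gcc_checking_assert (*inner == reg && outer == ZERO_EXTEND);
}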
6544 :
6545 : /* Return true if CODE applies some kind of scale. The scaled value is
6546 : the first operand and the scale is the second. */
6547 :
6548 : static bool
6549 65924437 : binary_scale_code_p (enum rtx_code code)
6550 : {
6551 65924437 : return (code == MULT
6552 65924437 : || code == ASHIFT
6553 : /* Needed by ARM targets. */
6554 : || code == ASHIFTRT
6555 : || code == LSHIFTRT
6556 63740379 : || code == ROTATE
6557 63740379 : || code == ROTATERT);
6558 : }
6559 :
6560 : /* Return true if X appears to be a valid base or index term. */
6561 : static bool
6562 131848874 : valid_base_or_index_term_p (rtx x)
6563 : {
6564 131848874 : if (GET_CODE (x) == SCRATCH)
6565 : return true;
6566 : /* Handle what appear to be eliminated forms of a register. If we reach
6567 : here, the elimination occurs outside of the outermost PLUS tree,
6568 : and so the elimination offset cannot be treated as a displacement
6569 : of the main address. Instead, we need to treat the whole PLUS as
6570 : the base or index term. The address can only be made legitimate by
6571 : reloading the PLUS. */
6572 131848874 : if (GET_CODE (x) == PLUS && CONST_SCALAR_INT_P (XEXP (x, 1)))
6573 0 : x = XEXP (x, 0);
6574 131848874 : if (GET_CODE (x) == SUBREG)
6575 46536 : x = SUBREG_REG (x);
6576 131848874 : return REG_P (x) || MEM_P (x);
6577 : }
6578 :
6579 : /* If *INNER can be interpreted as a base, return a pointer to the inner term
6580 : (see address_info). Return null otherwise. */
6581 :
6582 : static rtx *
6583 65924437 : get_base_term (rtx *inner)
6584 : {
6585 65924437 : if (GET_CODE (*inner) == LO_SUM)
6586 0 : inner = strip_address_mutations (&XEXP (*inner, 0));
6587 65924437 : if (valid_base_or_index_term_p (*inner))
6588 63740379 : return inner;
6589 : return 0;
6590 : }
6591 :
6592 : /* If *INNER can be interpreted as an index, return a pointer to the inner term
6593 : (see address_info). Return null otherwise. */
6594 :
6595 : static rtx *
6596 65924437 : get_index_term (rtx *inner)
6597 : {
6598 : /* At present, only constant scales are allowed. */
6599 65924437 : if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
6600 2184058 : inner = strip_address_mutations (&XEXP (*inner, 0));
6601 65924437 : if (valid_base_or_index_term_p (*inner))
6602 65924437 : return inner;
6603 : return 0;
6604 : }
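
/* A sketch contrasting the two static helpers above, so it could only
   live in this file: a scaled term such as (mult:P (reg) (const_int 4))
   is rejected as a base but accepted as an index, with the returned
   inner term pointing at the register under the scale.  */

static void
base_vs_index_example (void)
{
  rtx reg = gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
  rtx scaled = gen_rtx_MULT (Pmode, reg, GEN_INT (4));
  gcc_checking_assert (!get_base_term (&scaled));
  rtx *term = get_index_term (&scaled);
  gcc_checking_assert (term && *term == reg);
}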
6605 :
6606 : /* Set the segment part of address INFO to LOC, given that INNER is the
6607 : unmutated value. */
6608 :
6609 : static void
6610 17 : set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
6611 : {
6612 17 : gcc_assert (!info->segment);
6613 17 : info->segment = loc;
6614 17 : info->segment_term = inner;
6615 17 : }
6616 :
6617 : /* Set the base part of address INFO to LOC, given that INNER is the
6618 : unmutated value. */
6619 :
6620 : static void
6621 64288186 : set_address_base (struct address_info *info, rtx *loc, rtx *inner)
6622 : {
6623 64288186 : gcc_assert (!info->base);
6624 64288186 : info->base = loc;
6625 64288186 : info->base_term = inner;
6626 64288186 : }
6627 :
6628 : /* Set the index part of address INFO to LOC, given that INNER is the
6629 : unmutated value. */
6630 :
6631 : static void
6632 3723029 : set_address_index (struct address_info *info, rtx *loc, rtx *inner)
6633 : {
6634 3723029 : gcc_assert (!info->index);
6635 3723029 : info->index = loc;
6636 3723029 : info->index_term = inner;
6637 3723029 : }
6638 :
6639 : /* Set the displacement part of address INFO to LOC, given that INNER
6640 : is the constant term. */
6641 :
6642 : static void
6643 65440442 : set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
6644 : {
6645 65440442 : gcc_assert (!info->disp);
6646 65440442 : info->disp = loc;
6647 65440442 : info->disp_term = inner;
6648 65440442 : }
6649 :
6650 : /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the
6651 : rest of INFO accordingly. */
6652 :
6653 : static void
6654 1982984 : decompose_incdec_address (struct address_info *info)
6655 : {
6656 1982984 : info->autoinc_p = true;
6657 :
6658 1982984 : rtx *base = &XEXP (*info->inner, 0);
6659 1982984 : set_address_base (info, base, base);
6660 1982984 : gcc_checking_assert (info->base == info->base_term);
6661 :
6662 : /* These addresses are only valid when the size of the addressed
6663 : value is known. */
6664 1982984 : gcc_checking_assert (info->mode != VOIDmode);
6665 1982984 : }
6666 :
6667 : /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest
6668 : of INFO accordingly. */
6669 :
6670 : static void
6671 103794 : decompose_automod_address (struct address_info *info)
6672 : {
6673 103794 : info->autoinc_p = true;
6674 :
6675 103794 : rtx *base = &XEXP (*info->inner, 0);
6676 103794 : set_address_base (info, base, base);
6677 103794 : gcc_checking_assert (info->base == info->base_term);
6678 :
6679 103794 : rtx plus = XEXP (*info->inner, 1);
6680 103794 : gcc_assert (GET_CODE (plus) == PLUS);
6681 :
6682 103794 : info->base_term2 = &XEXP (plus, 0);
6683 103794 : gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
6684 :
6685 103794 : rtx *step = &XEXP (plus, 1);
6686 103794 : rtx *inner_step = strip_address_mutations (step);
6687 103794 : if (CONSTANT_P (*inner_step))
6688 103794 : set_address_disp (info, step, inner_step);
6689 : else
6690 0 : set_address_index (info, step, inner_step);
6691 103794 : }
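
/* A sketch of the automodified case, assuming an initialized compiler
   context: decomposing (mem:SI (pre_modify:P (reg R)
   (plus:P (reg R) (const_int 16)))) records the auto-modified
   register as the base (and base_term2) and the constant step as the
   displacement.  */

static void
automod_example (void)
{
  rtx reg = gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
  rtx step = gen_rtx_PLUS (Pmode, reg, GEN_INT (16));
  rtx addr = gen_rtx_PRE_MODIFY (Pmode, reg, step);
  struct address_info info;
  decompose_mem_address (&info, gen_rtx_MEM (SImode, addr));
  gcc_checking_assert (info.autoinc_p
		       && *info.base_term == reg
		       && INTVAL (*info.disp_term) == 16);
}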
6692 :
6693 : /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
6694 : values in [PTR, END). Return a pointer to the end of the used array. */
6695 :
6696 : static rtx **
6697 131261102 : extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
6698 : {
6699 187873117 : rtx x = *loc;
6700 187873117 : if (GET_CODE (x) == PLUS)
6701 : {
6702 56612015 : ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
6703 56612015 : ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
6704 : }
6705 : else
6706 : {
6707 131261102 : gcc_assert (ptr != end);
6708 131261102 : *ptr++ = loc;
6709 : }
6710 131261102 : return ptr;
6711 : }
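
/* A sketch of the flattening above (extract_plus_operands is static,
   so this belongs in this file): the nested sum (plus (plus a b) c)
   yields pointers to its three leaf operands, left to right.  */

static void
extract_plus_operands_example (void)
{
  rtx a = gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
  rtx b = gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 2);
  rtx sum = gen_rtx_PLUS (Pmode, gen_rtx_PLUS (Pmode, a, b), GEN_INT (8));
  rtx *ops[4];
  size_t n_ops = extract_plus_operands (&sum, ops, ops + 4) - ops;
  gcc_checking_assert (n_ops == 3 && *ops[0] == a && *ops[1] == b);
}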
6712 :
6713 : /* Evaluate the likelihood of X being a base or index value, returning
6714 : positive if it is likely to be a base, negative if it is likely to be
6715 : an index, and 0 if we can't tell. Make the magnitude of the return
6716 : value reflect the amount of confidence we have in the answer.
6717 :
6718 : MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */
6719 :
6720 : static int
6721 3077942 : baseness (rtx x, machine_mode mode, addr_space_t as,
6722 : enum rtx_code outer_code, enum rtx_code index_code)
6723 : {
6724 : /* Believe *_POINTER unless the address shape requires otherwise. */
6725 3077942 : if (REG_P (x) && REG_POINTER (x))
6726 : return 2;
6727 1749198 : if (MEM_P (x) && MEM_POINTER (x))
6728 : return 2;
6729 :
6730 1749198 : if (REG_P (x) && HARD_REGISTER_P (x))
6731 : {
6732 : /* X is a hard register. If it only fits one of the base
6733 : or index classes, choose that interpretation. */
6734 12 : int regno = REGNO (x);
6735 12 : bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
6736 12 : bool index_p = REGNO_OK_FOR_INDEX_P (regno);
6737 12 : if (base_p != index_p)
6738 0 : return base_p ? 1 : -1;
6739 : }
6740 : return 0;
6741 : }
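
/* A sketch of the strongest case in the heuristic above: baseness
   trusts REG_POINTER before anything else, so a pseudo marked as a
   pointer scores the maximum confidence of 2 regardless of the
   address shape.  The mode and codes passed here are arbitrary
   illustrative choices.  */

static void
baseness_example (void)
{
  rtx ptr = gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
  REG_POINTER (ptr) = 1;
  gcc_checking_assert (baseness (ptr, SImode, ADDR_SPACE_GENERIC,
				 PLUS, SCRATCH) == 2);
}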
6742 :
6743 : /* INFO->INNER describes a normal, non-automodified address.
6744 : Fill in the rest of INFO accordingly. */
6745 :
6746 : static void
6747 74649087 : decompose_normal_address (struct address_info *info)
6748 : {
6749 : /* Treat the address as the sum of up to four values. */
6750 74649087 : rtx *ops[4];
6751 74649087 : size_t n_ops = extract_plus_operands (info->inner, ops,
6752 74649087 : ops + ARRAY_SIZE (ops)) - ops;
6753 :
6754 : /* If there is more than one component, any base component is in a PLUS. */
6755 74649087 : if (n_ops > 1)
6756 55108048 : info->base_outer_code = PLUS;
6757 :
6758 : /* Try to classify each sum operand now. Leave those that could be
6759 : either a base or an index in OPS. */
6760 : rtx *inner_ops[4];
6761 : size_t out = 0;
6762 205910189 : for (size_t in = 0; in < n_ops; ++in)
6763 : {
6764 131261102 : rtx *loc = ops[in];
6765 131261102 : rtx *inner = strip_address_mutations (loc);
6766 131261102 : if (CONSTANT_P (*inner))
6767 65336648 : set_address_disp (info, loc, inner);
6768 65924454 : else if (GET_CODE (*inner) == UNSPEC)
6769 17 : set_address_segment (info, loc, inner);
6770 : else
6771 : {
6772 : /* The only other possibilities are a base or an index. */
6773 65924437 : rtx *base_term = get_base_term (inner);
6774 65924437 : rtx *index_term = get_index_term (inner);
6775 65924437 : gcc_assert (base_term || index_term);
6776 65924437 : if (!base_term)
6777 2184058 : set_address_index (info, loc, index_term);
6778 63740379 : else if (!index_term)
6779 0 : set_address_base (info, loc, base_term);
6780 : else
6781 : {
6782 63740379 : gcc_assert (base_term == index_term);
6783 63740379 : ops[out] = loc;
6784 63740379 : inner_ops[out] = base_term;
6785 63740379 : ++out;
6786 : }
6787 : }
6788 : }
6789 :
6790 : /* Classify the remaining OPS members as bases and indexes. */
6791 74649087 : if (out == 1)
6792 : {
6793 : /* If we haven't seen a base or an index yet, assume that this is
6794 : the base. If we were confident that another term was the base
6795 : or index, treat the remaining operand as the other kind. */
6796 60662437 : if (!info->base)
6797 60662437 : set_address_base (info, ops[0], inner_ops[0]);
6798 : else
6799 0 : set_address_index (info, ops[0], inner_ops[0]);
6800 : }
6801 13986650 : else if (out == 2)
6802 : {
6803 1538971 : auto address_mode = targetm.addr_space.address_mode (info->as);
6804 1538971 : rtx inner_op0 = *inner_ops[0];
6805 1538971 : rtx inner_op1 = *inner_ops[1];
6806 1538971 : int base;
6807 : /* If one inner operand has the expected mode for a base and the other
6808 : doesn't, assume that the other one is the index. This is useful
6809 : for addresses such as:
6810 :
6811 : (plus (zero_extend X) Y)
6812 :
6813 : zero_extend is not in itself enough to assume an index, since bases
6814 : can be zero-extended on POINTERS_EXTEND_UNSIGNED targets. But if
6815 : Y has address mode and X doesn't, there should be little doubt that
6816 : Y is the base. */
6817 1538971 : if (GET_MODE (inner_op0) == address_mode
6818 1538971 : && GET_MODE (inner_op1) != address_mode)
6819 : base = 0;
6820 1538971 : else if (GET_MODE (inner_op1) == address_mode
6821 1538971 : && GET_MODE (inner_op0) != address_mode)
6822 : base = 1;
6823 : /* In the event of a tie, assume the base comes first. */
6824 1538971 : else if (baseness (inner_op0, info->mode, info->as, PLUS,
6825 1538971 : GET_CODE (*ops[1]))
6826 1538971 : >= baseness (inner_op1, info->mode, info->as, PLUS,
6827 1538971 : GET_CODE (*ops[0])))
6828 : base = 0;
6829 : else
6830 8578 : base = 1;
6831 1538971 : set_address_base (info, ops[base], inner_ops[base]);
6832 1538971 : set_address_index (info, ops[1 - base], inner_ops[1 - base]);
6833 : }
6834 : else
6835 12447679 : gcc_assert (out == 0);
6836 74649087 : }
6837 :
6838 : /* Describe address *LOC in *INFO. MODE is the mode of the addressed value,
6839 : or VOIDmode if not known. AS is the address space associated with LOC.
6840 : OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */
6841 :
6842 : void
6843 76735865 : decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
6844 : addr_space_t as, enum rtx_code outer_code)
6845 : {
6846 76735865 : memset (info, 0, sizeof (*info));
6847 76735865 : info->mode = mode;
6848 76735865 : info->as = as;
6849 76735865 : info->addr_outer_code = outer_code;
6850 76735865 : info->outer = loc;
6851 76735865 : info->inner = strip_address_mutations (loc, &outer_code);
6852 76735865 : info->base_outer_code = outer_code;
6853 76735865 : switch (GET_CODE (*info->inner))
6854 : {
6855 1982984 : case PRE_DEC:
6856 1982984 : case PRE_INC:
6857 1982984 : case POST_DEC:
6858 1982984 : case POST_INC:
6859 1982984 : decompose_incdec_address (info);
6860 1982984 : break;
6861 :
6862 103794 : case PRE_MODIFY:
6863 103794 : case POST_MODIFY:
6864 103794 : decompose_automod_address (info);
6865 103794 : break;
6866 :
6867 74649087 : default:
6868 74649087 : decompose_normal_address (info);
6869 74649087 : break;
6870 : }
6871 76735865 : }
6872 :
6873 : /* Describe address operand LOC in INFO. */
6874 :
6875 : void
6876 3399163 : decompose_lea_address (struct address_info *info, rtx *loc)
6877 : {
6878 3399163 : decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
6879 3399163 : }
6880 :
6881 : /* Describe the address of MEM X in INFO. */
6882 :
6883 : void
6884 73323488 : decompose_mem_address (struct address_info *info, rtx x)
6885 : {
6886 73323488 : gcc_assert (MEM_P (x));
6887 73323488 : decompose_address (info, &XEXP (x, 0), GET_MODE (x),
6888 73323488 : MEM_ADDR_SPACE (x), MEM);
6889 73323488 : }
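
/* A sketch of the common case, assuming an initialized compiler
   context: decomposing a base-plus-displacement MEM address leaves
   the register as the base term, the constant as the displacement,
   and the index and segment null.  */

static void
decompose_mem_address_example (void)
{
  rtx base = gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
  rtx addr = gen_rtx_PLUS (Pmode, base, GEN_INT (4));
  struct address_info info;
  decompose_mem_address (&info, gen_rtx_MEM (SImode, addr));
  gcc_checking_assert (*info.base_term == base
		       && INTVAL (*info.disp_term) == 4
		       && !info.index && !info.segment);
}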
6890 :
6891 : /* Update INFO after a change to the address it describes. */
6892 :
6893 : void
6894 13214 : update_address (struct address_info *info)
6895 : {
6896 13214 : decompose_address (info, info->outer, info->mode, info->as,
6897 : info->addr_outer_code);
6898 13214 : }
6899 :
6900 : /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
6901 : more complicated than that. */
6902 :
6903 : HOST_WIDE_INT
6904 0 : get_index_scale (const struct address_info *info)
6905 : {
6906 0 : rtx index = *info->index;
6907 0 : if (GET_CODE (index) == MULT
6908 0 : && CONST_INT_P (XEXP (index, 1))
6909 0 : && info->index_term == &XEXP (index, 0))
6910 0 : return INTVAL (XEXP (index, 1));
6911 :
6912 0 : if (GET_CODE (index) == ASHIFT
6913 0 : && CONST_INT_P (XEXP (index, 1))
6914 0 : && info->index_term == &XEXP (index, 0))
6915 0 : return HOST_WIDE_INT_1 << INTVAL (XEXP (index, 1));
6916 :
6917 0 : if (info->index == info->index_term)
6918 0 : return 1;
6919 :
6920 : return 0;
6921 : }
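
/* A sketch of the shift case: the scale of an
   (ashift ... (const_int 2)) index is 1 << 2 == 4, whereas a bare
   register index has scale 1.  The address_info here is built by
   hand purely for illustration.  */

static void
get_index_scale_example (void)
{
  rtx reg = gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
  rtx idx = gen_rtx_ASHIFT (Pmode, reg, GEN_INT (2));
  struct address_info info = {};
  info.index = &idx;
  info.index_term = &XEXP (idx, 0);
  gcc_checking_assert (get_index_scale (&info) == 4);
}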
6922 :
6923 : /* Return the "index code" of INFO, in the form required by
6924 : ok_for_base_p_1. */
6925 :
6926 : enum rtx_code
6927 33118679 : get_index_code (const struct address_info *info)
6928 : {
6929 33118679 : if (info->index)
6930 1521212 : return GET_CODE (*info->index);
6931 :
6932 31597467 : if (info->disp)
6933 25780020 : return GET_CODE (*info->disp);
6934 :
6935 : return SCRATCH;
6936 : }
6937 :
6938 : /* Return true if RTL X contains a SYMBOL_REF. */
6939 :
6940 : bool
6941 749971 : contains_symbol_ref_p (const_rtx x)
6942 : {
6943 749971 : subrtx_iterator::array_type array;
6944 3095503 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6945 2423024 : if (SYMBOL_REF_P (*iter))
6946 77492 : return true;
6947 :
6948 672479 : return false;
6949 749971 : }
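
/* A sketch of the walk above: the subrtx iteration finds a SYMBOL_REF
   however deeply it is buried in the expression.  The symbol name is
   an illustrative assumption.  */

static void
contains_symbol_ref_example (void)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "example_symbol");
  rtx x = gen_rtx_PLUS (Pmode, sym, GEN_INT (8));
  gcc_checking_assert (contains_symbol_ref_p (x));
}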
6950 :
6951 : /* Return true if RTL X contains a SYMBOL_REF or LABEL_REF. */
6952 :
6953 : bool
6954 353277 : contains_symbolic_reference_p (const_rtx x)
6955 : {
6956 353277 : subrtx_iterator::array_type array;
6957 812470 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6958 463651 : if (SYMBOL_REF_P (*iter) || GET_CODE (*iter) == LABEL_REF)
6959 4458 : return true;
6960 :
6961 348819 : return false;
6962 353277 : }
6963 :
6964 : /* Return true if RTL X contains a constant pool address. */
6965 :
6966 : bool
6967 0 : contains_constant_pool_address_p (const_rtx x)
6968 : {
6969 0 : subrtx_iterator::array_type array;
6970 0 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6971 0 : if (SYMBOL_REF_P (*iter) && CONSTANT_POOL_ADDRESS_P (*iter))
6972 0 : return true;
6973 :
6974 0 : return false;
6975 0 : }
6976 :
6977 :
6978 : /* Return true if X contains a thread-local symbol. */
6979 :
6980 : bool
6981 0 : tls_referenced_p (const_rtx x)
6982 : {
6983 0 : if (!targetm.have_tls)
6984 : return false;
6985 :
6986 0 : subrtx_iterator::array_type array;
6987 0 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6988 0 : if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
6989 0 : return true;
6990 0 : return false;
6991 0 : }
6992 :
6993 : /* Recursively scan X, which is part of INSN, and add REG_INC notes where necessary. */
6994 : void
6995 0 : add_auto_inc_notes (rtx_insn *insn, rtx x)
6996 : {
6997 0 : enum rtx_code code = GET_CODE (x);
6998 0 : const char *fmt;
6999 0 : int i, j;
7000 :
7001 0 : if (code == MEM && auto_inc_p (XEXP (x, 0)))
7002 : {
7003 0 : add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
7004 0 : return;
7005 : }
7006 :
7007 : /* Scan all X sub-expressions. */
7008 0 : fmt = GET_RTX_FORMAT (code);
7009 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7010 : {
7011 0 : if (fmt[i] == 'e')
7012 0 : add_auto_inc_notes (insn, XEXP (x, i));
7013 0 : else if (fmt[i] == 'E')
7014 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7015 0 : add_auto_inc_notes (insn, XVECEXP (x, i, j));
7016 : }
7017 : }
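
/* A sketch of the intended use, with an assumed caller name: a pass
   that has just rewritten INSN's pattern to use an auto-increment
   address would re-scan it like this, so that the REG_INC notes
   later passes rely on are attached.  */

static void
refresh_reg_inc_notes (rtx_insn *insn)
{
  add_auto_inc_notes (insn, PATTERN (insn));
  /* find_reg_note (insn, REG_INC, <reg>) is now non-null for every
     auto-modified register in the pattern.  */
}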
7018 :
7019 : /* Return true if INSN is the second element of a pair of macro-fused
7020 : single_sets, both of which set the same output register. */
7021 : bool
7022 67045017 : single_output_fused_pair_p (rtx_insn *insn)
7023 : {
7024 67045017 : rtx set, prev_set;
7025 67045017 : rtx_insn *prev;
7026 :
7027 67045017 : return INSN_P (insn)
7028 67045017 : && SCHED_GROUP_P (insn)
7029 4080476 : && (prev = prev_nonnote_nondebug_insn (insn))
7030 4080476 : && (set = single_set (insn)) != NULL_RTX
7031 4080476 : && (prev_set = single_set (prev))
7032 : != NULL_RTX
7033 4080334 : && REG_P (SET_DEST (set))
7034 0 : && REG_P (SET_DEST (prev_set))
7035 67045017 : && (!reload_completed
7036 0 : || REGNO (SET_DEST (set)) == REGNO (SET_DEST (prev_set)));
7037 : }
7038 :
7039 : /* Return true if X is a register declared with a "register asm" specification. */
7040 :
7041 : bool
7042 18140044 : register_asm_p (const_rtx x)
7043 : {
7044 18140044 : return (REG_P (x)
7045 18140044 : && REG_EXPR (x) != NULL_TREE
7046 8825808 : && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (x))
7047 2760381 : && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (x))
7048 18186643 : && DECL_REGISTER (REG_EXPR (x)));
7049 : }
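
/* A note on the declaration shape detected above, using the GNU
   extension (register name chosen for illustration):

     register int counter asm ("r12");

   For the REG rtx backing COUNTER, REG_EXPR is the VAR_DECL, its
   assembler name ("r12") has been set, and DECL_REGISTER holds.  */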
7050 :
7051 : /* Return true if, for all OP of mode OP_MODE:
7052 :
7053 : (vec_select:RESULT_MODE OP SEL)
7054 :
7055 : is equivalent to the highpart RESULT_MODE of OP. */
7056 :
7057 : bool
7058 0 : vec_series_highpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
7059 : {
7060 0 : int nunits;
7061 0 : if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
7062 0 : && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
7063 : {
7064 0 : int offset = BYTES_BIG_ENDIAN ? 0 : nunits - XVECLEN (sel, 0);
7065 0 : return rtvec_series_p (XVEC (sel, 0), offset);
7066 : }
7067 : return false;
7068 : }
7069 :
7070 : /* Return true if, for all OP of mode OP_MODE:
7071 :
7072 : (vec_select:RESULT_MODE OP SEL)
7073 :
7074 : is equivalent to the lowpart RESULT_MODE of OP. */
7075 :
7076 : bool
7077 5163438 : vec_series_lowpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
7078 : {
7079 5163438 : int nunits;
7080 5163438 : if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
7081 5163438 : && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
7082 : {
7083 667535 : int offset = BYTES_BIG_ENDIAN ? nunits - XVECLEN (sel, 0) : 0;
7084 667535 : return rtvec_series_p (XVEC (sel, 0), offset);
7085 : }
7086 : return false;
7087 : }
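
/* A sketch, assuming a little-endian target that provides V4SImode
   and V2SImode: a series selector for lanes 0 and 1 requests the
   lowpart half, subject to the target's can_change_mode_class hook.  */

static void
vec_series_lowpart_example (void)
{
  rtx sel = gen_rtx_PARALLEL (VOIDmode,
			      gen_rtvec (2, GEN_INT (0), GEN_INT (1)));
  if (vec_series_lowpart_p (V2SImode, V4SImode, sel))
    {
      /* (vec_select:V2SI OP SEL) may be replaced by the V2SImode
	 lowpart of OP.  */
    }
}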
7088 :
7089 : /* Return true if X contains a paradoxical subreg. */
7090 :
7091 : bool
7092 1169306 : contains_paradoxical_subreg_p (rtx x)
7093 : {
7094 1169306 : subrtx_var_iterator::array_type array;
7095 4921471 : FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
7096 : {
7097 3802689 : x = *iter;
7098 3802689 : if (SUBREG_P (x) && paradoxical_subreg_p (x))
7099 50524 : return true;
7100 : }
7101 1118782 : return false;
7102 1169306 : }
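
/* A sketch of the predicate above: (subreg:DI (reg:SI ...) 0) reads
   more bits than its operand provides, so it is paradoxical and is
   found anywhere inside X.  */

static void
contains_paradoxical_subreg_example (void)
{
  rtx sub = gen_rtx_SUBREG (DImode,
			    gen_rtx_REG (SImode, LAST_VIRTUAL_REGISTER + 1),
			    0);
  rtx x = gen_rtx_PLUS (DImode, sub, GEN_INT (1));
  gcc_checking_assert (contains_paradoxical_subreg_p (x));
}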