Line data Source code
1 : /* Subroutines used by or related to instruction recognition.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "stmt.h"
29 : #include "cfghooks.h"
30 : #include "df.h"
31 : #include "memmodel.h"
32 : #include "tm_p.h"
33 : #include "insn-config.h"
34 : #include "regs.h"
35 : #include "emit-rtl.h"
36 : #include "recog.h"
37 : #include "insn-attr.h"
38 : #include "addresses.h"
39 : #include "cfgrtl.h"
40 : #include "cfgbuild.h"
41 : #include "cfgcleanup.h"
42 : #include "reload.h"
43 : #include "tree-pass.h"
44 : #include "function-abi.h"
45 : #include "rtl-iter.h"
46 :
47 : #ifndef STACK_POP_CODE
48 : #if STACK_GROWS_DOWNWARD
49 : #define STACK_POP_CODE POST_INC
50 : #else
51 : #define STACK_POP_CODE POST_DEC
52 : #endif
53 : #endif
54 :
55 : static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
56 : static void validate_replace_src_1 (rtx *, void *);
57 : static rtx_insn *split_insn (rtx_insn *);
58 :
59 : struct target_recog default_target_recog;
60 : #if SWITCHABLE_TARGET
61 : struct target_recog *this_target_recog = &default_target_recog;
62 : #endif
63 :
64 : /* Nonzero means allow operands to be volatile.
65 : This should be 0 if you are generating rtl, such as if you are calling
66 : the functions in optabs.cc and expmed.cc (most of the time).
67 : This should be 1 if all valid insns need to be recognized,
68 : such as in reginfo.cc and final.cc and reload.cc.
69 :
70 : init_recog and init_recog_no_volatile are responsible for setting this. */
71 :
72 : int volatile_ok;
73 :
74 : struct recog_data_d recog_data;
75 :
76 : /* Contains a vector of operand_alternative structures, such that
77 : operand OP of alternative A is at index A * n_operands + OP.
78 : Set up by preprocess_constraints. */
79 : const operand_alternative *recog_op_alt;
80 :
81 : /* Used to provide recog_op_alt for asms. */
82 : static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
83 : * MAX_RECOG_ALTERNATIVES];
84 :
85 : /* On return from `constrain_operands', indicate which alternative
86 : was satisfied. */
87 :
88 : int which_alternative;
89 :
90 : /* True for inline asm operands with - constraint modifier. */
91 : bool raw_constraint_p;
92 :
93 : /* Nonzero after end of reload pass.
94 : Set to 1 or 0 by toplev.cc.
95 : Controls the significance of (SUBREG (MEM)). */
96 :
97 : int reload_completed;
98 :
99 : bool post_ra_split_completed;
100 :
101 : /* Nonzero after thread_prologue_and_epilogue_insns has run. */
102 : int epilogue_completed;
103 :
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.
   This variant forbids volatile operands (see volatile_ok above).  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}
113 :
/* Initialize recognition, permitting volatile operands — used when all
   valid insns must be recognized (see the comment on volatile_ok).  */
void
init_recog (void)
{
  volatile_ok = 1;
}
119 :
120 :
121 : /* Return true if labels in asm operands BODY are LABEL_REFs. */
122 :
123 : static bool
124 103890276 : asm_labels_ok (rtx body)
125 : {
126 103890276 : rtx asmop;
127 103890276 : int i;
128 :
129 103890276 : asmop = extract_asm_operands (body);
130 103890276 : if (asmop == NULL_RTX)
131 : return true;
132 :
133 812844 : for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
134 7507 : if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
135 : return false;
136 :
137 : return true;
138 : }
139 :
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.
   Returns false if the labels are malformed, if X is not an asm with
   operands, or if any operand fails its constraint.  */

bool
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return false;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      /* constrain_operands communicates via the global which_alternative;
	 a negative value means no alternative matched.  */
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return false;
  if (noperands == 0)
    return true;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      /* Skip the commutative marker so asm_operand_ok sees the
	 real constraint letters.  */
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return false;
    }

  return true;
}
186 :
/* Static data for the next two routines.  */

/* One pending (possibly not-yet-validated) RTL change, recorded by
   validate_change_1 so it can later be confirmed or rolled back.  */
struct change_t
{
  rtx object;		/* Insn or MEM being changed, or zero.  */
  int old_code;		/* Saved INSN_CODE of OBJECT, restored on cancel.  */
  int old_len;		/* Previous XVECLEN (*LOC, 0), or -1 if the vector
			   length was not changed.  */
  bool unshare;		/* If true, copy_rtx *LOC when confirming.  */
  rtx *loc;		/* Location within the rtl that was modified.  */
  rtx old;		/* Previous contents of *LOC.  */
};

/* Growable stack of pending changes and its allocated capacity.  */
static change_t *changes;
static int changes_allocated;

/* Number of entries currently live in CHANGES.  */
static int num_changes = 0;
/* Number of changes currently swapped out by undo_recog_changes.  */
int undo_recog_changes::s_num_changes = 0;
204 :
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
   will also be changed to NEW_LEN, which is no greater than the current
   XVECLEN.  If OBJECT is zero, no validation is done, the change is
   simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return false.
   Otherwise, perform the change and return true.

   UNSHARE is stored with the change and makes confirm_change_group
   copy_rtx the replacement before committing it.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
		   bool unshare, int new_len = -1)
{
  gcc_assert (!undo_recog_changes::is_active ());
  rtx old = *loc;

  /* Single-element parallels aren't valid and won't match anything.
     Replace them with the single element.  */
  if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
    {
      new_rtx = XVECEXP (new_rtx, 0, 0);
      new_len = -1;
    }

  /* When a change is part of a group, callers expect to be able to change
     INSN_CODE after making the change and have the code reset to its old
     value by a later cancel_changes.  We therefore need to register group
     changes even if they're no-ops.  */
  if (!in_group
      && (old == new_rtx || rtx_equal_p (old, new_rtx))
      && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
    return true;

  gcc_assert ((in_group != 0 || num_changes == 0)
	      && (new_len < 0 || new_rtx == *loc));

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
  changes[num_changes].unshare = unshare;

  if (new_len >= 0)
    XVECLEN (new_rtx, 0) = new_len;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return true;
  else
    return apply_change_group ();
}
294 :
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  See validate_change_1 for the meaning of OBJECT,
   LOC, NEW_RTX and IN_GROUP.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}
303 :
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true: the replacement rtx will be unshared (copy_rtx'd)
   when the change group is confirmed.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
312 :
/* Change XVECLEN (*LOC, 0) to NEW_LEN.  OBJECT, IN_GROUP and the return
   value are as for validate_change_1.  Note that *LOC itself is kept;
   only the recorded vector length changes.  */

bool
validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
{
  return validate_change_1 (object, loc, *loc, in_group, false, new_len);
}
321 :
322 : /* Keep X canonicalized if some changes have made it non-canonical; only
323 : modifies the operands of X, not (for example) its code. Simplifications
324 : are not the job of this routine.
325 :
326 : Return true if anything was changed. */
327 : bool
328 1820194 : canonicalize_change_group (rtx_insn *insn, rtx x)
329 : {
330 1820194 : if (COMMUTATIVE_P (x)
331 1820194 : && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
332 : {
333 : /* Oops, the caller has made X no longer canonical.
334 : Let's redo the changes in the correct order. */
335 94513 : rtx tem = XEXP (x, 0);
336 94513 : validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
337 94513 : validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
338 94513 : return true;
339 : }
340 : else
341 1725681 : return false;
342 : }
343 :
344 : /* Check if REG_INC argument in *data overlaps a stored REG. */
345 :
346 : static void
347 0 : check_invalid_inc_dec (rtx reg, const_rtx, void *data)
348 : {
349 0 : rtx *pinc = (rtx *) data;
350 0 : if (*pinc == NULL_RTX || MEM_P (reg))
351 : return;
352 0 : if (reg_overlap_mentioned_p (reg, *pinc))
353 0 : *pinc = NULL_RTX;
354 : }
355 :
356 : /* This subroutine of apply_change_group verifies whether the changes to INSN
357 : were valid; i.e. whether INSN can still be recognized.
358 :
359 : If IN_GROUP is true clobbers which have to be added in order to
360 : match the instructions will be added to the current change group.
361 : Otherwise the changes will take effect immediately. */
362 :
363 : bool
364 475251111 : insn_invalid_p (rtx_insn *insn, bool in_group)
365 : {
366 475251111 : rtx pat = PATTERN (insn);
367 475251111 : int num_clobbers = 0;
368 : /* If we are before reload and the pattern is a SET, see if we can add
369 : clobbers. */
370 475251111 : int icode = recog (pat, insn,
371 475251111 : (GET_CODE (pat) == SET
372 395840401 : && ! reload_completed
373 362189735 : && ! reload_in_progress)
374 : ? &num_clobbers : 0);
375 475251111 : bool is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
376 :
377 :
378 : /* If this is an asm and the operand aren't legal, then fail. Likewise if
379 : this is not an asm and the insn wasn't recognized. */
380 561416 : if ((is_asm && ! check_asm_operands (PATTERN (insn)))
381 475050184 : || (!is_asm && icode < 0))
382 17446045 : return true;
383 :
384 : /* If we have to add CLOBBERs, fail if we have to add ones that reference
385 : hard registers since our callers can't know if they are live or not.
386 : Otherwise, add them. */
387 457805066 : if (num_clobbers > 0)
388 : {
389 1960 : rtx newpat;
390 :
391 1960 : if (added_clobbers_hard_reg_p (icode))
392 : return true;
393 :
394 662 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
395 662 : XVECEXP (newpat, 0, 0) = pat;
396 662 : add_clobbers (newpat, icode);
397 662 : if (in_group)
398 661 : validate_change (insn, &PATTERN (insn), newpat, 1);
399 : else
400 1 : PATTERN (insn) = pat = newpat;
401 : }
402 :
403 : /* After reload, verify that all constraints are satisfied. */
404 457803768 : if (reload_completed)
405 : {
406 33655270 : extract_insn (insn);
407 :
408 33655270 : if (! constrain_operands (1, get_preferred_alternatives (insn)))
409 : return true;
410 : }
411 :
412 : /* Punt if REG_INC argument overlaps some stored REG. */
413 457780918 : for (rtx link = FIND_REG_INC_NOTE (insn, NULL_RTX);
414 457780918 : link; link = XEXP (link, 1))
415 : if (REG_NOTE_KIND (link) == REG_INC)
416 : {
417 : rtx reg = XEXP (link, 0);
418 : note_stores (insn, check_invalid_inc_dec, ®);
419 : if (reg == NULL_RTX)
420 : return true;
421 : }
422 :
423 457780918 : INSN_CODE (insn) = icode;
424 457780918 : return false;
425 : }
426 :
/* Return number of changes made and not validated yet.
   This is simply the current depth of the pending-change stack.  */
int
num_changes_pending (void)
{
  return num_changes;
}
433 :
/* Tentatively apply the changes numbered NUM and up.
   Return true if all changes are valid, false otherwise.  */

bool
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && register_asm_p (changes[i].old))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      /* A two-element PARALLEL collapses to its first element.  */
	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  /* We fell out of the loop early (via break) iff some change failed.  */
  return (i == num_changes);
}
534 :
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insn changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  gcc_assert (!undo_recog_changes::is_active ());
  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* Deferred unsharing requested by validate_unshare_change.  */
      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  /* Rescan the final insn, which the loop above deliberately deferred.  */
  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}
567 :
568 : /* Apply a group of changes previously issued with `validate_change'.
569 : If all changes are valid, call confirm_change_group and return true,
570 : otherwise, call cancel_changes and return false. */
571 :
572 : bool
573 741398257 : apply_change_group (void)
574 : {
575 741398257 : if (verify_changes (0))
576 : {
577 729797980 : confirm_change_group ();
578 729797980 : return true;
579 : }
580 : else
581 : {
582 11600277 : cancel_changes (0);
583 11600277 : return false;
584 : }
585 : }
586 :
587 :
588 : /* Return the number of changes so far in the current group. */
589 :
590 : int
591 723221977 : num_validated_changes (void)
592 : {
593 723221977 : return num_changes;
594 : }
595 :
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  gcc_assert (!undo_recog_changes::is_active ());
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      /* A non-negative old_len means only the vector length changed.  */
      if (changes[i].old_len >= 0)
	XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
      else
	*changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	{
	  INSN_CODE (changes[i].object) = changes[i].old_code;
	  /* Cached extraction data for this insn is now stale.  */
	  if (recog_data.insn == changes[i].object)
	    recog_data.insn = nullptr;
	}
    }
  num_changes = num;
}
621 :
/* Swap the status of change NUM from being applied to not being applied,
   or vice versa.  Swapping is its own inverse, so calling this twice on
   the same change restores the original state.  */

static void
swap_change (int num)
{
  if (changes[num].old_len >= 0)
    std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
  else
    std::swap (*changes[num].loc, changes[num].old);
  if (changes[num].object && !MEM_P (changes[num].object))
    {
      std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
      /* Invalidate cached extraction data for the affected insn.  */
      if (recog_data.insn == changes[num].object)
	recog_data.insn = nullptr;
    }
}
639 :
640 27155113 : undo_recog_changes::undo_recog_changes (int num)
641 27155113 : : m_old_num_changes (s_num_changes)
642 : {
643 27155113 : gcc_assert (num <= num_changes - s_num_changes);
644 50704164 : for (int i = num_changes - s_num_changes - 1; i >= num; i--)
645 23549051 : swap_change (i);
646 27155113 : s_num_changes = num_changes - num;
647 27155113 : }
648 :
649 27155113 : undo_recog_changes::~undo_recog_changes ()
650 : {
651 50704164 : for (int i = num_changes - s_num_changes;
652 50704164 : i < num_changes - m_old_num_changes; ++i)
653 23549051 : swap_change (i);
654 27155113 : s_num_changes = m_old_num_changes;
655 27155113 : }
656 :
/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  *LOC is the expression that just had TO substituted into it within
   OBJECT; OP0_MODE is the mode its first operand had before substitution.
   Any simplifications are registered on the current change group.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  /* Re-canonicalize operand order if the substitution broke it.  */
  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      /* Canonicalize (minus a const) as (plus a (neg const)).  */
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  int pos = INTVAL (XEXP (x, 2));
	  machine_mode new_mode = is_mode;
	  /* Prefer the mode the target's extract pattern wants.  */
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));

	  /* If we have a narrower mode, we can do something.  */
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
811 :
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  If SIMPLIFY, re-simplify any
   expression whose operands were substituted.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      /* Only the destination is private to this SET; the shared
		 ASM_OPERANDS source was already handled at j == 0.  */
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
899 :
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  Returns true if the whole group applied.  */

bool
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}
910 :
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.
   Returns true if the whole group applied.  */

bool
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
920 :
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

bool
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  /* Like validate_replace_rtx, but restricted to the subexpression WHERE.  */
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}
933 :
934 : /* Same as above, but do not simplify rtx afterwards. */
935 : bool
936 88 : validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
937 : rtx_insn *insn)
938 : {
939 88 : validate_replace_rtx_1 (where, from, to, insn, false);
940 88 : return apply_change_group ();
941 :
942 : }
943 :
944 : /* Try replacing every occurrence of FROM in INSN with TO. This also
945 : will replace in REG_EQUAL and REG_EQUIV notes. */
946 :
947 : void
948 21 : validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
949 : {
950 21 : rtx note;
951 21 : validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
952 28 : for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
953 7 : if (REG_NOTE_KIND (note) == REG_EQUAL
954 7 : || REG_NOTE_KIND (note) == REG_EQUIV)
955 0 : validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
956 21 : }
957 :
/* Closure passed through note_uses to validate_replace_src_1 so that
   the callback can perform the requested substitution.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};
965 :
966 : static void
967 21811306 : validate_replace_src_1 (rtx *x, void *data)
968 : {
969 21811306 : struct validate_replace_src_data *d
970 : = (struct validate_replace_src_data *) data;
971 :
972 21811306 : validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
973 21811306 : }
974 :
975 : /* Try replacing every occurrence of FROM in INSN with TO, avoiding
976 : SET_DESTs. */
977 :
978 : void
979 15605601 : validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
980 : {
981 15605601 : struct validate_replace_src_data d;
982 :
983 15605601 : d.from = from;
984 15605601 : d.to = to;
985 15605601 : d.insn = insn;
986 15605601 : note_uses (&PATTERN (insn), validate_replace_src_1, &d);
987 15605601 : }
988 :
989 : /* Try simplify INSN.
990 : Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
991 : pattern and return true if something was simplified. */
992 :
993 : bool
994 0 : validate_simplify_insn (rtx_insn *insn)
995 : {
996 0 : int i;
997 0 : rtx pat = NULL;
998 0 : rtx newpat = NULL;
999 :
1000 0 : pat = PATTERN (insn);
1001 :
1002 0 : if (GET_CODE (pat) == SET)
1003 : {
1004 0 : newpat = simplify_rtx (SET_SRC (pat));
1005 0 : if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
1006 0 : validate_change (insn, &SET_SRC (pat), newpat, 1);
1007 0 : newpat = simplify_rtx (SET_DEST (pat));
1008 0 : if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
1009 0 : validate_change (insn, &SET_DEST (pat), newpat, 1);
1010 : }
1011 0 : else if (GET_CODE (pat) == PARALLEL)
1012 0 : for (i = 0; i < XVECLEN (pat, 0); i++)
1013 : {
1014 0 : rtx s = XVECEXP (pat, 0, i);
1015 :
1016 0 : if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
1017 : {
1018 0 : newpat = simplify_rtx (SET_SRC (s));
1019 0 : if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
1020 0 : validate_change (insn, &SET_SRC (s), newpat, 1);
1021 0 : newpat = simplify_rtx (SET_DEST (s));
1022 0 : if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
1023 0 : validate_change (insn, &SET_DEST (s), newpat, 1);
1024 : }
1025 : }
1026 0 : return ((num_changes_pending () > 0) && (apply_change_group () > 0));
1027 : }
1028 :
/* Try to process the address of memory expression MEM.  Return true on
   success; leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_mem_1 (rtx mem)
{
  auto old_num_changes = num_validated_changes ();
  /* Record that we are inside an address while walking it, so that
     substituted values can be canonicalized for address context.  */
  mem_depth += 1;
  bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
  mem_depth -= 1;
  if (!res)
    return false;

  /* If the address changed and the caller asked for MEM checking,
     give check_mem a chance to veto the new address.  */
  if (old_num_changes != num_validated_changes ()
      && should_check_mems
      && !check_mem (old_num_changes, mem))
    return false;

  return true;
}
1049 :
/* Try to process the rvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_rvalue_1 (rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);

  auto old_num_changes = num_validated_changes ();
  /* First see whether X itself matches FROM; REGs compare by number,
     anything else by full rtx equality.  */
  if (from
      && GET_CODE (x) == GET_CODE (from)
      && (REG_P (x)
	  ? REGNO (x) == REGNO (from)
	  : rtx_equal_p (x, from)))
    {
      /* Don't replace register asms in asm statements; we mustn't
	 change the user's register allocation.  */
      if (REG_P (x)
	  && HARD_REGISTER_P (x)
	  && register_asm_p (x)
	  && asm_noperands (PATTERN (insn)) > 0)
	return false;

      rtx newval = to;
      if (GET_MODE (x) != GET_MODE (from))
	{
	  /* A mode mismatch is only expected for hard registers.  */
	  gcc_assert (REG_P (x) && HARD_REGISTER_P (x));
	  if (REG_NREGS (x) != REG_NREGS (from)
	      || !REG_CAN_CHANGE_MODE_P (REGNO (x), GET_MODE (from),
					 GET_MODE (x)))
	    return false;

	  /* If the reference is paradoxical and the replacement
	     value contains registers, we would need to check that the
	     simplification below does not increase REG_NREGS for those
	     registers either.  It seems simpler to punt on nonconstant
	     values instead.  */
	  if (paradoxical_subreg_p (GET_MODE (x), GET_MODE (from))
	      && !CONSTANT_P (to))
	    return false;

	  newval = simplify_subreg (GET_MODE (x), to, GET_MODE (from),
				    subreg_lowpart_offset (GET_MODE (x),
							   GET_MODE (from)));
	  if (!newval)
	    return false;

	  /* Check that the simplification didn't just push an explicit
	     subreg down into subexpressions.  In particular, for a register
	     R that has a fixed mode, such as the stack pointer, a subreg of:

	       (plus:M (reg:M R) (const_int C))

	     would be:

	       (plus:N (subreg:N (reg:M R) ...) (const_int C'))

	     But targets can legitimately assume that subregs of hard registers
	     will not be created after RA (except in special circumstances,
	     such as strict_low_part).  */
	  subrtx_iterator::array_type array;
	  FOR_EACH_SUBRTX (iter, array, newval, NONCONST)
	    if (GET_CODE (*iter) == SUBREG)
	      return false;
	}

      if (should_unshare)
	validate_unshare_change (insn, loc, newval, 1);
      else
	validate_change (insn, loc, newval, 1);
      if (mem_depth && !REG_P (newval) && !CONSTANT_P (newval))
	{
	  /* We're substituting into an address, but TO will have the
	     form expected outside an address.  Canonicalize it if
	     necessary.  */
	  insn_propagation subprop (insn);
	  subprop.mem_depth += 1;
	  if (!subprop.apply_to_rvalue (loc))
	    gcc_unreachable ();
	  if (should_unshare
	      && num_validated_changes () != old_num_changes + 1)
	    {
	      /* TO is owned by someone else, so create a copy and
		 return TO to its original form.  */
	      newval = copy_rtx (*loc);
	      cancel_changes (old_num_changes);
	      validate_change (insn, loc, newval, 1);
	    }
	}
      num_replacements += 1;
      should_unshare = true;
      result_flags |= UNSIMPLIFIED;
      return true;
    }

  /* Recursively apply the substitution and see if we can simplify
     the result.  This specifically shouldn't use simplify_gen_* for
     speculative simplifications, since we want to avoid generating new
     expressions where possible.  */
  auto old_result_flags = result_flags;
  rtx newx = NULL_RTX;
  bool recurse_p = false;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
	/* Capture the operand mode before substitution can change it.  */
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
	break;
      }

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      {
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	/* Re-canonicalize commutative operand order after substitution.  */
	if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	    && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
	  newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
	else
	  newx = simplify_binary_operation (code, mode,
					    XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
	/* Either operand may be VOIDmode (e.g. a constant); use whichever
	   operand has a real mode.  */
	machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
				? GET_MODE (XEXP (x, 0))
				: GET_MODE (XEXP (x, 1)));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_relational_operation (code, mode, op_mode,
					      XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      {
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1))
	    || !apply_to_rvalue_1 (&XEXP (x, 2)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_ternary_operation (code, mode, op0_mode,
					   XEXP (x, 0), XEXP (x, 1),
					   XEXP (x, 2));
	break;
      }

    case RTX_EXTRA:
      if (code == SUBREG)
	{
	  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
	  if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  rtx inner = SUBREG_REG (x);
	  newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
	  /* Reject the same cases that simplify_gen_subreg would.  */
	  if (!newx
	      && (GET_CODE (inner) == SUBREG
		  || GET_CODE (inner) == CONCAT
		  || GET_MODE (inner) == VOIDmode
		  || !validate_subreg (mode, inner_mode,
				       inner, SUBREG_BYTE (x))))
	    {
	      failure_reason = "would create an invalid subreg";
	      return false;
	    }
	  break;
	}
      else
	recurse_p = true;
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
	{
	  if (!apply_to_rvalue_1 (&XEXP (x, 0))
	      || !apply_to_rvalue_1 (&XEXP (x, 1)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  /* (lo_sum (high x) y) -> y where x and y have the same base.  */
	  rtx op0 = XEXP (x, 0);
	  rtx op1 = XEXP (x, 1);
	  if (GET_CODE (op0) == HIGH)
	    {
	      rtx base0, base1, offset0, offset1;
	      split_const (XEXP (op0, 0), &base0, &offset0);
	      split_const (op1, &base1, &offset1);
	      if (rtx_equal_p (base0, base1))
		newx = op1;
	    }
	}
      else if (code == REG)
	{
	  /* A register that overlaps FROM without being identical to it
	     was not caught by the replacement test above, so punt.  */
	  if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
	    {
	      failure_reason = "inexact register overlap";
	      return false;
	    }
	}
      else if (code == MEM)
	return apply_to_mem_1 (x);
      else
	recurse_p = true;
      break;

    case RTX_CONST_OBJ:
      break;

    case RTX_AUTOINC:
      /* The autoincremented register is both used and set; substituting
	 into it would be unsafe.  */
      if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
	{
	  failure_reason = "is subject to autoinc";
	  return false;
	}
      recurse_p = true;
      break;

    case RTX_MATCH:
    case RTX_INSN:
      gcc_unreachable ();
    }

  if (recurse_p)
    {
      /* Generic walk for codes with no specific simplification above.  */
      const char *fmt = GET_RTX_FORMAT (code);
      for (int i = 0; fmt[i]; i++)
	switch (fmt[i])
	  {
	  case 'E':
	    for (int j = 0; j < XVECLEN (x, i); j++)
	      if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
		return false;
	    break;

	  case 'e':
	    if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
	      return false;
	    break;
	  }
    }
  else if (newx && !rtx_equal_p (x, newx))
    {
      /* All substitutions made by OLD_NUM_CHANGES onwards have been
	 simplified.  */
      result_flags = ((result_flags & ~UNSIMPLIFIED)
		      | (old_result_flags & UNSIMPLIFIED));

      if (should_note_simplifications)
	note_simplification (old_num_changes, old_result_flags, x, newx);

      /* There's no longer any point unsharing the substitutions made
	 for subexpressions, since we'll just copy this one instead.  */
      bool unshare = false;
      for (int i = old_num_changes; i < num_changes; ++i)
	{
	  unshare |= changes[i].unshare;
	  changes[i].unshare = false;
	}
      if (unshare)
	validate_unshare_change (insn, loc, newx, 1);
      else
	validate_change (insn, loc, newx, 1);
    }

  return true;
}
1344 :
/* Try to process the lvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_lvalue_1 (rtx dest)
{
  rtx old_dest = dest;
  /* Strip wrappers around the real destination; ZERO_EXTRACT's position
     and length operands are rvalues and must be processed as such.  */
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    {
      if (GET_CODE (dest) == ZERO_EXTRACT
	  && (!apply_to_rvalue_1 (&XEXP (dest, 1))
	      || !apply_to_rvalue_1 (&XEXP (dest, 2))))
	return false;
      dest = XEXP (dest, 0);
    }

  /* A store to memory uses its address as an rvalue.  */
  if (MEM_P (dest))
    return apply_to_mem_1 (dest);

  /* Check whether the substitution is safe in the presence of this lvalue.  */
  if (!from
      || dest == old_dest
      || !REG_P (dest)
      || !reg_overlap_mentioned_p (dest, from))
    return true;

  /* A simple (non-read-modify) subreg write of the whole register is
     still a plain definition, so the overlap is harmless.  */
  if (SUBREG_P (old_dest)
      && SUBREG_REG (old_dest) == dest
      && !read_modify_subreg_p (old_dest))
    return true;

  failure_reason = "is part of a read-write destination";
  return false;
}
1381 :
/* Try to process the instruction pattern at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_pattern_1 (rtx *loc)
{
  rtx body = *loc;
  switch (GET_CODE (body))
    {
    case COND_EXEC:
      /* The condition is an rvalue; the guarded body is a pattern.  */
      return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
	      && apply_to_pattern_1 (&COND_EXEC_CODE (body)));

    case PARALLEL:
      for (int i = 0; i < XVECLEN (body, 0); ++i)
	{
	  rtx *subloc = &XVECEXP (body, 0, i);
	  if (GET_CODE (*subloc) == SET)
	    {
	      if (!apply_to_lvalue_1 (SET_DEST (*subloc)))
		return false;
	      /* ASM_OPERANDS are shared between SETs in the same PARALLEL.
		 Only process them on the first iteration.  */
	      if ((i == 0 || GET_CODE (SET_SRC (*subloc)) != ASM_OPERANDS)
		  && !apply_to_rvalue_1 (&SET_SRC (*subloc)))
		return false;
	    }
	  else
	    {
	      if (!apply_to_pattern_1 (subloc))
		return false;
	    }
	}
      return true;

    case ASM_OPERANDS:
      /* Only the asm's inputs are rvalues to substitute into.  */
      for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
	if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
	  return false;
      return true;

    case CLOBBER:
      return apply_to_lvalue_1 (XEXP (body, 0));

    case SET:
      return (apply_to_lvalue_1 (SET_DEST (body))
	      && apply_to_rvalue_1 (&SET_SRC (body)));

    default:
      /* All the other possibilities never store and can use a normal
	 rtx walk.  This includes:

	 - USE
	 - TRAP_IF
	 - PREFETCH
	 - UNSPEC
	 - UNSPEC_VOLATILE.  */
      return apply_to_rvalue_1 (loc);
    }
}
1442 :
1443 : /* Apply this insn_propagation object's simplification or substitution
1444 : to the instruction pattern at LOC. */
1445 :
1446 : bool
1447 61324492 : insn_propagation::apply_to_pattern (rtx *loc)
1448 : {
1449 61324492 : unsigned int num_changes = num_validated_changes ();
1450 61324492 : bool res = apply_to_pattern_1 (loc);
1451 61324492 : if (!res)
1452 2370882 : cancel_changes (num_changes);
1453 61324492 : return res;
1454 : }
1455 :
1456 : /* Apply this insn_propagation object's simplification or substitution
1457 : to the rvalue expression at LOC. */
1458 :
1459 : bool
1460 7356351 : insn_propagation::apply_to_rvalue (rtx *loc)
1461 : {
1462 7356351 : unsigned int num_changes = num_validated_changes ();
1463 7356351 : bool res = apply_to_rvalue_1 (loc);
1464 7356351 : if (!res)
1465 19320 : cancel_changes (num_changes);
1466 7356351 : return res;
1467 : }
1468 :
1469 : /* Like apply_to_rvalue, but specifically for the case where *LOC is in
1470 : a note. This never changes the INSN_CODE. */
1471 :
1472 : bool
1473 184588 : insn_propagation::apply_to_note (rtx *loc)
1474 : {
1475 184588 : auto old_code = INSN_CODE (insn);
1476 184588 : bool res = apply_to_rvalue (loc);
1477 184588 : if (INSN_CODE (insn) != old_code)
1478 87098 : INSN_CODE (insn) = old_code;
1479 184588 : return res;
1480 : }
1481 :
1482 : /* Check whether INSN matches a specific alternative of an .md pattern. */
1483 :
1484 : bool
1485 0 : valid_insn_p (rtx_insn *insn)
1486 : {
1487 0 : recog_memoized (insn);
1488 0 : if (INSN_CODE (insn) < 0)
1489 : return false;
1490 0 : extract_insn (insn);
1491 : /* We don't know whether the insn will be in code that is optimized
1492 : for size or speed, so consider all enabled alternatives. */
1493 0 : if (!constrain_operands (1, get_enabled_alternatives (insn)))
1494 : return false;
1495 : return true;
1496 : }
1497 :
/* Return true if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  /* A CONST_INT must already be in canonical (sign-extended) form
     for MODE.  */
  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return false;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && paradoxical_subreg_p (op))
	return false;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed
	  && maybe_ne (SUBREG_BYTE (op), 0)
	  && MEM_P (sub))
	return false;

      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return false;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && paradoxical_subreg_p (op))
	return false;

      /* Continue the checks below on the SUBREG's operand.  */
      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      /* If -ffuse-ops-with-volatile-access is enabled, allow volatile
	 memory reference.  */
      if (!flag_fuse_ops_with_volatile_access
	  && !volatile_ok
	  && MEM_VOLATILE_P (op))
	return false;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return true;
    }

  return false;
}
1623 :
1624 : /* Return true if OP is a valid memory address for a memory reference
1625 : of mode MODE.
1626 :
1627 : The main use of this function is as a predicate in match_operand
1628 : expressions in the machine description. */
1629 :
1630 : bool
1631 111534326 : address_operand (rtx op, machine_mode mode)
1632 : {
1633 : /* Wrong mode for an address expr. */
1634 111534326 : if (GET_MODE (op) != VOIDmode
1635 99300021 : && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1636 : return false;
1637 :
1638 110679578 : return memory_address_p (mode, op);
1639 : }
1640 :
/* Return true if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return false;
    }
  else if (!REG_P (op))
    return false;
  /* Mode and other validity checks are shared with general_operand.  */
  return general_operand (op, mode);
}
1667 :
/* Return true for a register in Pmode; ignore the tested mode.
   Used for operands that are always pointer-sized regardless of the
   mode requested by the pattern.  */

bool
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
1675 :
1676 : /* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1677 : or a hard register. */
1678 :
1679 : bool
1680 701637 : scratch_operand (rtx op, machine_mode mode)
1681 : {
1682 701637 : if (GET_MODE (op) != mode && mode != VOIDmode)
1683 : return false;
1684 :
1685 663372 : return (GET_CODE (op) == SCRATCH
1686 663372 : || (REG_P (op)
1687 89060 : && (lra_in_progress
1688 71755 : || (REGNO (op) < FIRST_PSEUDO_REGISTER
1689 69582 : && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1690 : }
1691 :
/* Return true if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  /* A CONST_INT must already be in canonical (sign-extended) form
     for MODE.  */
  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  /* Mirror the constant arm of general_operand: mode compatibility,
     PIC legitimacy, and the target's own constant check.  */
  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}
1720 :
1721 : /* Return true if OP is an operand that is a CONST_INT of mode MODE. */
1722 :
1723 : bool
1724 34059882 : const_int_operand (rtx op, machine_mode mode)
1725 : {
1726 34059882 : if (!CONST_INT_P (op))
1727 : return false;
1728 :
1729 28094093 : if (mode != VOIDmode
1730 28094093 : && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1731 : return false;
1732 :
1733 : return true;
1734 : }
1735 :
1736 : #if TARGET_SUPPORTS_WIDE_INT
/* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
bool
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return false;

  /* Single-HWI constants are handled by the CONST_INT predicate.  */
  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      /* The wide int must not have more HWI elements than MODE can hold.  */
      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return false;

      if (prec == bitsize)
	return true;
      else
	{
	  /* Multiword partial int.  */
	  /* The top element must be canonically sign-extended to the
	     mode's precision within its containing HWI.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return true;
}
1769 :
/* Return true if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

bool
const_double_operand (rtx op, machine_mode mode)
{
  /* With TARGET_SUPPORTS_WIDE_INT, CONST_DOUBLE is only used for
     floating-point values, so a plain code and mode check suffices.  */
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
1779 : #else
/* Return true if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

bool
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  /* Without wide-int support, CONST_DOUBLE also carries large integers,
     so CONST_INT is accepted here too.  */
  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
1797 : #endif
1798 : /* Return true if OP is a general operand that is not an immediate
1799 : operand of mode MODE. */
1800 :
1801 : bool
1802 1905211021 : nonimmediate_operand (rtx op, machine_mode mode)
1803 : {
1804 1905211021 : return (general_operand (op, mode) && ! CONSTANT_P (op));
1805 : }
1806 :
1807 : /* Return true if OP is a register reference or
1808 : immediate value of mode MODE. */
1809 :
1810 : bool
1811 523176388 : nonmemory_operand (rtx op, machine_mode mode)
1812 : {
1813 523176388 : if (CONSTANT_P (op))
1814 31382092 : return immediate_operand (op, mode);
1815 491794296 : return register_operand (op, mode);
1816 : }
1817 :
1818 : /* Return true if OP is a valid operand that stands for pushing a
1819 : value of mode MODE onto the stack.
1820 :
1821 : The main use of this function is as a predicate in match_operand
1822 : expressions in the machine description. */
1823 :
bool
push_operand (rtx op, machine_mode mode)
{
  /* A push is necessarily a memory reference...  */
  if (!MEM_P (op))
    return false;

  /* ...of the requested mode (VOIDmode matches anything).  */
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  /* Targets may push in larger units than the mode's own size.  */
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  /* From here on OP is the address inside the MEM.  */
  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      /* No padding: the address must be the target's canonical push
	 expression (STACK_PUSH_CODE, e.g. PRE_DEC).  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return false;
    }
  else
    {
      /* Padding needed: only (pre_modify sp (plus sp +/-ROUNDED_SIZE))
	 can express the rounded adjustment; the sign of the offset
	 depends on the stack growth direction.  */
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
	  || (STACK_GROWS_DOWNWARD
	      ? maybe_ne (offset, -rounded_size)
	      : maybe_ne (offset, rounded_size)))
	return false;
    }

  /* Finally, the auto-modified register must be the stack pointer.  */
  return XEXP (op, 0) == stack_pointer_rtx;
}
1861 :
1862 : /* Return true if OP is a valid operand that stands for popping a
1863 : value of mode MODE off the stack.
1864 :
1865 : The main use of this function is as a predicate in match_operand
1866 : expressions in the machine description. */
1867 :
1868 : bool
1869 304955510 : pop_operand (rtx op, machine_mode mode)
1870 : {
1871 304955510 : if (!MEM_P (op))
1872 : return false;
1873 :
1874 75539075 : if (mode != VOIDmode && GET_MODE (op) != mode)
1875 : return false;
1876 :
1877 75539075 : op = XEXP (op, 0);
1878 :
1879 75539075 : if (GET_CODE (op) != STACK_POP_CODE)
1880 : return false;
1881 :
1882 1362365 : return XEXP (op, 0) == stack_pointer_rtx;
1883 : }
1884 :
1885 : /* Return true if ADDR is a valid memory address
1886 : for mode MODE in address space AS. */
1887 :
bool
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED, rtx addr,
			     addr_space_t as, code_helper ch ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Legacy targets define GO_IF_LEGITIMATE_ADDRESS, which jumps to the
     given label (WIN) when the address is valid and falls through
     otherwise.  Such targets only support the generic address space.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return false;

 win:
  return true;
#else
  /* Modern targets answer through the addr_space hook; the 0 argument
     requests a non-strict (pre-reload) check.  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as, ch);
#endif
}
1903 :
1904 : /* Return true if OP is a valid memory reference with mode MODE,
1905 : including a valid address.
1906 :
1907 : The main use of this function is as a predicate in match_operand
1908 : expressions in the machine description. */
1909 :
1910 : bool
1911 1216084160 : memory_operand (rtx op, machine_mode mode)
1912 : {
1913 1216084160 : rtx inner;
1914 :
1915 1216084160 : if (! reload_completed)
1916 : /* Note that no SUBREG is a memory operand before end of reload pass,
1917 : because (SUBREG (MEM...)) forces reloading into a register. */
1918 121684765 : return MEM_P (op) && general_operand (op, mode);
1919 :
1920 1094399395 : if (mode != VOIDmode && GET_MODE (op) != mode)
1921 : return false;
1922 :
1923 800621097 : inner = op;
1924 800621097 : if (GET_CODE (inner) == SUBREG)
1925 7769 : inner = SUBREG_REG (inner);
1926 :
1927 800621097 : return (MEM_P (inner) && general_operand (op, mode));
1928 : }
1929 :
1930 : /* Return true if OP is a valid indirect memory reference with mode MODE;
1931 : that is, a memory reference whose address is a general_operand. */
1932 :
bool
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return false;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      /* The stripped constant offset must exactly cancel the subreg's
	 byte offset, and what remains must itself be a valid operand.  */
      return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
    }

  /* Otherwise OP must be a valid memory operand whose address is itself
     a general operand.  */
  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1957 :
1958 : /* Return true if this is an ordered comparison operator (not including
1959 : ORDERED and UNORDERED). */
1960 :
1961 : bool
1962 28162549 : ordered_comparison_operator (rtx op, machine_mode mode)
1963 : {
1964 28162549 : if (mode != VOIDmode && GET_MODE (op) != mode)
1965 : return false;
1966 28162549 : switch (GET_CODE (op))
1967 : {
1968 : case EQ:
1969 : case NE:
1970 : case LT:
1971 : case LTU:
1972 : case LE:
1973 : case LEU:
1974 : case GT:
1975 : case GTU:
1976 : case GE:
1977 : case GEU:
1978 : return true;
1979 : default:
1980 : return false;
1981 : }
1982 : }
1983 :
1984 : /* Return true if this is a comparison operator. This allows the use of
1985 : MATCH_OPERATOR to recognize all the branch insns. */
1986 :
1987 : bool
1988 113827520 : comparison_operator (rtx op, machine_mode mode)
1989 : {
1990 4503222 : return ((mode == VOIDmode || GET_MODE (op) == mode)
1991 117955187 : && COMPARISON_P (op));
1992 : }
1993 :
1994 : /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1995 :
1996 : rtx
1997 2040928013 : extract_asm_operands (rtx body)
1998 : {
1999 2040928013 : rtx tmp;
2000 2040928013 : switch (GET_CODE (body))
2001 : {
2002 : case ASM_OPERANDS:
2003 : return body;
2004 :
2005 1571694101 : case SET:
2006 : /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
2007 1571694101 : tmp = SET_SRC (body);
2008 1571694101 : if (GET_CODE (tmp) == ASM_OPERANDS)
2009 : return tmp;
2010 : break;
2011 :
2012 338945368 : case PARALLEL:
2013 338945368 : tmp = XVECEXP (body, 0, 0);
2014 338945368 : if (GET_CODE (tmp) == ASM_OPERANDS)
2015 : return tmp;
2016 336678316 : if (GET_CODE (tmp) == SET)
2017 : {
2018 332341571 : tmp = SET_SRC (tmp);
2019 332341571 : if (GET_CODE (tmp) == ASM_OPERANDS)
2020 : return tmp;
2021 : }
2022 : break;
2023 :
2024 : default:
2025 : break;
2026 : }
2027 2035048810 : return NULL;
2028 : }
2029 :
2030 : /* If BODY is an insn body that uses ASM_OPERANDS,
2031 : return the number of operands (both input and output) in the insn.
2032 : If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2033 : return 0.
2034 : Otherwise return -1. */
2035 :
int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (const_cast<rtx> (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      /* No ASM_OPERANDS found; the only remaining asm shape is a basic
	 asm (ASM_INPUT) wrapped with clobbers.  */
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	{
	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	  return 0;
	}
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))...
	      (use (reg ...))...
	      (clobber (reg ...))...].  */
	  /* Count backwards through USEs and CLOBBERs to determine
	     number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != USE
		  && GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...)
		      (use (reg ...))...
		      (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != USE
		&& GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  /* Total operand count: inputs + labels + outputs.  */
  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
2115 :
2116 : /* Assuming BODY is an insn body that uses ASM_OPERANDS,
2117 : copy its operands (both input and output) into the vector OPERANDS,
2118 : the locations of the operands within the insn into the vector OPERAND_LOCS,
2119 : and the constraints for the operands into CONSTRAINTS.
2120 : Write the modes of the operands into MODES.
2121 : Write the location info into LOC.
2122 : Return the assembler-template.
2123 : If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2124 : return the basic assembly string.
2125 :
2126 : If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
2127 : we don't store that info. */
2128 :
const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  /* NBASE counts the output operands already recorded; inputs and
     labels are appended after them.  */
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == USE
		    || GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;	/* Past last SET */
		gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	else if (GET_CODE (asmop) == ASM_INPUT)
	  {
	    /* Basic asm with clobbers: no operands to decode, just return
	       the assembly string.  */
	    if (loc)
	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
	    return XSTR (asmop, 0);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Append the input operands after the outputs.  */
  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  /* Finally the goto labels (asm goto); they get empty constraints
     and Pmode.  */
  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
2234 :
2235 : /* Parse inline assembly string STRING and determine which operands are
2236 : referenced by % markers. For the first NOPERANDS operands, set USED[I]
2237 : to true if operand I is referenced.
2238 :
2239 : This is intended to distinguish barrier-like asms such as:
2240 :
2241 : asm ("" : "=m" (...));
2242 :
2243 : from real references such as:
2244 :
2245 : asm ("sw\t$0, %0" : "=m" (...)); */
2246 :
2247 : void
2248 0 : get_referenced_operands (const char *string, bool *used,
2249 : unsigned int noperands)
2250 : {
2251 0 : memset (used, 0, sizeof (bool) * noperands);
2252 0 : const char *p = string;
2253 0 : while (*p)
2254 0 : switch (*p)
2255 : {
2256 0 : case '%':
2257 0 : p += 1;
2258 : /* A letter followed by a digit indicates an operand number. */
2259 0 : if (ISALPHA (p[0]) && ISDIGIT (p[1]))
2260 0 : p += 1;
2261 0 : if (ISDIGIT (*p))
2262 : {
2263 0 : char *endptr;
2264 0 : unsigned long opnum = strtoul (p, &endptr, 10);
2265 0 : if (endptr != p && opnum < noperands)
2266 0 : used[opnum] = true;
2267 0 : p = endptr;
2268 : }
2269 : else
2270 0 : p += 1;
2271 : break;
2272 :
2273 0 : default:
2274 0 : p++;
2275 0 : break;
2276 : }
2277 0 : }
2278 :
2279 : /* Check if an asm_operand matches its constraints.
2280 : Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
2281 :
int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  /* RESULT stays 0 (bad) until some alternative accepts OP; a matching
     constraint with no CONSTRAINTS array can force it to -1
     (inconclusive).  */
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  /* Alternative separator; raw mode ('-') does not carry over.  */
	  raw_constraint_p = false;
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	case '-':
	  /* Prefix that marks the following constraint as "raw".  */
	  raw_constraint_p = true;
	  constraint++;
	  continue;

	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
	  /* FALLTHRU */
	default:
	  /* Machine-description-defined constraint: classify it and
	     test OP accordingly.  */
	  cn = lookup_constraint (constraint);
	  rtx mem = NULL;
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && (reg_class_for_constraint (cn) != NO_REGS
		      || constraint[0] == '{')
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	    case CT_RELAXED_MEMORY:
	      mem = op;
	      /* Fall through.  */
	    case CT_SPECIAL_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      if (!mem)
		mem = extract_mem_from_operand (op);
	      result = result || memory_operand (mem, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      /* Advance past the (possibly multi-character) constraint just
	 handled; a non-zero LEN left over means the constraint string
	 ended mid-constraint, which we treat as a failure.  */
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
	{
	  raw_constraint_p = false;
	  return 0;
	}
    }
  raw_constraint_p = false;

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }

  return result;
}
2442 :
2443 : /* Given an rtx *P, if it is a sum containing an integer constant term,
2444 : return the location (type rtx *) of the pointer to that constant term.
2445 : Otherwise, return a null pointer. */
2446 :
2447 : rtx *
2448 41697953 : find_constant_term_loc (rtx *p)
2449 : {
2450 41697953 : rtx *tem;
2451 41697953 : enum rtx_code code = GET_CODE (*p);
2452 :
2453 : /* If *P IS such a constant term, P is its location. */
2454 :
2455 41697953 : if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2456 29828777 : || code == CONST)
2457 : return p;
2458 :
2459 : /* Otherwise, if not a sum, it has no constant term. */
2460 :
2461 29781581 : if (GET_CODE (*p) != PLUS)
2462 : return 0;
2463 :
2464 : /* If one of the summands is constant, return its location. */
2465 :
2466 13928779 : if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2467 0 : && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2468 : return p;
2469 :
2470 : /* Otherwise, check each summand for containing a constant term. */
2471 :
2472 13928779 : if (XEXP (*p, 0) != 0)
2473 : {
2474 13928779 : tem = find_constant_term_loc (&XEXP (*p, 0));
2475 13928779 : if (tem != 0)
2476 : return tem;
2477 : }
2478 :
2479 13928779 : if (XEXP (*p, 1) != 0)
2480 : {
2481 13928779 : tem = find_constant_term_loc (&XEXP (*p, 1));
2482 13928779 : if (tem != 0)
2483 : return tem;
2484 : }
2485 :
2486 : return 0;
2487 : }
2488 :
2489 : /* Return true if OP is a memory reference whose address contains
2490 : no side effects and remains valid after the addition of a positive
2491 : integer less than the size of the object being referenced.
2492 :
2493 : We assume that the original address is valid and do not check it.
2494 :
2495 : This uses strict_memory_address_p as a subroutine, so
2496 : don't use it before reload. */
2497 :
2498 : bool
2499 5632279 : offsettable_memref_p (rtx op)
2500 : {
2501 5632279 : return ((MEM_P (op))
2502 11259099 : && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2503 5626820 : MEM_ADDR_SPACE (op)));
2504 : }
2505 :
2506 : /* Similar, but don't require a strictly valid mem ref:
2507 : consider pseudo-regs valid as index or base regs. */
2508 :
2509 : bool
2510 12156853 : offsettable_nonstrict_memref_p (rtx op)
2511 : {
2512 12156853 : return ((MEM_P (op))
2513 24313672 : && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2514 12156819 : MEM_ADDR_SPACE (op)));
2515 : }
2516 :
2517 : /* Return true if Y is a memory address which contains no side effects
2518 : and would remain valid for address space AS after the addition of
2519 : a positive integer less than the size of that mode.
2520 :
2521 : We assume that the original address is valid and do not check it.
2522 : We do check that it is valid for narrower modes.
2523 :
2524 : If STRICTP is nonzero, we require a strictly valid address,
2525 : for the sake of use in reload.cc. */
2526 :
bool
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Pick the strict or non-strict address validator up front.  */
  bool (*addressp) (machine_mode, rtx, addr_space_t, code_helper) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  /* A constant address is offsettable by construction.  */
  if (CONSTANT_ADDRESS_P (y))
    return true;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return false;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      bool good;

      /* Temporarily bump the constant term by MODE_SZ - 1 in place,
	 test the modified address, then restore the original rtx.  */
      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as, ERROR_MARK);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Auto-increment addresses have side effects; they are never
     offsettable.  */
  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return false;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as, ERROR_MARK);
}
2611 :
2612 : /* Return true if ADDR is an address-expression whose effect depends
2613 : on the mode of the memory reference it is used in.
2614 :
2615 : ADDRSPACE is the address space associated with the address.
2616 :
2617 : Autoincrement addressing is a typical example of mode-dependence
2618 : because the amount of the increment depends on the mode. */
2619 :
2620 : bool
2621 40115141 : mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2622 : {
2623 : /* Auto-increment addressing with anything other than post_modify
2624 : or pre_modify always introduces a mode dependency. Catch such
2625 : cases now instead of deferring to the target. */
2626 40115141 : if (GET_CODE (addr) == PRE_INC
2627 40115141 : || GET_CODE (addr) == POST_INC
2628 40115135 : || GET_CODE (addr) == PRE_DEC
2629 36341585 : || GET_CODE (addr) == POST_DEC)
2630 : return true;
2631 :
2632 36341585 : return targetm.mode_dependent_address_p (addr, addrspace);
2633 : }
2634 :
2635 : /* Return true if boolean attribute ATTR is supported. */
2636 :
2637 : static bool
2638 1626623028 : have_bool_attr (bool_attr attr)
2639 : {
2640 1626623028 : switch (attr)
2641 : {
2642 : case BA_ENABLED:
2643 : return HAVE_ATTR_enabled;
2644 : case BA_PREFERRED_FOR_SIZE:
2645 : return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2646 : case BA_PREFERRED_FOR_SPEED:
2647 : return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2648 : }
2649 0 : gcc_unreachable ();
2650 : }
2651 :
2652 : /* Return the value of ATTR for instruction INSN. */
2653 :
2654 : static bool
2655 1701511032 : get_bool_attr (rtx_insn *insn, bool_attr attr)
2656 : {
2657 1701511032 : switch (attr)
2658 : {
2659 721221185 : case BA_ENABLED:
2660 721221185 : return get_attr_enabled (insn);
2661 352688576 : case BA_PREFERRED_FOR_SIZE:
2662 352688576 : return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2663 627601271 : case BA_PREFERRED_FOR_SPEED:
2664 627601271 : return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2665 : }
2666 0 : gcc_unreachable ();
2667 : }
2668 :
2669 : /* Like get_bool_attr_mask, but don't use the cache. */
2670 :
static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx_insn *old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  /* Start with all alternatives allowed and clear the bit of every
     alternative for which the attribute is false.  */
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
	mask &= ~ALTERNATIVE_BIT (i);
    }

  /* Restore the globals we clobbered above.  */
  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}
2695 :
2696 : /* Return the mask of operand alternatives that are allowed for INSN
2697 : by boolean attribute ATTR. This mask depends only on INSN and on
2698 : the current target; it does not depend on things like the values of
2699 : operands. */
2700 :
static alternative_mask
get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
{
  /* Quick exit for asms and for targets that don't use these attributes.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !have_bool_attr (attr))
    return ALL_ALTERNATIVES;

  /* Calling get_attr_<foo> can be expensive, so cache the mask
     for speed.  A zero cache entry means "not computed yet"; note this
     relies on the computed mask never being zero.  */
  if (!this_target_recog->x_bool_attr_masks[code][attr])
    this_target_recog->x_bool_attr_masks[code][attr]
      = get_bool_attr_mask_uncached (insn, attr);
  return this_target_recog->x_bool_attr_masks[code][attr];
}
2716 :
2717 : /* Return the set of alternatives of INSN that are allowed by the current
2718 : target. */
2719 :
alternative_mask
get_enabled_alternatives (rtx_insn *insn)
{
  /* "Enabled" is the target's BA_ENABLED boolean attribute; the mask
     has one bit per constraint alternative.  */
  return get_bool_attr_mask (insn, BA_ENABLED);
}
2725 :
2726 : /* Return the set of alternatives of INSN that are allowed by the current
2727 : target and are preferred for the current size/speed optimization
2728 : choice. */
2729 :
2730 : alternative_mask
2731 463133872 : get_preferred_alternatives (rtx_insn *insn)
2732 : {
2733 463133872 : if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2734 406343491 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2735 : else
2736 56790381 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2737 : }
2738 :
2739 : /* Return the set of alternatives of INSN that are allowed by the current
2740 : target and are preferred for the size/speed optimization choice
2741 : associated with BB. Passing a separate BB is useful if INSN has not
2742 : been emitted yet or if we are considering moving it to a different
2743 : block. */
2744 :
2745 : alternative_mask
2746 86480 : get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2747 : {
2748 86480 : if (optimize_bb_for_speed_p (bb))
2749 81650 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2750 : else
2751 4830 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2752 : }
2753 :
2754 : /* Assert that the cached boolean attributes for INSN are still accurate.
2755 : The backend is required to define these attributes in a way that only
2756 : depends on the current target (rather than operands, compiler phase,
2757 : etc.). */
2758 :
2759 : bool
2760 36051456 : check_bool_attrs (rtx_insn *insn)
2761 : {
2762 36051456 : int code = INSN_CODE (insn);
2763 36051456 : if (code >= 0)
2764 144205824 : for (int i = 0; i <= BA_LAST; ++i)
2765 : {
2766 108154368 : enum bool_attr attr = (enum bool_attr) i;
2767 108154368 : if (this_target_recog->x_bool_attr_masks[code][attr])
2768 89153702 : gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2769 : == get_bool_attr_mask_uncached (insn, attr));
2770 : }
2771 36051456 : return true;
2772 : }
2773 :
2774 : /* Like extract_insn, but save insn extracted and don't extract again, when
2775 : called again for the same insn expecting that recog_data still contain the
2776 : valid information. This is used primary by gen_attr infrastructure that
2777 : often does extract insn again and again. */
2778 : void
2779 10345334080 : extract_insn_cached (rtx_insn *insn)
2780 : {
2781 10345334080 : if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2782 : return;
2783 761151290 : extract_insn (insn);
2784 761151290 : recog_data.insn = insn;
2785 : }
2786 :
2787 : /* Do uncached extract_insn, constrain_operands and complain about failures.
2788 : This should be used when extracting a pre-existing constrained instruction
2789 : if the caller wants to know which alternative was chosen. */
2790 : void
2791 262460690 : extract_constrain_insn (rtx_insn *insn)
2792 : {
2793 262460690 : extract_insn (insn);
2794 262460690 : if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2795 0 : fatal_insn_not_found (insn);
2796 262460690 : }
2797 :
2798 : /* Do cached extract_insn, constrain_operands and complain about failures.
2799 : Used by insn_attrtab. */
2800 : void
2801 9148471708 : extract_constrain_insn_cached (rtx_insn *insn)
2802 : {
2803 9148471708 : extract_insn_cached (insn);
2804 9148471708 : if (which_alternative == -1
2805 9148471708 : && !constrain_operands (reload_completed,
2806 : get_enabled_alternatives (insn)))
2807 0 : fatal_insn_not_found (insn);
2808 9148471708 : }
2809 :
2810 : /* Do cached constrain_operands on INSN and complain about failures. */
2811 : bool
2812 336130043 : constrain_operands_cached (rtx_insn *insn, int strict)
2813 : {
2814 336130043 : if (which_alternative == -1)
2815 92000683 : return constrain_operands (strict, get_enabled_alternatives (insn));
2816 : else
2817 : return true;
2818 : }
2819 :
2820 : /* Analyze INSN and fill in recog_data. */
2821 :
void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  /* Start from a clean slate; the USE/CLOBBER/... cases below return
     with these zeroed fields as the final answer.  */
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    /* These patterns have no operands of their own to extract.  */
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
    case DEBUG_MARKER:
      return;

    case SET:
      /* A SET whose source is an ASM_OPERANDS is an asm statement.  */
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      /* A PARALLEL is an asm if its first element is (a SET from) an
	 ASM_OPERANDS or an ASM_INPUT; otherwise treat it normally.  */
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      /* The number of alternatives is one more than the number
		 of commas in the first operand's constraint string.  */
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      /* asm_noperands failed: malformed asm.  */
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      /* Operand counts and constraints come from the static tables
	 generated from the machine description.  */
      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  /* Classify each operand as input, output, or in-out from the first
     character of its constraint string ('=' / '+' / other).  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  /* Invalidate the extraction cache and mark that no alternative has
     been chosen yet; constrain_operands will set which_alternative.  */
  recog_data.insn = NULL;
  which_alternative = -1;
}
2923 :
2924 : /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2925 : operands, N_ALTERNATIVES alternatives and constraint strings
2926 : CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2927 : and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2928 : if the insn is an asm statement and preprocessing should take the
2929 : asm operands into account, e.g. to determine whether they could be
2930 : addresses in constraints that require addresses; it should then
2931 : point to an array of pointers to each operand. */
2932 :
void
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
			operand_alternative *op_alt_base,
			rtx **oploc)
{
  /* OP_ALT_BASE is laid out alternative-major: entry for operand I in
     alternative J lives at op_alt_base[J * n_operands + I].  Walk each
     operand's constraint string once, advancing P across the comma-
     separated alternatives as we go.  */
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
	{
	  /* Reset the per-(operand, alternative) summary fields.  */
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].register_filters = 0;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;

	  /* An empty constraint (or an exhausted constraint string,
	     when there are more alternatives than segments) accepts
	     anything.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[i].anything_ok = 1;
	      continue;
	    }

	  /* Scan one alternative's segment of the constraint string.  */
	  for (;;)
	    {
	      char c = *p;
	      /* '#' disables the rest of this alternative.  */
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '?':
		  /* Mildly disparaged alternative.  */
		  op_alt[i].reject += 6;
		  break;
		case '!':
		  /* Severely disparaged alternative.  */
		  op_alt[i].reject += 600;
		  break;
		case '&':
		  op_alt[i].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    /* Matching constraint: this operand must equal
		       operand MATCHES; record the link both ways.  */
		    char *end;
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
		    p = end;
		  }
		  /* strtoul already advanced P past the digits.  */
		  continue;

		case 'X':
		  op_alt[i].anything_ok = 1;
		  break;

		case 'g':
		  op_alt[i].cl =
		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
		  break;

		default:
		  /* Target-defined constraint: classify it and fold its
		     effect into the summary fields.  */
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl;
		  switch (get_constraint_type (cn))
		    {
		    case CT_REGISTER:
		      cl = reg_class_for_constraint (cn);
		      if (cl != NO_REGS)
			{
			  op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
			  auto filter_id = get_register_filter_id (cn);
			  if (filter_id >= 0)
			    op_alt[i].register_filters |= 1U << filter_id;
			}
		      break;

		    case CT_CONST_INT:
		      break;

		    case CT_MEMORY:
		    case CT_SPECIAL_MEMORY:
		    case CT_RELAXED_MEMORY:
		      op_alt[i].memory_ok = 1;
		      break;

		    case CT_ADDRESS:
		      /* For asms, only count the constraint as an
			 address if the actual operand looks like one.  */
		      if (oploc && !address_operand (*oploc[i], VOIDmode))
			break;

		      op_alt[i].is_address = 1;
		      op_alt[i].cl
			= (reg_class_subunion
			   [(int) op_alt[i].cl]
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
		      break;

		    case CT_FIXED_FORM:
		      break;
		    }
		  break;
		}
	      /* Advance past this (possibly multi-character) constraint.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
3052 :
3053 : /* Return an array of operand_alternative instructions for
3054 : instruction ICODE. */
3055 :
3056 : const operand_alternative *
3057 289464909 : preprocess_insn_constraints (unsigned int icode)
3058 : {
3059 289464909 : gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
3060 289464909 : if (this_target_recog->x_op_alt[icode])
3061 : return this_target_recog->x_op_alt[icode];
3062 :
3063 5422907 : int n_operands = insn_data[icode].n_operands;
3064 5422907 : if (n_operands == 0)
3065 : return 0;
3066 : /* Always provide at least one alternative so that which_op_alt ()
3067 : works correctly. If the instruction has 0 alternatives (i.e. all
3068 : constraint strings are empty) then each operand in this alternative
3069 : will have anything_ok set. */
3070 2939619 : int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
3071 2939619 : int n_entries = n_operands * n_alternatives;
3072 :
3073 2939619 : operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
3074 2939619 : const char **constraints = XALLOCAVEC (const char *, n_operands);
3075 :
3076 9975469 : for (int i = 0; i < n_operands; ++i)
3077 7035850 : constraints[i] = insn_data[icode].operand[i].constraint;
3078 2939619 : preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
3079 : NULL);
3080 :
3081 2939619 : this_target_recog->x_op_alt[icode] = op_alt;
3082 2939619 : return op_alt;
3083 : }
3084 :
3085 : /* After calling extract_insn, you can use this function to extract some
3086 : information from the constraint strings into a more usable form.
3087 : The collected data is stored in recog_op_alt. */
3088 :
3089 : void
3090 197245016 : preprocess_constraints (rtx_insn *insn)
3091 : {
3092 197245016 : int icode = INSN_CODE (insn);
3093 197245016 : if (icode >= 0)
3094 195490810 : recog_op_alt = preprocess_insn_constraints (icode);
3095 : else
3096 : {
3097 1754206 : int n_operands = recog_data.n_operands;
3098 1754206 : int n_alternatives = recog_data.n_alternatives;
3099 1754206 : int n_entries = n_operands * n_alternatives;
3100 1754206 : memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
3101 1754206 : preprocess_constraints (n_operands, n_alternatives,
3102 : recog_data.constraints, asm_op_alt,
3103 : NULL);
3104 1754206 : recog_op_alt = asm_op_alt;
3105 : }
3106 197245016 : }
3107 :
3108 : /* Check the operands of an insn against the insn's operand constraints
3109 : and return 1 if they match any of the alternatives in ALTERNATIVES.
3110 :
3111 : The information about the insn's operands, constraints, operand modes
3112 : etc. is obtained from the global variables set up by extract_insn.
3113 :
3114 : WHICH_ALTERNATIVE is set to a number which indicates which
3115 : alternative of constraints was matched: 0 for the first alternative,
3116 : 1 for the next, etc.
3117 :
3118 : In addition, when two operands are required to match
3119 : and it happens that the output operand is (reg) while the
3120 : input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
3121 : make the output operand look like the input.
3122 : This is because the output operand is the one the template will print.
3123 :
3124 : This is used in final, just before printing the assembler code and by
3125 : the routines that determine an insn's attribute.
3126 :
3127 : If STRICT is a positive nonzero value, it means that we have been
3128 : called after reload has been completed. In that case, we must
3129 : do all checks strictly. If it is zero, it means that we have been called
3130 : before reload has completed. In that case, we first try to see if we can
3131 : find an alternative that matches strictly. If not, we try again, this
3132 : time assuming that reload will fix up the insn. This provides a "best
3133 : guess" for the alternative and is used to compute attributes of insns prior
3134 : to reload. A negative value of STRICT is used for this internal call. */
3135 :
/* Record of a matched operand pair (THIS_OP matches constraint-number
   OTHER) whose rtx must be copied after a successful match; used by
   constrain_operands for the *x vs. *--x output/input fix-up.  */
struct funny_match
{
  int this_op, other;
};
3140 :
bool
constrain_operands (int strict, alternative_mask alternatives)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  /* No operands or no alternatives: trivially matched.  */
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return true;

  /* Private cursors into each operand's constraint string; they are
     advanced one alternative segment per loop iteration.  */
  for (c = 0; c < recog_data.n_operands; c++)
    constraints[c] = recog_data.constraints[c];

  /* Try each alternative in turn until one matches.  */
  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      bool lose = false;
      funny_match_index = 0;

      /* Alternatives not in ALTERNATIVES (e.g. disabled for this
	 target) are skipped without testing their operands.  */
      if (!TEST_BIT (alternatives, which_alternative))
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	matching_operands[opno] = -1;

      /* Test every operand against its constraint segment for this
	 alternative; all must "win" for the alternative to match.  */
      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  bool win = false;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* Strip a SUBREG, remembering the hard-register offset it
	     implies so class tests below apply to the right regs.  */
	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = true;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		/* End of this alternative's segment; c = 0 ends the
		   scanning loop below.  */
		c = '\0';
		break;
	      case '-':
		raw_constraint_p = true;
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];
		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = true;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  We also want to make sure we have a
		   valid mode.  */
		{
		  auto mem_mode = (recog_data.is_asm
				   ? VOIDmode
				   : recog_data.operand_mode[opno]);
		  if ((GET_MODE (op) == VOIDmode
		       || SCALAR_INT_MODE_P (GET_MODE (op)))
		      && (strict <= 0
			  || strict_memory_address_p (mem_mode, op)))
		    win = true;
		  break;
		}

		/* No need to check general_operand again;
		   it was done in insn-recog.cc.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = true;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = true;
		break;

	      case '{':
		/* Hard-register constraint: the operand must be that
		   exact hard register (after reload).  */
		if ((REG_P (op) && HARD_REGISTER_P (op)
		     && (int) REGNO (op) == decode_hard_reg_constraint (p))
		    || !reload_completed)
		  win = true;
		break;

	      default:
		{
		  /* Target-defined constraint.  */
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl = reg_class_for_constraint (cn);
		  if (cl != NO_REGS)
		    {
		      auto *filter = get_register_filter (cn);
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)
			      && (!filter
				  || TEST_HARD_REG_BIT (*filter,
							REGNO (op) + offset))))
			win = true;
		    }

		  else if (constraint_satisfied_p (op, cn))
		    win = true;

		  else if ((insn_extra_memory_constraint (cn)
			    || insn_extra_relaxed_memory_constraint (cn))
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into a mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* Before reload, accept a pseudo or hard register,
				  since LRA can turn it into a mem.  */
			       || (strict < 0 && targetm.lra_p () && REG_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = true;
		  else if (insn_extra_address_constraint (cn)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = true;
		  /* Cater to architectures like IA-64 that define extra memory
		     constraints without using define_memory_constraint.  */
		  else if (reload_in_progress
			   && REG_P (op)
			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
			   && reg_renumber[REGNO (op)] < 0
			   && reg_equiv_mem (REGNO (op)) != 0
			   && constraint_satisfied_p
			       (reg_equiv_mem (REGNO (op)), cn))
		    win = true;
		  break;
		}
	      }
	  while (p += len, c);

	  raw_constraint_p = false;
	  /* Leave the cursor at the start of the next alternative's
	     segment for the next pass of the outer loop.  */
	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = true;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = true;

	  if (! lose)
	    {
	      /* Apply the deferred *x -> *--x output fix-ups recorded
		 by the matching-constraint case above.  */
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

	      /* For operands without < or > constraints reject side-effects.  */
	      if (AUTO_INC_DEC && recog_data.is_asm)
		{
		  for (opno = 0; opno < recog_data.n_operands; opno++)
		    if (MEM_P (recog_data.operand[opno]))
		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
			{
			case PRE_INC:
			case POST_INC:
			case PRE_DEC:
			case POST_DEC:
			case PRE_MODIFY:
			case POST_MODIFY:
			  if (strchr (recog_data.constraints[opno], '<') == NULL
			      && strchr (recog_data.constraints[opno], '>')
				 == NULL)
			    return false;
			  break;
			default:
			  break;
			}
		}

	      /* which_alternative is left at the matching index.  */
	      return true;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1, alternatives);
  else
    return false;
}
3467 :
3468 : /* Return true iff OPERAND (assumed to be a REG rtx)
3469 : is a hard reg in class CLASS when its regno is offset by OFFSET
3470 : and changed to mode MODE.
3471 : If REG occupies multiple hard regs, all of them must be in CLASS. */
3472 :
3473 : bool
3474 3348862462 : reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
3475 : machine_mode mode)
3476 : {
3477 3348862462 : unsigned int regno = REGNO (operand);
3478 :
3479 3348862462 : if (cl == NO_REGS)
3480 : return false;
3481 :
3482 : /* Regno must not be a pseudo register. Offset may be negative. */
3483 3253237664 : return (HARD_REGISTER_NUM_P (regno)
3484 3253161338 : && HARD_REGISTER_NUM_P (regno + offset)
3485 6506399002 : && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
3486 : regno + offset));
3487 : }
3488 :
3489 : /* Split single instruction. Helper function for split_all_insns and
3490 : split_all_insns_noflow. Return last insn in the sequence if successful,
3491 : or NULL if unsuccessful. */
3492 :
static rtx_insn *
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  /* try_split returns INSN itself when no splitting happened.  */
  if (last == insn)
    return NULL;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  /* Prefer an existing REG_EQUAL/REG_EQUIV note; fall back to
	     the original constant source itself.  */
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL,
				 copy_rtx (SET_SRC (insn_set)));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      /* Temporarily flag post-RA splitting so cleanup_subreg_operands
	 behaves accordingly, restoring the previous value after.  */
      auto old_post_ra_split_completed = post_ra_split_completed;
      post_ra_split_completed = true;
      /* Walk the newly emitted sequence (it starts right after FIRST,
	 the insn that preceded the original) and clean up subregs.  */
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
      post_ra_split_completed = old_post_ra_split_completed;
    }

  return last;
}
3546 :
3547 : /* Split all insns in the function. If UPD_LIFE, update life info after. */
3548 :
void
split_all_insns (void)
{
  bool changed;
  bool need_cfg_cleanup = false;
  basic_block bb;

  /* Track which blocks received splits so their internal structure can
     be rebuilt afterwards.  */
  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  /* NEXT is captured before any split/delete below can
	     invalidate INSN's links; FINISH marks the block's end.  */
	  finish = (insn == BB_END (bb));

	  /* If INSN has a REG_EH_REGION note and we split INSN, the
	     resulting split may not have/need REG_EH_REGION notes.

	     If that happens and INSN was the last reference to the
	     given EH region, then the EH region will become unreachable.
	     We cannot leave the unreachable blocks in the CFG as that
	     will trigger a checking failure.

	     So track if INSN has a REG_EH_REGION note.  If so and we
	     split INSN, then trigger a CFG cleanup.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		  if (note)
		    need_cfg_cleanup = true;
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		      if (note)
			need_cfg_cleanup = true;
		    }
		}
	    }
	}
    }

  if (reload_completed)
    post_ra_split_completed = true;

  default_rtl_profile ();
  if (changed)
    {
      find_many_sub_basic_blocks (blocks);

      /* Splitting could drop an REG_EH_REGION if it potentially
	 trapped in its original form, but does not in its split
	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
	 store/load pair and -fnon-call-exceptions.  */
      if (need_cfg_cleanup)
	cleanup_cfg (0);
    }

  checking_verify_flow_info ();
}
3635 :
3636 : /* Same as split_all_insns, but do not expect CFG to be available.
3637 : Used by machine dependent reorg passes. */
3638 :
3639 : void
3640 0 : split_all_insns_noflow (void)
3641 : {
3642 0 : rtx_insn *next, *insn;
3643 :
3644 0 : for (insn = get_insns (); insn; insn = next)
3645 : {
3646 0 : next = NEXT_INSN (insn);
3647 0 : if (INSN_P (insn))
3648 : {
3649 : /* Don't split no-op move insns. These should silently
3650 : disappear later in final. Splitting such insns would
3651 : break the code that handles LIBCALL blocks. */
3652 0 : rtx set = single_set (insn);
3653 0 : if (set && set_noop_p (set))
3654 : {
3655 : /* Nops get in the way while scheduling, so delete them
3656 : now if register allocation has already been done. It
3657 : is too risky to try to do this before register
3658 : allocation, and there are unlikely to be very many
3659 : nops then anyways.
3660 :
3661 : ??? Should we use delete_insn when the CFG isn't valid? */
3662 0 : if (reload_completed)
3663 0 : delete_insn_and_edges (insn);
3664 : }
3665 : else
3666 0 : split_insn (insn);
3667 : }
3668 : }
3669 :
3670 0 : if (reload_completed)
3671 0 : post_ra_split_completed = true;
3672 0 : }
3673 :
3674 : struct peep2_insn_data
3675 : {
     : /* The insn occupying this slot, or NULL while the slot is unused.
     :    The end-of-block slot holds PEEP2_EOB instead (see below).  */
3676 : rtx_insn *insn;
     : /* Registers live immediately before INSN; for the end-of-block
     :    marker this is the block's live-out set.  */
3677 : regset live_before;
3678 : };
3679 :
3680 : static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
     : /* Index into peep2_insn_data of the first insn of the current
     :    match window (the buffer is used circularly).  */
3681 : static int peep2_current;
3682 :
     : /* Set when a replacement emitted a new jump insn, so the pass must
     :    call rebuild_jump_labels before finishing.  */
3683 : static bool peep2_do_rebuild_jump_labels;
     : /* Set when purge_dead_edges removed edges, so the pass must run
     :    cleanup_cfg before finishing.  */
3684 : static bool peep2_do_cleanup_cfg;
3685 :
3686 : /* The number of instructions available to match a peep2. */
3687 : int peep2_current_count;
3688 :
3689 : /* A marker indicating the last insn of the block. The live_before regset
3690 : for this element is correct, indicating DF_LIVE_OUT for the block. */
3691 : #define PEEP2_EOB invalid_insn_rtx
3692 :
3693 : /* Wrap N to fit into the peep2_insn_data buffer. */
3694 :
3695 : static int
3696 420084760 : peep2_buf_position (int n)
3697 : {
3698 0 : if (n >= MAX_INSNS_PER_PEEP2 + 1)
3699 141877170 : n -= MAX_INSNS_PER_PEEP2 + 1;
3700 420066669 : return n;
3701 : }
3702 :
3703 : /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3704 : does not exist. Used by the recognizer to find the next insn to match
3705 : in a multi-insn pattern. */
3706 :
3707 : rtx_insn *
3708 217304532 : peep2_next_insn (int n)
3709 : {
3710 217304532 : gcc_assert (n <= peep2_current_count);
3711 :
3712 217304532 : n = peep2_buf_position (peep2_current + n);
3713 :
3714 217304532 : return peep2_insn_data[n].insn;
3715 : }
3716 :
3717 : /* Return true if REGNO is dead before the Nth non-note insn
3718 : after `current'. */
3719 :
3720 : bool
3721 12842463 : peep2_regno_dead_p (int ofs, int regno)
3722 : {
3723 12842463 : gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3724 :
3725 12842463 : ofs = peep2_buf_position (peep2_current + ofs);
3726 :
3727 12842463 : gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3728 :
3729 12842463 : return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3730 : }
3731 :
3732 : /* Similarly for a REG. */
3733 :
3734 : bool
3735 298687 : peep2_reg_dead_p (int ofs, rtx reg)
3736 : {
3737 298687 : gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3738 :
3739 298687 : ofs = peep2_buf_position (peep2_current + ofs);
3740 :
3741 298687 : gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3742 :
3743 298687 : unsigned int end_regno = END_REGNO (reg);
3744 374332 : for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3745 298687 : if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3746 : return false;
3747 : return true;
3748 : }
3749 :
3750 : /* Rotating start offset for peep2_find_free_register's scan.  It is
     :    advanced after every successful search (and reset to 0 on
     :    failure), so successive searches distribute different hard
     :    registers.  */
3751 : static int search_ofs;
3752 :
3753 : /* Try to find a hard register of mode MODE, matching the register class in
3754 : CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3755 : remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3756 : in which case the only condition is that the register must be available
3757 : before CURRENT_INSN.
3758 : Registers that already have bits set in REG_SET will not be considered.
3759 :
3760 : If an appropriate register is available, it will be returned and the
3761 : corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3762 : returned. */
3763 :
3764 : rtx
3765 596348 : peep2_find_free_register (int from, int to, const char *class_str,
3766 : machine_mode mode, HARD_REG_SET *reg_set)
3767 : {
3768 596348 : enum reg_class cl;
3769 596348 : HARD_REG_SET live;
3770 596348 : df_ref def;
3771 596348 : int i;
3772 :
3773 596348 : gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3774 596348 : gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3775 :
     : /* Translate the window-relative offsets into circular-buffer
     :    indices.  */
3776 596348 : from = peep2_buf_position (peep2_current + from);
3777 596348 : to = peep2_buf_position (peep2_current + to);
3778 :
3779 596348 : gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3780 596348 : REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3781 :
     : /* LIVE starts as the registers live before FROM; folding in the
     :    defs of every insn up to (but excluding) TO makes it the set of
     :    registers unavailable at some point in the range.  */
3782 596348 : while (from != to)
3783 : {
3784 18091 : gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3785 :
3786 : /* Don't use registers set or clobbered by the insn. */
3787 72364 : FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3788 54273 : SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3789 :
3790 632530 : from = peep2_buf_position (from + 1);
3791 : }
3792 :
3793 596348 : cl = reg_class_for_constraint (lookup_constraint (class_str));
3794 :
3795 5840092 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3796 : {
3797 5835241 : int raw_regno, regno, j;
3798 5835241 : bool success;
3799 :
3800 : /* Distribute the free registers as much as possible. */
3801 5835241 : raw_regno = search_ofs + i;
3802 5835241 : if (raw_regno >= FIRST_PSEUDO_REGISTER)
3803 265255 : raw_regno -= FIRST_PSEUDO_REGISTER;
3804 : #ifdef REG_ALLOC_ORDER
3805 5835241 : regno = reg_alloc_order[raw_regno];
3806 : #else
3807 : regno = raw_regno;
3808 : #endif
3809 :
3810 : /* Can it support the mode we need? */
3811 5835241 : if (!targetm.hard_regno_mode_ok (regno, mode))
3812 1791208 : continue;
3813 :
     : /* Vet every hard register that MODE occupies starting at REGNO;
     :    all of them must pass every test below.  */
3814 4635530 : success = true;
3815 4635530 : for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3816 : {
3817 : /* Don't allocate fixed registers. */
3818 4044033 : if (fixed_regs[regno + j])
3819 : {
3820 : success = false;
3821 : break;
3822 : }
3823 : /* Don't allocate global registers. */
3824 2106672 : if (global_regs[regno + j])
3825 : {
3826 : success = false;
3827 : break;
3828 : }
3829 : /* Make sure the register is of the right class. */
3830 2106672 : if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3831 : {
3832 : success = false;
3833 : break;
3834 : }
3835 : /* And that we don't create an extra save/restore. */
3836 1124947 : if (! crtl->abi->clobbers_full_reg_p (regno + j)
3837 1124947 : && ! df_regs_ever_live_p (regno + j))
3838 : {
3839 : success = false;
3840 : break;
3841 : }
3842 :
3843 1088341 : if (! targetm.hard_regno_scratch_ok (regno + j))
3844 : {
3845 : success = false;
3846 : break;
3847 : }
3848 :
3849 : /* And we don't clobber traceback for noreturn functions. */
3850 1088221 : if ((regno + j == FRAME_POINTER_REGNUM
3851 1088221 : || regno + j == HARD_FRAME_POINTER_REGNUM)
3852 49857 : && (! reload_completed || frame_pointer_needed))
3853 : {
3854 : success = false;
3855 : break;
3856 : }
3857 :
     : /* Finally the register must not already be claimed by the
     :    caller (REG_SET) nor live in the range.  */
3858 1077418 : if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3859 1077418 : || TEST_HARD_REG_BIT (live, regno + j))
3860 : {
3861 : success = false;
3862 : break;
3863 : }
3864 : }
3865 :
3866 4044033 : if (success)
3867 : {
3868 591497 : add_to_hard_reg_set (reg_set, mode, regno);
3869 :
3870 : /* Start the next search with the next register. */
3871 591497 : if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3872 6515 : raw_regno = 0;
3873 591497 : search_ofs = raw_regno;
3874 :
3875 591497 : return gen_rtx_REG (mode, regno);
3876 : }
3877 : }
3878 :
     : /* Nothing suitable: restart future searches from the beginning.  */
3879 4851 : search_ofs = 0;
3880 4851 : return NULL_RTX;
3881 : }
3882 :
3883 : /* Forget all currently tracked instructions, only remember current
3884 : LIVE regset. */
3885 :
3886 : static void
3887 10743904 : peep2_reinit_state (regset live)
3888 : {
3889 10743904 : int i;
3890 :
3891 : /* Indicate that all slots except the last holds invalid data. */
3892 75207328 : for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3893 64463424 : peep2_insn_data[i].insn = NULL;
3894 10743904 : peep2_current_count = 0;
3895 :
3896 : /* Indicate that the last slot contains live_after data. */
3897 10743904 : peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3898 10743904 : peep2_current = MAX_INSNS_PER_PEEP2;
3899 :
3900 10743904 : COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3901 10743904 : }
3902 :
3903 : /* Copies frame related info of an insn (OLD_INSN) to the single
3904 : insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3905 :
3906 : void
3907 132985 : copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
3908 : {
     : /* Caller guarantees NEW_INSN is the single active insn produced by
     :    splitting OLD_INSN (see peep2_attempt).  */
3909 132985 : bool any_note = false;
3910 132985 : rtx note;
3911 :
3912 132985 : if (!RTX_FRAME_RELATED_P (old_insn))
3913 : return;
3914 :
3915 132985 : RTX_FRAME_RELATED_P (new_insn) = 1;
3916 :
3917 : /* Allow the backend to fill in a note during the split. */
3918 132985 : for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3919 0 : switch (REG_NOTE_KIND (note))
3920 : {
3921 0 : case REG_FRAME_RELATED_EXPR:
3922 0 : case REG_CFA_DEF_CFA:
3923 0 : case REG_CFA_ADJUST_CFA:
3924 0 : case REG_CFA_OFFSET:
3925 0 : case REG_CFA_REGISTER:
3926 0 : case REG_CFA_EXPRESSION:
3927 0 : case REG_CFA_RESTORE:
3928 0 : case REG_CFA_SET_VDRAP:
3929 0 : any_note = true;
3930 0 : break;
3931 : default:
3932 : break;
3933 : }
3934 :
3935 : /* If the backend didn't supply a note, copy one over. */
3936 132985 : if (!any_note)
3937 325079 : for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3938 192094 : switch (REG_NOTE_KIND (note))
3939 : {
3940 144853 : case REG_FRAME_RELATED_EXPR:
3941 144853 : case REG_CFA_DEF_CFA:
3942 144853 : case REG_CFA_ADJUST_CFA:
3943 144853 : case REG_CFA_OFFSET:
3944 144853 : case REG_CFA_REGISTER:
3945 144853 : case REG_CFA_EXPRESSION:
3946 144853 : case REG_CFA_RESTORE:
3947 144853 : case REG_CFA_SET_VDRAP:
     : /* Same list of note kinds as above: any one of them is taken
     :    to describe the insn's frame effect.  */
3948 144853 : add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3949 144853 : any_note = true;
3950 144853 : break;
3951 : default:
3952 : break;
3953 : }
3954 :
3955 : /* If there still isn't a note, make sure the unwind info sees the
3956 : same expression as before the split. */
3957 132985 : if (!any_note)
3958 : {
3959 2420 : rtx old_set, new_set;
3960 :
3961 : /* The old insn had better have been simple, or annotated. */
3962 2420 : old_set = single_set (old_insn);
3963 2420 : gcc_assert (old_set != NULL);
3964 :
3965 2420 : new_set = single_set (new_insn);
3966 2420 : if (!new_set || !rtx_equal_p (new_set, old_set))
3967 274 : add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set)
3968 : }
3969 :
3970 : /* Copy prologue/epilogue status. This is required in order to keep
3971 : proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3972 132985 : maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3973 : }
3974 :
3975 : /* While scanning basic block BB, we found a match of length MATCH_LEN + 1,
3976 : starting at INSN. Perform the replacement, removing the old insns and
3977 : replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3978 : if the replacement is rejected. */
3979 :
3980 : static rtx_insn *
3981 2229534 : peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3982 : {
     : /* NULL is returned (rejecting the replacement) only on the
     :    frame-related path just below; once the old sequence has been
     :    deleted the replacement is committed.  */
3983 2229534 : int i;
3984 2229534 : rtx_insn *last, *before_try, *x;
3985 2229534 : rtx eh_note, as_note;
3986 2229534 : rtx_insn *old_insn;
3987 2229534 : rtx_insn *new_insn;
3988 2229534 : bool was_call = false;
3989 :
3990 : /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3991 : match more than one insn, or to be split into more than one insn. */
3992 2229534 : old_insn = peep2_insn_data[peep2_current].insn;
3993 2229534 : if (RTX_FRAME_RELATED_P (old_insn))
3994 : {
3995 135769 : if (match_len != 0)
3996 : return NULL;
3997 :
3998 : /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3999 : may be in the stream for the purpose of register allocation. */
4000 135769 : if (active_insn_p (attempt))
4001 : new_insn = attempt;
4002 : else
4003 34906 : new_insn = next_active_insn (attempt);
4004 135769 : if (next_active_insn (new_insn))
4005 : return NULL;
4006 :
4007 : /* We have a 1-1 replacement. Copy over any frame-related info. */
4008 132961 : copy_frame_info_to_split_insn (old_insn, new_insn);
4009 : }
4010 :
4011 : /* If we are splitting a CALL_INSN, look for the CALL_INSN
4012 : in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
4013 : cfg-related call notes. */
4014 4658572 : for (i = 0; i <= match_len; ++i)
4015 : {
4016 2433391 : int j;
4017 2433391 : rtx note;
4018 :
4019 2433391 : j = peep2_buf_position (peep2_current + i);
4020 2433391 : old_insn = peep2_insn_data[j].insn;
4021 2433391 : if (!CALL_P (old_insn))
4022 2431846 : continue;
4023 1545 : was_call = true;
4024 :
4025 : new_insn = attempt;
4026 1545 : while (new_insn != NULL_RTX)
4027 : {
4028 1545 : if (CALL_P (new_insn))
4029 : break;
4030 0 : new_insn = NEXT_INSN (new_insn);
4031 : }
4032 :
4033 1545 : gcc_assert (new_insn != NULL_RTX);
4034 :
4035 1545 : CALL_INSN_FUNCTION_USAGE (new_insn)
4036 1545 : = CALL_INSN_FUNCTION_USAGE (old_insn);
4037 1545 : SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
4038 :
     : /* Preserve only the call notes that matter for CFG/EH
     :    correctness; everything else is dropped.  */
4039 1545 : for (note = REG_NOTES (old_insn);
4040 6966 : note;
4041 5421 : note = XEXP (note, 1))
4042 5421 : switch (REG_NOTE_KIND (note))
4043 : {
4044 0 : case REG_NORETURN:
4045 0 : case REG_SETJMP:
4046 0 : case REG_TM:
4047 0 : case REG_CALL_NOCF_CHECK:
4048 0 : add_reg_note (new_insn, REG_NOTE_KIND (note),
4049 : XEXP (note, 0));
4050 0 : break;
4051 : default:
4052 : /* Discard all other reg notes. */
4053 : break;
4054 : }
4055 :
4056 : /* Croak if there is another call in the sequence. */
4057 1545 : while (++i <= match_len)
4058 : {
4059 0 : j = peep2_buf_position (peep2_current + i);
4060 0 : old_insn = peep2_insn_data[j].insn;
4061 0 : gcc_assert (!CALL_P (old_insn));
4062 : }
4063 : break;
4064 : }
4065 :
4066 : /* If we matched any instruction that had a REG_ARGS_SIZE, then
4067 : move those notes over to the new sequence. */
4068 2226726 : as_note = NULL;
4069 4543123 : for (i = match_len; i >= 0; --i)
4070 : {
4071 2433391 : int j = peep2_buf_position (peep2_current + i);
4072 2433391 : old_insn = peep2_insn_data[j].insn;
4073 :
4074 2433391 : as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
4075 2433391 : if (as_note)
4076 : break;
4077 : }
4078 :
     : /* I now indexes the last insn of the matched sequence; any
     :    REG_EH_REGION note on it must be re-applied to the replacement
     :    insns below.  */
4079 2226726 : i = peep2_buf_position (peep2_current + match_len);
4080 2226726 : eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
4081 :
4082 : /* Replace the old sequence with the new. */
4083 2226726 : rtx_insn *peepinsn = peep2_insn_data[i].insn;
4084 4453452 : last = emit_insn_after_setloc (attempt,
4085 : peep2_insn_data[i].insn,
4086 2226726 : INSN_LOCATION (peepinsn));
4087 2226726 : if (JUMP_P (peepinsn) && JUMP_P (last))
4088 796 : CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
4089 2226726 : before_try = PREV_INSN (insn);
4090 2226726 : delete_insn_chain (insn, peep2_insn_data[i].insn, false);
4091 :
4092 : /* Re-insert the EH_REGION notes. */
4093 2226726 : if (eh_note || (was_call && nonlocal_goto_handler_labels))
4094 : {
4095 40 : edge eh_edge;
4096 40 : edge_iterator ei;
4097 :
4098 48 : FOR_EACH_EDGE (eh_edge, ei, bb->succs)
4099 47 : if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
4100 : break;
4101 :
4102 40 : if (eh_note)
4103 40 : copy_reg_eh_region_note_backward (eh_note, last, before_try)
4104 :
4105 40 : if (eh_edge)
4106 117 : for (x = last; x != before_try; x = PREV_INSN (x))
4107 78 : if (x != BB_END (bb)
4108 78 : && (can_throw_internal (x)
4109 39 : || can_nonlocal_goto (x)))
4110 : {
4111 0 : edge nfte, nehe;
4112 0 : int flags;
4113 :
4114 0 : nfte = split_block (bb, x);
4115 0 : flags = (eh_edge->flags
4116 : & (EDGE_EH | EDGE_ABNORMAL));
4117 0 : if (CALL_P (x))
4118 0 : flags |= EDGE_ABNORMAL_CALL;
4119 0 : nehe = make_edge (nfte->src, eh_edge->dest,
4120 : flags);
4121 :
4122 0 : nehe->probability = eh_edge->probability;
4123 0 : nfte->probability = nehe->probability.invert ();
4124 :
4125 0 : peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
4126 0 : bb = nfte->src;
4127 0 : eh_edge = nehe;
4128 : }
4129 :
4130 : /* Converting possibly trapping insn to non-trapping is
4131 : possible. Zap dummy outgoing edges. */
4132 40 : peep2_do_cleanup_cfg |= purge_dead_edges (bb);
4133 : }
4134 :
4135 : /* Re-insert the ARGS_SIZE notes. */
4136 2226726 : if (as_note)
4137 116994 : fixup_args_size_notes (before_try, last, get_args_size (as_note));
4138 :
4139 : /* Scan the new insns for embedded side effects and add appropriate
4140 : REG_INC notes. */
4141 : if (AUTO_INC_DEC)
4142 : for (x = last; x != before_try; x = PREV_INSN (x))
4143 : if (NONDEBUG_INSN_P (x))
4144 : add_auto_inc_notes (x, PATTERN (x));
4145 :
4146 : /* If we generated a jump instruction, it won't have
4147 : JUMP_LABEL set. Recompute after we're done. */
4148 5185929 : for (x = last; x != before_try; x = PREV_INSN (x))
4149 2959999 : if (JUMP_P (x))
4150 : {
4151 796 : peep2_do_rebuild_jump_labels = true;
4152 796 : break;
4153 : }
4154 :
4155 : return last;
4156 : }
4157 :
4158 : /* After performing a replacement in basic block BB, fix up the life
4159 : information in our buffer. LAST is the last of the insns that we
4160 : emitted as a replacement. PREV is the insn before the start of
4161 : the replacement. MATCH_LEN + 1 is the number of instructions that were
4162 : matched, and which now need to be replaced in the buffer. */
4163 :
4164 : static void
4165 2226726 : peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
4166 : rtx_insn *prev)
4167 : {
4168 2226726 : int i = peep2_buf_position (peep2_current + match_len + 1);
4169 2226726 : rtx_insn *x;
4170 2226726 : regset_head live;
4171 :
     : /* Slot I is the one just past the replaced insns; seed LIVE with
     :    the registers live at that point.  */
4172 2226726 : INIT_REG_SET (&live);
4173 2226726 : COPY_REG_SET (&live, peep2_insn_data[i].live_before);
4174 :
4175 2226726 : gcc_assert (peep2_current_count >= match_len + 1);
4176 2226726 : peep2_current_count -= match_len + 1;
4177 :
     : /* Walk the newly emitted insns backwards from LAST to PREV,
     :    installing each into the circular buffer together with its
     :    live-before set, until the buffer fills up.  */
4178 2226726 : x = last;
4179 2960691 : do
4180 : {
4181 2960691 : if (INSN_P (x))
4182 : {
4183 2960691 : df_insn_rescan (x);
4184 2960691 : if (peep2_current_count < MAX_INSNS_PER_PEEP2)
4185 : {
4186 2817350 : peep2_current_count++;
4187 2817350 : if (--i < 0)
4188 818255 : i = MAX_INSNS_PER_PEEP2;
4189 2817350 : peep2_insn_data[i].insn = x;
4190 2817350 : df_simulate_one_insn_backwards (bb, x, &live);
4191 2817350 : COPY_REG_SET (peep2_insn_data[i].live_before, &live);
4192 : }
4193 : }
4194 2960691 : x = PREV_INSN (x);
4195 : }
4196 2960691 : while (x != prev);
4197 2226726 : CLEAR_REG_SET (&live);
4198 :
4199 2226726 : peep2_current = i;
4200 2226726 : }
4201 :
4202 : /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4203 : Return true if we added it, false otherwise. The caller will try to match
4204 : peepholes against the buffer if we return false; otherwise it will try to
4205 : add more instructions to the buffer. */
4206 :
4207 : static bool
4208 81669861 : peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
4209 : {
4210 81669861 : int pos;
4211 :
4212 : /* Once we have filled the maximum number of insns the buffer can hold,
4213 : allow the caller to match the insns against peepholes. We wait until
4214 : the buffer is full in case the target has similar peepholes of different
4215 : length; we always want to match the longest if possible. */
4216 81669861 : if (peep2_current_count == MAX_INSNS_PER_PEEP2)
4217 : return false;
4218 :
4219 : /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4220 : any other pattern, lest it change the semantics of the frame info. */
4221 62965586 : if (RTX_FRAME_RELATED_P (insn))
4222 : {
4223 : /* Let the buffer drain first. */
4224 7638509 : if (peep2_current_count > 0)
4225 : return false;
4226 : /* Now the insn will be the only thing in the buffer. */
4227 : }
4228 :
4229 58704471 : pos = peep2_buf_position (peep2_current + peep2_current_count);
4230 58704471 : peep2_insn_data[pos].insn = insn;
4231 58704471 : COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4232 58704471 : peep2_current_count++;
4233 :
4234 58704471 : df_simulate_one_insn_forwards (bb, insn, live);
4235 58704471 : return true;
4236 : }
4237 :
4238 : /* Perform the peephole2 optimization pass. */
4239 :
4240 : static void
4241 961544 : peephole2_optimize (void)
4242 : {
4243 961544 : rtx_insn *insn;
4244 961544 : bitmap live;
4245 961544 : int i;
4246 961544 : basic_block bb;
4247 :
4248 961544 : peep2_do_cleanup_cfg = false;
4249 961544 : peep2_do_rebuild_jump_labels = false;
4250 :
4251 961544 : df_set_flags (DF_LR_RUN_DCE);
4252 961544 : df_note_add_problem ();
4253 961544 : df_analyze ();
4254 :
4255 : /* Initialize the regsets we're going to use. */
4256 8653896 : for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4257 6730808 : peep2_insn_data[i].live_before = BITMAP_ALLOC (®_obstack);
4258 961544 : search_ofs = 0;
4259 961544 : live = BITMAP_ALLOC (®_obstack);
4260 :
4261 11705448 : FOR_EACH_BB_REVERSE_FN (bb, cfun)
4262 : {
4263 10743904 : bool past_end = false;
4264 10743904 : int pos;
4265 :
4266 10743904 : rtl_profile_for_bb (bb);
4267 :
4268 : /* Start up propagation. */
4269 21487808 : bitmap_copy (live, DF_LR_IN (bb));
4270 10743904 : df_simulate_initialize_forwards (bb, live);
4271 10743904 : peep2_reinit_state (live);
4272 :
4273 10743904 : insn = BB_HEAD (bb);
4274 202093275 : for (;;)
4275 : {
4276 202093275 : rtx_insn *attempt, *head;
4277 202093275 : int match_len;
4278 :
4279 202093275 : if (!past_end && !NONDEBUG_INSN_P (insn))
4280 : {
4281 71329744 : next_insn:
4282 130034215 : insn = NEXT_INSN (insn);
4283 130034215 : if (insn == NEXT_INSN (BB_END (bb)))
4284 10743904 : past_end = true;
4285 132260941 : continue;
4286 : }
4287 81669861 : if (!past_end && peep2_fill_buffer (bb, insn, live))
4288 58704471 : goto next_insn;
4289 :
4290 : /* If we did not fill an empty buffer, it signals the end of the
4291 : block. */
4292 72059060 : if (peep2_current_count == 0)
4293 : break;
4294 :
4295 : /* The buffer filled to the current maximum, so try to match. */
4296 :
4297 61315156 : pos = peep2_buf_position (peep2_current + peep2_current_count);
4298 61315156 : peep2_insn_data[pos].insn = PEEP2_EOB;
4299 61315156 : COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4300 :
4301 : /* Match the peephole. */
4302 61315156 : head = peep2_insn_data[peep2_current].insn;
4303 61315156 : attempt = peephole2_insns (PATTERN (head), head, &match_len);
4304 61315156 : if (attempt != NULL)
4305 : {
4306 2229534 : rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
4307 2229534 : if (last)
4308 : {
4309 2226726 : peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
4310 2226726 : continue;
4311 : }
4312 : }
4313 :
4314 : /* No match: advance the buffer by one insn. */
4315 59088430 : peep2_current = peep2_buf_position (peep2_current + 1);
4316 59088430 : peep2_current_count--;
4317 : }
4318 : }
4319 :
4320 961544 : default_rtl_profile ();
4321 8653896 : for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4322 6730808 : BITMAP_FREE (peep2_insn_data[i].live_before);
4323 961544 : BITMAP_FREE (live);
4324 961544 : if (peep2_do_rebuild_jump_labels)
4325 737 : rebuild_jump_labels (get_insns ());
4326 961544 : if (peep2_do_cleanup_cfg)
4327 0 : cleanup_cfg (CLEANUP_CFG_CHANGED);
4328 961544 : }
4329 :
4330 : /* Common predicates for use with define_bypass. */
4331 :
4332 : /* Helper function for store_data_bypass_p, handle just a single SET
4333 : IN_SET. */
4334 :
4335 : static bool
4336 0 : store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4337 : {
4338 0 : if (!MEM_P (SET_DEST (in_set)))
4339 : return false;
4340 :
4341 0 : rtx out_set = single_set (out_insn);
4342 0 : if (out_set)
4343 0 : return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4344 :
4345 0 : rtx out_pat = PATTERN (out_insn);
4346 0 : if (GET_CODE (out_pat) != PARALLEL)
4347 : return false;
4348 :
4349 0 : for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4350 : {
4351 0 : rtx out_exp = XVECEXP (out_pat, 0, i);
4352 :
4353 0 : if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4354 0 : continue;
4355 :
4356 0 : gcc_assert (GET_CODE (out_exp) == SET);
4357 :
4358 0 : if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4359 : return false;
4360 : }
4361 :
4362 : return true;
4363 : }
4364 :
4365 : /* True if the dependency between OUT_INSN and IN_INSN is on the store
4366 : data not the address operand(s) of the store. IN_INSN and OUT_INSN
4367 : must be either a single_set or a PARALLEL with SETs inside. */
4368 :
4369 : bool
4370 0 : store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4371 : {
4372 0 : rtx in_set = single_set (in_insn);
4373 0 : if (in_set)
4374 0 : return store_data_bypass_p_1 (out_insn, in_set);
4375 :
4376 0 : rtx in_pat = PATTERN (in_insn);
4377 0 : if (GET_CODE (in_pat) != PARALLEL)
4378 : return false;
4379 :
4380 0 : for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4381 : {
4382 0 : rtx in_exp = XVECEXP (in_pat, 0, i);
4383 :
4384 0 : if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4385 0 : continue;
4386 :
4387 0 : gcc_assert (GET_CODE (in_exp) == SET);
4388 :
4389 0 : if (!store_data_bypass_p_1 (out_insn, in_exp))
4390 : return false;
4391 : }
4392 :
4393 : return true;
4394 : }
4395 :
4396 : /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4397 : condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
4398 : or multiple set; IN_INSN should be single_set for truth, but for convenience
4399 : of insn categorization may be any JUMP or CALL insn. */
4400 :
4401 : bool
4402 0 : if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4403 : {
4404 0 : rtx out_set, in_set;
4405 :
4406 0 : in_set = single_set (in_insn);
4407 0 : if (! in_set)
4408 : {
4409 0 : gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
4410 : return false;
4411 : }
4412 :
4413 0 : if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
4414 : return false;
4415 0 : in_set = SET_SRC (in_set);
4416 :
4417 0 : out_set = single_set (out_insn);
4418 0 : if (out_set)
4419 : {
4420 0 : if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4421 0 : || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4422 0 : return false;
4423 : }
4424 : else
4425 : {
4426 0 : rtx out_pat;
4427 0 : int i;
4428 :
4429 0 : out_pat = PATTERN (out_insn);
4430 0 : gcc_assert (GET_CODE (out_pat) == PARALLEL);
4431 :
4432 0 : for (i = 0; i < XVECLEN (out_pat, 0); i++)
4433 : {
4434 0 : rtx exp = XVECEXP (out_pat, 0, i);
4435 :
4436 0 : if (GET_CODE (exp) == CLOBBER)
4437 0 : continue;
4438 :
4439 0 : gcc_assert (GET_CODE (exp) == SET);
4440 :
4441 0 : if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4442 0 : || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4443 0 : return false;
4444 : }
4445 : }
4446 :
4447 : return true;
4448 : }
4449 :
4450 : static unsigned int
4451 961544 : rest_of_handle_peephole2 (void)
4452 : {
4453 961544 : if (HAVE_peephole2)
4454 0 : peephole2_optimize ();
4455 :
4456 961544 : return 0;
4457 : }
4458 :
4459 : namespace {
4460 :
     : /* Static descriptor consumed by the rtl_opt_pass constructor
     :    below.  */
4461 : const pass_data pass_data_peephole2 =
4462 : {
4463 : RTL_PASS, /* type */
4464 : "peephole2", /* name */
4465 : OPTGROUP_NONE, /* optinfo_flags */
4466 : TV_PEEPHOLE2, /* tv_id */
4467 : 0, /* properties_required */
4468 : 0, /* properties_provided */
4469 : 0, /* properties_destroyed */
4470 : 0, /* todo_flags_start */
4471 : TODO_df_finish, /* todo_flags_finish */
4472 : };
4473 :
4474 : class pass_peephole2 : public rtl_opt_pass
4475 : {
4476 : public:
4477 288047 : pass_peephole2 (gcc::context *ctxt)
4478 576094 : : rtl_opt_pass (pass_data_peephole2, ctxt)
4479 : {}
4480 :
4481 : /* opt_pass methods: */
4482 : /* The epiphany backend creates a second instance of this pass, so we need
4483 : a clone method. */
4484 0 : opt_pass * clone () final override { return new pass_peephole2 (m_ctxt); }
     : /* Run only when optimizing and -fpeephole2 is enabled.  */
4485 1474422 : bool gate (function *) final override
4486 : {
4487 1474422 : return (optimize > 0 && flag_peephole2);
4488 : }
     : /* Delegate to the file-scope worker above.  */
4489 961544 : unsigned int execute (function *) final override
4490 : {
4491 961544 : return rest_of_handle_peephole2 ();
4492 : }
4493 :
4494 : }; // class pass_peephole2
4495 :
4496 : } // anon namespace
4497 :
4498 : rtl_opt_pass *
4499 288047 : make_pass_peephole2 (gcc::context *ctxt)
4500 : {
4501 288047 : return new pass_peephole2 (ctxt);
4502 : }
4503 :
4504 : namespace {
4505 :
     : /* Static descriptor for the "split1" pass; it advertises
     :    PROP_rtl_split_insns once it has run.  */
4506 : const pass_data pass_data_split_all_insns =
4507 : {
4508 : RTL_PASS, /* type */
4509 : "split1", /* name */
4510 : OPTGROUP_NONE, /* optinfo_flags */
4511 : TV_NONE, /* tv_id */
4512 : 0, /* properties_required */
4513 : PROP_rtl_split_insns, /* properties_provided */
4514 : 0, /* properties_destroyed */
4515 : 0, /* todo_flags_start */
4516 : 0, /* todo_flags_finish */
4517 : };
4518 :
4519 : class pass_split_all_insns : public rtl_opt_pass
4520 : {
4521 : public:
4522 288047 : pass_split_all_insns (gcc::context *ctxt)
4523 576094 : : rtl_opt_pass (pass_data_split_all_insns, ctxt)
4524 : {}
4525 :
4526 : /* opt_pass methods: */
4527 : /* The epiphany backend creates a second instance of this pass, so
4528 : we need a clone method. */
4529 0 : opt_pass * clone () final override
4530 : {
4531 0 : return new pass_split_all_insns (m_ctxt);
4532 : }
     : /* Unconditionally split every insn in the current function.  */
4533 1474412 : unsigned int execute (function *) final override
4534 : {
4535 1474412 : split_all_insns ();
4536 1474412 : return 0;
4537 : }
4538 :
4539 : }; // class pass_split_all_insns
4540 :
4541 : } // anon namespace
4542 :
4543 : rtl_opt_pass *
4544 288047 : make_pass_split_all_insns (gcc::context *ctxt)
4545 : {
4546 288047 : return new pass_split_all_insns (ctxt);
4547 : }
4548 :
4549 : namespace {
4550 :
     : /* Static descriptor for the "split2" pass.  */
4551 : const pass_data pass_data_split_after_reload =
4552 : {
4553 : RTL_PASS, /* type */
4554 : "split2", /* name */
4555 : OPTGROUP_NONE, /* optinfo_flags */
4556 : TV_NONE, /* tv_id */
4557 : 0, /* properties_required */
4558 : 0, /* properties_provided */
4559 : 0, /* properties_destroyed */
4560 : 0, /* todo_flags_start */
4561 : 0, /* todo_flags_finish */
4562 : };
4563 :
4564 : class pass_split_after_reload : public rtl_opt_pass
4565 : {
4566 : public:
4567 288047 : pass_split_after_reload (gcc::context *ctxt)
4568 576094 : : rtl_opt_pass (pass_data_split_after_reload, ctxt)
4569 : {}
4570 :
4571 : /* opt_pass methods: */
4572 1474422 : bool gate (function *) final override
4573 : {
4574 : /* If optimizing, then go ahead and split insns now. */
4575 1474422 : return optimize > 0;
4576 : }
4577 :
     : /* Full split of the current function's insns.  */
4578 1041492 : unsigned int execute (function *) final override
4579 : {
4580 1041492 : split_all_insns ();
4581 1041492 : return 0;
4582 : }
4583 :
4584 : }; // class pass_split_after_reload
4585 :
4586 : } // anon namespace
4587 :
4588 : rtl_opt_pass *
4589 288047 : make_pass_split_after_reload (gcc::context *ctxt)
4590 : {
4591 288047 : return new pass_split_after_reload (ctxt);
4592 : }
4593 :
static bool
enable_split_before_sched2 (void)
{
#ifndef INSN_SCHEDULING
  /* Without an insn scheduler there is nothing to split for.  */
  return false;
#else
  /* Split early only when the post-reload scheduler will actually
     run.  */
  return optimize > 0 && flag_schedule_insns_after_reload;
#endif
}
4603 :
4604 : namespace {
4605 :
     : /* Static descriptor for the "split3" pass.  */
4606 : const pass_data pass_data_split_before_sched2 =
4607 : {
4608 : RTL_PASS, /* type */
4609 : "split3", /* name */
4610 : OPTGROUP_NONE, /* optinfo_flags */
4611 : TV_NONE, /* tv_id */
4612 : 0, /* properties_required */
4613 : 0, /* properties_provided */
4614 : 0, /* properties_destroyed */
4615 : 0, /* todo_flags_start */
4616 : 0, /* todo_flags_finish */
4617 : };
4618 :
4619 : class pass_split_before_sched2 : public rtl_opt_pass
4620 : {
4621 : public:
4622 288047 : pass_split_before_sched2 (gcc::context *ctxt)
4623 576094 : : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4624 : {}
4625 :
4626 : /* opt_pass methods: */
     : /* Gate on enable_split_before_sched2 (scheduler present and
     :    -fschedule-insns2 active).  */
4627 1474422 : bool gate (function *) final override
4628 : {
4629 1474422 : return enable_split_before_sched2 ();
4630 : }
4631 :
     : /* Full split of the current function's insns.  */
4632 961549 : unsigned int execute (function *) final override
4633 : {
4634 961549 : split_all_insns ();
4635 961549 : return 0;
4636 : }
4637 :
4638 : }; // class pass_split_before_sched2
4639 :
4640 : } // anon namespace
4641 :
4642 : rtl_opt_pass *
4643 288047 : make_pass_split_before_sched2 (gcc::context *ctxt)
4644 : {
4645 288047 : return new pass_split_before_sched2 (ctxt);
4646 : }
4647 :
namespace {

/* Descriptor for the "split4" pass (see pass_split_before_regstack below).
   The fields are positional: type, name, optinfo flags, timevar, and the
   four property/todo masks, all of which are unused here.  */

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Fourth insn-splitting pass ("split4"); per its name it is scheduled
   before the register-stack conversion pass, and its gate (defined
   out-of-line below) only fires for STACK_REGS targets with
   HAVE_ATTR_length.  */

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override;
  unsigned int execute (function *) final override
  {
    split_all_insns ();
    return 0;
  }

}; // class pass_split_before_regstack

/* Gate for split4: run only when split3 would not (or when selective
   scheduling may have created new insns needing a split), and only on
   STACK_REGS targets that have a length attribute.  */

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  Selective scheduling can result in
     further instructions that need splitting.  */
#ifdef INSN_SCHEDULING
  return !enable_split_before_sched2 () || flag_selective_scheduling2;
#else
  return !enable_split_before_sched2 ();
#endif
#else
  return false;
#endif
}

} // anon namespace
4700 :
4701 : rtl_opt_pass *
4702 288047 : make_pass_split_before_regstack (gcc::context *ctxt)
4703 : {
4704 288047 : return new pass_split_before_regstack (ctxt);
4705 : }
4706 :
namespace {

/* Descriptor for the "split5" pass (see pass_split_for_shorten_branches
   below).  The fields are positional: type, name, optinfo flags,
   timevar, and the four property/todo masks, all of which are unused
   here.  */

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Fifth insn-splitting pass ("split5"); per its name it exists for the
   benefit of shorten_branches.  Unlike the other splitting passes it
   uses split_all_insns_noflow, and it only fires on targets with a
   length attribute and no register stack.  */

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    /* The placement of the splitting that we do for shorten_branches
       depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
    return true;
#else
    return false;
#endif
  }

  unsigned int execute (function *) final override
  {
    split_all_insns_noflow ();
    return 0;
  }

}; // class pass_split_for_shorten_branches

} // anon namespace
4750 :
4751 : rtl_opt_pass *
4752 288047 : make_pass_split_for_shorten_branches (gcc::context *ctxt)
4753 : {
4754 288047 : return new pass_split_for_shorten_branches (ctxt);
4755 : }
4756 :
4757 : /* (Re)initialize the target information after a change in target. */
4758 :
4759 : void
4760 216330 : recog_init ()
4761 : {
4762 : /* The information is zero-initialized, so we don't need to do anything
4763 : first time round. */
4764 216330 : if (!this_target_recog->x_initialized)
4765 : {
4766 214185 : this_target_recog->x_initialized = true;
4767 214185 : return;
4768 : }
4769 2145 : memset (this_target_recog->x_bool_attr_masks, 0,
4770 : sizeof (this_target_recog->x_bool_attr_masks));
4771 32513910 : for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4772 32511765 : if (this_target_recog->x_op_alt[i])
4773 : {
4774 29880 : free (this_target_recog->x_op_alt[i]);
4775 29880 : this_target_recog->x_op_alt[i] = 0;
4776 : }
4777 : }
|