Line data Source code
1 : /* Subroutines used by or related to instruction recognition.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "stmt.h"
29 : #include "cfghooks.h"
30 : #include "df.h"
31 : #include "memmodel.h"
32 : #include "tm_p.h"
33 : #include "insn-config.h"
34 : #include "regs.h"
35 : #include "emit-rtl.h"
36 : #include "recog.h"
37 : #include "insn-attr.h"
38 : #include "addresses.h"
39 : #include "cfgrtl.h"
40 : #include "cfgbuild.h"
41 : #include "cfgcleanup.h"
42 : #include "reload.h"
43 : #include "tree-pass.h"
44 : #include "function-abi.h"
45 : #include "rtl-iter.h"
46 :
/* Default rtx code for popping the stack, used when the target does not
   define STACK_POP_CODE itself: the pop side of a push is a
   post-increment when the stack grows downward, a post-decrement
   otherwise.  */
#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
54 :
/* Forward declarations for static routines defined later in this file.  */
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

/* The recog state used when no target switching is in effect.  */
struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.cc and expmed.cc (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.cc and final.cc and reload.cc.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

/* Operand data for the insn most recently processed by extract_insn.  */
struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* True for inline asm operands with - constraint modifier.  */
bool raw_constraint_p;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.cc.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
101 :
102 : /* Initialize data used by the function `recog'.
103 : This must be called once in the compilation of a function
104 : before any insn recognition may be done in the function. */
105 :
106 : void
107 7799577 : init_recog_no_volatile (void)
108 : {
109 7799577 : volatile_ok = 0;
110 7799577 : }
111 :
112 : void
113 11599883 : init_recog (void)
114 : {
115 11599883 : volatile_ok = 1;
116 11599883 : }
117 :
118 :
119 : /* Return true if labels in asm operands BODY are LABEL_REFs. */
120 :
121 : static bool
122 105097825 : asm_labels_ok (rtx body)
123 : {
124 105097825 : rtx asmop;
125 105097825 : int i;
126 :
127 105097825 : asmop = extract_asm_operands (body);
128 105097825 : if (asmop == NULL_RTX)
129 : return true;
130 :
131 802669 : for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
132 7507 : if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
133 : return false;
134 :
135 : return true;
136 : }
137 :
138 : /* Check that X is an insn-body for an `asm' with operands
139 : and that the operands mentioned in it are legitimate. */
140 :
141 : bool
142 105097825 : check_asm_operands (rtx x)
143 : {
144 105097825 : int noperands;
145 105097825 : rtx *operands;
146 105097825 : const char **constraints;
147 105097825 : int i;
148 :
149 105097825 : if (!asm_labels_ok (x))
150 : return false;
151 :
152 : /* Post-reload, be more strict with things. */
153 105097825 : if (reload_completed)
154 : {
155 : /* ??? Doh! We've not got the wrapping insn. Cook one up. */
156 31224 : rtx_insn *insn = make_insn_raw (x);
157 31224 : extract_insn (insn);
158 31224 : constrain_operands (1, get_enabled_alternatives (insn));
159 31224 : return which_alternative >= 0;
160 : }
161 :
162 105066601 : noperands = asm_noperands (x);
163 105066601 : if (noperands < 0)
164 : return false;
165 748738 : if (noperands == 0)
166 : return true;
167 :
168 635573 : operands = XALLOCAVEC (rtx, noperands);
169 635573 : constraints = XALLOCAVEC (const char *, noperands);
170 :
171 635573 : decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
172 :
173 3064217 : for (i = 0; i < noperands; i++)
174 : {
175 2712901 : const char *c = constraints[i];
176 2712901 : if (c[0] == '%')
177 12327 : c++;
178 2712901 : if (! asm_operand_ok (operands[i], c, constraints))
179 : return false;
180 : }
181 :
182 : return true;
183 : }
184 :
/* Static data for the next two routines.

   Each pending (not yet confirmed or cancelled) change is recorded in
   one of these; verify_changes, confirm_change_group and cancel_changes
   walk the array to validate, commit or undo them.  */

struct change_t
{
  rtx object;		/* The insn or MEM containing the change, or null.  */
  int old_code;		/* INSN_CODE of OBJECT before the change.  */
  int old_len;		/* Previous XVECLEN of *LOC, or -1 if unchanged.  */
  bool unshare;		/* Whether to copy_rtx *LOC when confirming.  */
  rtx *loc;		/* Location inside OBJECT that was rewritten.  */
  rtx old;		/* Previous contents of *LOC.  */
};

/* Growable array of pending changes and its allocated capacity.  */
static change_t *changes;
static int changes_allocated;

/* Number of entries of CHANGES currently in use.  */
static int num_changes = 0;
/* Number of changes currently undone by live undo_recog_changes objects.  */
int undo_recog_changes::s_num_changes = 0;
202 :
203 : /* Validate a proposed change to OBJECT. LOC is the location in the rtl
204 : at which NEW_RTX will be placed. If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
205 : will also be changed to NEW_LEN, which is no greater than the current
206 : XVECLEN. If OBJECT is zero, no validation is done, the change is
207 : simply made.
208 :
209 : Two types of objects are supported: If OBJECT is a MEM, memory_address_p
210 : will be called with the address and mode as parameters. If OBJECT is
211 : an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
212 : the change in place.
213 :
214 : IN_GROUP is nonzero if this is part of a group of changes that must be
215 : performed as a group. In that case, the changes will be stored. The
216 : function `apply_change_group' will validate and apply the changes.
217 :
218 : If IN_GROUP is zero, this is a single change. Try to recognize the insn
219 : or validate the memory reference with the change applied. If the result
220 : is not valid for the machine, suppress the change and return false.
221 : Otherwise, perform the change and return true. */
222 :
static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
		   bool unshare, int new_len = -1)
{
  gcc_assert (!undo_recog_changes::is_active ());
  rtx old = *loc;

  /* Single-element parallels aren't valid and won't match anything.
     Replace them with the single element.  */
  if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
    {
      new_rtx = XVECEXP (new_rtx, 0, 0);
      new_len = -1;
    }

  /* When a change is part of a group, callers expect to be able to change
     INSN_CODE after making the change and have the code reset to its old
     value by a later cancel_changes.  We therefore need to register group
     changes even if they're no-ops.  */
  if (!in_group
      && (old == new_rtx || rtx_equal_p (old, new_rtx))
      && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
    return true;

  /* A non-group change must be the only pending one, and a length change
     must leave the rtx itself in place.  */
  gcc_assert ((in_group != 0 || num_changes == 0)
	      && (new_len < 0 || new_rtx == *loc));

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  /* Remember the old vector length so cancel_changes can restore it.  */
  changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
  changes[num_changes].unshare = unshare;

  if (new_len >= 0)
    XVECLEN (new_rtx, 0) = new_len;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return true;
  else
    return apply_change_group ();
}
292 :
293 : /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
294 : UNSHARE to false. */
295 :
296 : bool
297 1487063781 : validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
298 : {
299 1487063781 : return validate_change_1 (object, loc, new_rtx, in_group, false);
300 : }
301 :
302 : /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
303 : UNSHARE to true. */
304 :
305 : bool
306 285089493 : validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
307 : {
308 285089493 : return validate_change_1 (object, loc, new_rtx, in_group, true);
309 : }
310 :
311 : /* Change XVECLEN (*LOC, 0) to NEW_LEN. OBJECT, IN_GROUP and the return
312 : value are as for validate_change_1. */
313 :
314 : bool
315 17826415 : validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
316 : {
317 17826415 : return validate_change_1 (object, loc, *loc, in_group, false, new_len);
318 : }
319 :
320 : /* Keep X canonicalized if some changes have made it non-canonical; only
321 : modifies the operands of X, not (for example) its code. Simplifications
322 : are not the job of this routine.
323 :
324 : Return true if anything was changed. */
325 : bool
326 1834932 : canonicalize_change_group (rtx_insn *insn, rtx x)
327 : {
328 1834932 : if (COMMUTATIVE_P (x)
329 1834932 : && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
330 : {
331 : /* Oops, the caller has made X no longer canonical.
332 : Let's redo the changes in the correct order. */
333 88917 : rtx tem = XEXP (x, 0);
334 88917 : validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
335 88917 : validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
336 88917 : return true;
337 : }
338 : else
339 1746015 : return false;
340 : }
341 :
342 : /* Check if REG_INC argument in *data overlaps a stored REG. */
343 :
344 : static void
345 0 : check_invalid_inc_dec (rtx reg, const_rtx, void *data)
346 : {
347 0 : rtx *pinc = (rtx *) data;
348 0 : if (*pinc == NULL_RTX || MEM_P (reg))
349 : return;
350 0 : if (reg_overlap_mentioned_p (reg, *pinc))
351 0 : *pinc = NULL_RTX;
352 : }
353 :
354 : /* This subroutine of apply_change_group verifies whether the changes to INSN
355 : were valid; i.e. whether INSN can still be recognized.
356 :
357 : If IN_GROUP is true clobbers which have to be added in order to
358 : match the instructions will be added to the current change group.
359 : Otherwise the changes will take effect immediately. */
360 :
361 : bool
362 477165633 : insn_invalid_p (rtx_insn *insn, bool in_group)
363 : {
364 477165633 : rtx pat = PATTERN (insn);
365 477165633 : int num_clobbers = 0;
366 : /* If we are before reload and the pattern is a SET, see if we can add
367 : clobbers. */
368 477165633 : int icode = recog (pat, insn,
369 477165633 : (GET_CODE (pat) == SET
370 396980121 : && ! reload_completed
371 364340833 : && ! reload_in_progress)
372 : ? &num_clobbers : 0);
373 477165633 : bool is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
374 :
375 :
376 : /* If this is an asm and the operand aren't legal, then fail. Likewise if
377 : this is not an asm and the insn wasn't recognized. */
378 553355 : if ((is_asm && ! check_asm_operands (PATTERN (insn)))
379 476964874 : || (!is_asm && icode < 0))
380 17714717 : return true;
381 :
382 : /* If we have to add CLOBBERs, fail if we have to add ones that reference
383 : hard registers since our callers can't know if they are live or not.
384 : Otherwise, add them. */
385 459450916 : if (num_clobbers > 0)
386 : {
387 1813 : rtx newpat;
388 :
389 1813 : if (added_clobbers_hard_reg_p (icode))
390 : return true;
391 :
392 537 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
393 537 : XVECEXP (newpat, 0, 0) = pat;
394 537 : add_clobbers (newpat, icode);
395 537 : if (in_group)
396 536 : validate_change (insn, &PATTERN (insn), newpat, 1);
397 : else
398 1 : PATTERN (insn) = pat = newpat;
399 : }
400 :
401 : /* After reload, verify that all constraints are satisfied. */
402 459449640 : if (reload_completed)
403 : {
404 32624980 : extract_insn (insn);
405 :
406 32624980 : if (! constrain_operands (1, get_preferred_alternatives (insn)))
407 : return true;
408 : }
409 :
410 : /* Punt if REG_INC argument overlaps some stored REG. */
411 459426597 : for (rtx link = FIND_REG_INC_NOTE (insn, NULL_RTX);
412 459426597 : link; link = XEXP (link, 1))
413 : if (REG_NOTE_KIND (link) == REG_INC)
414 : {
415 : rtx reg = XEXP (link, 0);
416 : note_stores (insn, check_invalid_inc_dec, ®);
417 : if (reg == NULL_RTX)
418 : return true;
419 : }
420 :
421 459426597 : INSN_CODE (insn) = icode;
422 459426597 : return false;
423 : }
424 :
425 : /* Return number of changes made and not validated yet. */
426 : int
427 4826931 : num_changes_pending (void)
428 : {
429 4826931 : return num_changes;
430 : }
431 :
432 : /* Tentatively apply the changes numbered NUM and up.
433 : Return true if all changes are valid, false otherwise. */
434 :
bool
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && register_asm_p (changes[i].old))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      /* With only two elements, dropping the CLOBBER leaves a
		 plain pattern; otherwise build a shorter PARALLEL.  */
	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  /* All changes verified iff the loop ran to completion.  */
  return (i == num_changes);
}
532 :
533 : /* A group of changes has previously been issued with validate_change
534 : and verified with verify_changes. Call df_insn_rescan for each of
535 : the insn changed and clear num_changes. */
536 :
void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  gcc_assert (!undo_recog_changes::is_active ());
  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* Changes registered with the unshare flag get a private copy of
	 the replacement rtx now that the group is committed.  */
      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  /* Rescan the final insn touched by the group, if any.  */
  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}
565 :
566 : /* Apply a group of changes previously issued with `validate_change'.
567 : If all changes are valid, call confirm_change_group and return true,
568 : otherwise, call cancel_changes and return false. */
569 :
570 : bool
571 746274143 : apply_change_group (void)
572 : {
573 746274143 : if (verify_changes (0))
574 : {
575 734530306 : confirm_change_group ();
576 734530306 : return true;
577 : }
578 : else
579 : {
580 11743837 : cancel_changes (0);
581 11743837 : return false;
582 : }
583 : }
584 :
585 :
586 : /* Return the number of changes so far in the current group. */
587 :
588 : int
589 730331320 : num_validated_changes (void)
590 : {
591 730331320 : return num_changes;
592 : }
593 :
594 : /* Retract the changes numbered NUM and up. */
595 :
596 : void
597 165878553 : cancel_changes (int num)
598 : {
599 165878553 : gcc_assert (!undo_recog_changes::is_active ());
600 165878553 : int i;
601 :
602 : /* Back out all the changes. Do this in the opposite order in which
603 : they were made. */
604 292154729 : for (i = num_changes - 1; i >= num; i--)
605 : {
606 126276176 : if (changes[i].old_len >= 0)
607 10596521 : XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
608 : else
609 115679655 : *changes[i].loc = changes[i].old;
610 126276176 : if (changes[i].object && !MEM_P (changes[i].object))
611 : {
612 104614352 : INSN_CODE (changes[i].object) = changes[i].old_code;
613 104614352 : if (recog_data.insn == changes[i].object)
614 213 : recog_data.insn = nullptr;
615 : }
616 : }
617 165878553 : num_changes = num;
618 165878553 : }
619 :
620 : /* Swap the status of change NUM from being applied to not being applied,
621 : or vice versa. */
622 :
623 : static void
624 47190844 : swap_change (int num)
625 : {
626 47190844 : if (changes[num].old_len >= 0)
627 2095844 : std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
628 : else
629 45095000 : std::swap (*changes[num].loc, changes[num].old);
630 47190844 : if (changes[num].object && !MEM_P (changes[num].object))
631 : {
632 47190844 : std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
633 47190844 : if (recog_data.insn == changes[num].object)
634 8 : recog_data.insn = nullptr;
635 : }
636 47190844 : }
637 :
638 27217323 : undo_recog_changes::undo_recog_changes (int num)
639 27217323 : : m_old_num_changes (s_num_changes)
640 : {
641 27217323 : gcc_assert (num <= num_changes - s_num_changes);
642 50812745 : for (int i = num_changes - s_num_changes - 1; i >= num; i--)
643 23595422 : swap_change (i);
644 27217323 : s_num_changes = num_changes - num;
645 27217323 : }
646 :
647 27217323 : undo_recog_changes::~undo_recog_changes ()
648 : {
649 50812745 : for (int i = num_changes - s_num_changes;
650 50812745 : i < num_changes - m_old_num_changes; ++i)
651 23595422 : swap_change (i);
652 27217323 : s_num_changes = m_old_num_changes;
653 27217323 : }
654 :
/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  LOC points at the expression (inside OBJECT) into which TO has just
   been substituted; OP0_MODE is the mode of the expression's first operand
   as recorded by the caller.  Any simplification is registered as a grouped
   change via validate_change/validate_unshare_change.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  /* Re-canonicalize the operand order if the substitution broke it.  */
  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      /* Rewrite (minus A const) as (plus A (neg const)), the canonical
	 form, and let simplify_gen_binary fold it.  */
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  int pos = INTVAL (XEXP (x, 2));
	  machine_mode new_mode = is_mode;
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));

	  /* If we have a narrower mode, we can do something.  */
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
809 :
/* Replace every occurrence of FROM in *LOC with TO.  Mark each change with
   validate_change passing OBJECT, so the whole substitution forms one
   change group.  If SIMPLIFY, keep each rewritten expression consistent
   via simplify_while_replacing.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      /* Only recurse into the destination; the shared source was
		 already handled via element 0.  */
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
897 :
898 : /* Try replacing every occurrence of FROM in subexpression LOC of INSN
899 : with TO. After all changes have been made, validate by seeing
900 : if INSN is still valid. */
901 :
902 : bool
903 0 : validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
904 : {
905 0 : validate_replace_rtx_1 (loc, from, to, insn, true);
906 0 : return apply_change_group ();
907 : }
908 :
909 : /* Try replacing every occurrence of FROM in INSN with TO. After all
910 : changes have been made, validate by seeing if INSN is still valid. */
911 :
912 : bool
913 2032786 : validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
914 : {
915 2032786 : validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
916 2032786 : return apply_change_group ();
917 : }
918 :
919 : /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
920 : is a part of INSN. After all changes have been made, validate by seeing if
921 : INSN is still valid.
922 : validate_replace_rtx (from, to, insn) is equivalent to
923 : validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
924 :
925 : bool
926 0 : validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
927 : {
928 0 : validate_replace_rtx_1 (where, from, to, insn, true);
929 0 : return apply_change_group ();
930 : }
931 :
932 : /* Same as above, but do not simplify rtx afterwards. */
933 : bool
934 88 : validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
935 : rtx_insn *insn)
936 : {
937 88 : validate_replace_rtx_1 (where, from, to, insn, false);
938 88 : return apply_change_group ();
939 :
940 : }
941 :
942 : /* Try replacing every occurrence of FROM in INSN with TO. This also
943 : will replace in REG_EQUAL and REG_EQUIV notes. */
944 :
945 : void
946 21 : validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
947 : {
948 21 : rtx note;
949 21 : validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
950 28 : for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
951 7 : if (REG_NOTE_KIND (note) == REG_EQUAL
952 7 : || REG_NOTE_KIND (note) == REG_EQUIV)
953 0 : validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
954 21 : }
955 :
/* Closure passed by validate_replace_src_group through note_uses to
   validate_replace_src_1, describing a single FROM -> TO replacement
   within INSN.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX to be replaced.  */
  rtx to;			/* New RTX to substitute in its place.  */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};
963 :
964 : static void
965 21871216 : validate_replace_src_1 (rtx *x, void *data)
966 : {
967 21871216 : struct validate_replace_src_data *d
968 : = (struct validate_replace_src_data *) data;
969 :
970 21871216 : validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
971 21871216 : }
972 :
973 : /* Try replacing every occurrence of FROM in INSN with TO, avoiding
974 : SET_DESTs. */
975 :
976 : void
977 15707557 : validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
978 : {
979 15707557 : struct validate_replace_src_data d;
980 :
981 15707557 : d.from = from;
982 15707557 : d.to = to;
983 15707557 : d.insn = insn;
984 15707557 : note_uses (&PATTERN (insn), validate_replace_src_1, &d);
985 15707557 : }
986 :
987 : /* Try simplify INSN.
988 : Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
989 : pattern and return true if something was simplified. */
990 :
991 : bool
992 0 : validate_simplify_insn (rtx_insn *insn)
993 : {
994 0 : int i;
995 0 : rtx pat = NULL;
996 0 : rtx newpat = NULL;
997 :
998 0 : pat = PATTERN (insn);
999 :
1000 0 : if (GET_CODE (pat) == SET)
1001 : {
1002 0 : newpat = simplify_rtx (SET_SRC (pat));
1003 0 : if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
1004 0 : validate_change (insn, &SET_SRC (pat), newpat, 1);
1005 0 : newpat = simplify_rtx (SET_DEST (pat));
1006 0 : if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
1007 0 : validate_change (insn, &SET_DEST (pat), newpat, 1);
1008 : }
1009 0 : else if (GET_CODE (pat) == PARALLEL)
1010 0 : for (i = 0; i < XVECLEN (pat, 0); i++)
1011 : {
1012 0 : rtx s = XVECEXP (pat, 0, i);
1013 :
1014 0 : if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
1015 : {
1016 0 : newpat = simplify_rtx (SET_SRC (s));
1017 0 : if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
1018 0 : validate_change (insn, &SET_SRC (s), newpat, 1);
1019 0 : newpat = simplify_rtx (SET_DEST (s));
1020 0 : if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
1021 0 : validate_change (insn, &SET_DEST (s), newpat, 1);
1022 : }
1023 : }
1024 0 : return ((num_changes_pending () > 0) && (apply_change_group () > 0));
1025 : }
1026 :
/* Try to process the address of memory expression MEM.  Return true on
   success; leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_mem_1 (rtx mem)
{
  auto old_num_changes = num_validated_changes ();
  /* Record that we are inside an address while propagating into it, so
     that substituted values are treated in address context.  */
  mem_depth += 1;
  bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
  mem_depth -= 1;
  if (!res)
    return false;

  /* If the propagation changed the address, let the check_mem hook
     veto the new memory reference.  */
  if (old_num_changes != num_validated_changes ()
      && should_check_mems
      && !check_mem (old_num_changes, mem))
    return false;

  return true;
}
1047 :
/* Try to process the rvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_rvalue_1 (rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);

  auto old_num_changes = num_validated_changes ();
  /* First handle the case in which X itself matches FROM: queue the
     replacement of *LOC with TO (adjusted for any mode difference).  */
  if (from
      && GET_CODE (x) == GET_CODE (from)
      && (REG_P (x)
	  ? REGNO (x) == REGNO (from)
	  : rtx_equal_p (x, from)))
    {
      /* Don't replace register asms in asm statements; we mustn't
	 change the user's register allocation.  */
      if (REG_P (x)
	  && HARD_REGISTER_P (x)
	  && register_asm_p (x)
	  && asm_noperands (PATTERN (insn)) > 0)
	return false;

      rtx newval = to;
      if (GET_MODE (x) != GET_MODE (from))
	{
	  /* Mode mismatches can only occur for hard registers.  */
	  gcc_assert (REG_P (x) && HARD_REGISTER_P (x));
	  if (REG_NREGS (x) != REG_NREGS (from)
	      || !REG_CAN_CHANGE_MODE_P (REGNO (x), GET_MODE (from),
					 GET_MODE (x)))
	    return false;

	  /* If the reference is paradoxical and the replacement
	     value contains registers, we would need to check that the
	     simplification below does not increase REG_NREGS for those
	     registers either.  It seems simpler to punt on nonconstant
	     values instead.  */
	  if (paradoxical_subreg_p (GET_MODE (x), GET_MODE (from))
	      && !CONSTANT_P (to))
	    return false;

	  newval = simplify_subreg (GET_MODE (x), to, GET_MODE (from),
				    subreg_lowpart_offset (GET_MODE (x),
							   GET_MODE (from)));
	  if (!newval)
	    return false;

	  /* Check that the simplification didn't just push an explicit
	     subreg down into subexpressions.  In particular, for a register
	     R that has a fixed mode, such as the stack pointer, a subreg of:

	       (plus:M (reg:M R) (const_int C))

	     would be:

	       (plus:N (subreg:N (reg:M R) ...) (const_int C'))

	     But targets can legitimately assume that subregs of hard registers
	     will not be created after RA (except in special circumstances,
	     such as strict_low_part).  */
	  subrtx_iterator::array_type array;
	  FOR_EACH_SUBRTX (iter, array, newval, NONCONST)
	    if (GET_CODE (*iter) == SUBREG)
	      return false;
	}

      if (should_unshare)
	validate_unshare_change (insn, loc, newval, 1);
      else
	validate_change (insn, loc, newval, 1);
      if (mem_depth && !REG_P (newval) && !CONSTANT_P (newval))
	{
	  /* We're substituting into an address, but TO will have the
	     form expected outside an address.  Canonicalize it if
	     necessary.  */
	  insn_propagation subprop (insn);
	  subprop.mem_depth += 1;
	  if (!subprop.apply_to_rvalue (loc))
	    gcc_unreachable ();
	  if (should_unshare
	      && num_validated_changes () != old_num_changes + 1)
	    {
	      /* TO is owned by someone else, so create a copy and
		 return TO to its original form.  */
	      newval = copy_rtx (*loc);
	      cancel_changes (old_num_changes);
	      validate_change (insn, loc, newval, 1);
	    }
	}
      num_replacements += 1;
      should_unshare = true;
      result_flags |= UNSIMPLIFIED;
      return true;
    }

  /* Recursively apply the substitution and see if we can simplify
     the result.  This specifically shouldn't use simplify_gen_* for
     speculative simplifications, since we want to avoid generating new
     expressions where possible.  */
  auto old_result_flags = result_flags;
  rtx newx = NULL_RTX;
  bool recurse_p = false;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0)))
	  return false;
	/* If we're substituting and nothing changed in the operand,
	   there's nothing to simplify either.  */
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
	break;
      }

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      {
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	/* Canonicalize commutative operand order before simplifying.  */
	if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	    && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
	  newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
	else
	  newx = simplify_binary_operation (code, mode,
					    XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
	/* Record the operand mode before substitution, in case both
	   operands end up with VOIDmode afterwards.  */
	machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
				? GET_MODE (XEXP (x, 0))
				: GET_MODE (XEXP (x, 1)));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_relational_operation (code, mode, op_mode,
					      XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      {
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1))
	    || !apply_to_rvalue_1 (&XEXP (x, 2)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_ternary_operation (code, mode, op0_mode,
					   XEXP (x, 0), XEXP (x, 1),
					   XEXP (x, 2));
	break;
      }

    case RTX_EXTRA:
      if (code == SUBREG)
	{
	  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
	  if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  rtx inner = SUBREG_REG (x);
	  newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
	  /* Reject the same cases that simplify_gen_subreg would.  */
	  if (!newx
	      && (GET_CODE (inner) == SUBREG
		  || GET_CODE (inner) == CONCAT
		  || GET_MODE (inner) == VOIDmode
		  || !validate_subreg (mode, inner_mode,
				       inner, SUBREG_BYTE (x))))
	    {
	      failure_reason = "would create an invalid subreg";
	      return false;
	    }
	  break;
	}
      else
	recurse_p = true;
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
	{
	  if (!apply_to_rvalue_1 (&XEXP (x, 0))
	      || !apply_to_rvalue_1 (&XEXP (x, 1)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  /* (lo_sum (high x) y) -> y where x and y have the same base.  */
	  rtx op0 = XEXP (x, 0);
	  rtx op1 = XEXP (x, 1);
	  if (GET_CODE (op0) == HIGH)
	    {
	      rtx base0, base1, offset0, offset1;
	      split_const (XEXP (op0, 0), &base0, &offset0);
	      split_const (op1, &base1, &offset1);
	      if (rtx_equal_p (base0, base1))
		newx = op1;
	    }
	}
      else if (code == REG)
	{
	  /* A register that merely overlaps FROM (rather than matching
	     it exactly, handled above) cannot be substituted safely.  */
	  if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
	    {
	      failure_reason = "inexact register overlap";
	      return false;
	    }
	}
      else if (code == MEM)
	return apply_to_mem_1 (x);
      else
	recurse_p = true;
      break;

    case RTX_CONST_OBJ:
      break;

    case RTX_AUTOINC:
      /* Refuse to substitute a register that an autoincrement modifies.  */
      if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
	{
	  failure_reason = "is subject to autoinc";
	  return false;
	}
      recurse_p = true;
      break;

    case RTX_MATCH:
    case RTX_INSN:
      gcc_unreachable ();
    }

  if (recurse_p)
    {
      /* Generic walk over all rtx operands for codes not handled above.  */
      const char *fmt = GET_RTX_FORMAT (code);
      for (int i = 0; fmt[i]; i++)
	switch (fmt[i])
	  {
	  case 'E':
	    for (int j = 0; j < XVECLEN (x, i); j++)
	      if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
		return false;
	    break;

	  case 'e':
	    if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
	      return false;
	    break;
	  }
    }
  else if (newx && !rtx_equal_p (x, newx))
    {
      /* All substitutions made by OLD_NUM_CHANGES onwards have been
	 simplified.  */
      result_flags = ((result_flags & ~UNSIMPLIFIED)
		      | (old_result_flags & UNSIMPLIFIED));

      if (should_note_simplifications)
	note_simplification (old_num_changes, old_result_flags, x, newx);

      /* There's no longer any point unsharing the substitutions made
	 for subexpressions, since we'll just copy this one instead.  */
      bool unshare = false;
      for (int i = old_num_changes; i < num_changes; ++i)
	{
	  unshare |= changes[i].unshare;
	  changes[i].unshare = false;
	}
      if (unshare)
	validate_unshare_change (insn, loc, newx, 1);
      else
	validate_change (insn, loc, newx, 1);
    }

  return true;
}
1342 :
/* Try to process the lvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_lvalue_1 (rtx dest)
{
  rtx old_dest = dest;
  /* Strip wrappers to find the real destination; the position and
     length operands of a ZERO_EXTRACT are rvalues and so are
     propagated into.  */
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    {
      if (GET_CODE (dest) == ZERO_EXTRACT
	  && (!apply_to_rvalue_1 (&XEXP (dest, 1))
	      || !apply_to_rvalue_1 (&XEXP (dest, 2))))
	return false;
      dest = XEXP (dest, 0);
    }

  /* A store to memory uses its address as an rvalue.  */
  if (MEM_P (dest))
    return apply_to_mem_1 (dest);

  /* Check whether the substitution is safe in the presence of this lvalue.  */
  if (!from
      || dest == old_dest
      || !REG_P (dest)
      || !reg_overlap_mentioned_p (dest, from))
    return true;

  /* DEST was wrapped and overlaps FROM; a plain SUBREG write that does
     not read the inner register is still safe.  */
  if (SUBREG_P (old_dest)
      && SUBREG_REG (old_dest) == dest
      && !read_modify_subreg_p (old_dest))
    return true;

  failure_reason = "is part of a read-write destination";
  return false;
}
1379 :
/* Try to process the instruction pattern at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_pattern_1 (rtx *loc)
{
  rtx body = *loc;
  switch (GET_CODE (body))
    {
    case COND_EXEC:
      /* The test is an rvalue; the guarded body is a nested pattern.  */
      return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
	      && apply_to_pattern_1 (&COND_EXEC_CODE (body)));

    case PARALLEL:
      for (int i = 0; i < XVECLEN (body, 0); ++i)
	{
	  rtx *subloc = &XVECEXP (body, 0, i);
	  if (GET_CODE (*subloc) == SET)
	    {
	      if (!apply_to_lvalue_1 (SET_DEST (*subloc)))
		return false;
	      /* ASM_OPERANDS are shared between SETs in the same PARALLEL.
		 Only process them on the first iteration.  */
	      if ((i == 0 || GET_CODE (SET_SRC (*subloc)) != ASM_OPERANDS)
		  && !apply_to_rvalue_1 (&SET_SRC (*subloc)))
		return false;
	    }
	  else
	    {
	      if (!apply_to_pattern_1 (subloc))
		return false;
	    }
	}
      return true;

    case ASM_OPERANDS:
      /* Only the asm's inputs are rvalues.  */
      for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
	if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
	  return false;
      return true;

    case CLOBBER:
      /* The clobbered expression is an lvalue.  */
      return apply_to_lvalue_1 (XEXP (body, 0));

    case SET:
      return (apply_to_lvalue_1 (SET_DEST (body))
	      && apply_to_rvalue_1 (&SET_SRC (body)));

    default:
      /* All the other possibilities never store and can use a normal
	 rtx walk.  This includes:

	 - USE
	 - TRAP_IF
	 - PREFETCH
	 - UNSPEC
	 - UNSPEC_VOLATILE.  */
      return apply_to_rvalue_1 (loc);
    }
}
1440 :
1441 : /* Apply this insn_propagation object's simplification or substitution
1442 : to the instruction pattern at LOC. */
1443 :
1444 : bool
1445 61906077 : insn_propagation::apply_to_pattern (rtx *loc)
1446 : {
1447 61906077 : unsigned int num_changes = num_validated_changes ();
1448 61906077 : bool res = apply_to_pattern_1 (loc);
1449 61906077 : if (!res)
1450 2388522 : cancel_changes (num_changes);
1451 61906077 : return res;
1452 : }
1453 :
1454 : /* Apply this insn_propagation object's simplification or substitution
1455 : to the rvalue expression at LOC. */
1456 :
1457 : bool
1458 7414388 : insn_propagation::apply_to_rvalue (rtx *loc)
1459 : {
1460 7414388 : unsigned int num_changes = num_validated_changes ();
1461 7414388 : bool res = apply_to_rvalue_1 (loc);
1462 7414388 : if (!res)
1463 19809 : cancel_changes (num_changes);
1464 7414388 : return res;
1465 : }
1466 :
1467 : /* Like apply_to_rvalue, but specifically for the case where *LOC is in
1468 : a note. This never changes the INSN_CODE. */
1469 :
1470 : bool
1471 188739 : insn_propagation::apply_to_note (rtx *loc)
1472 : {
1473 188739 : auto old_code = INSN_CODE (insn);
1474 188739 : bool res = apply_to_rvalue (loc);
1475 188739 : if (INSN_CODE (insn) != old_code)
1476 92725 : INSN_CODE (insn) = old_code;
1477 188739 : return res;
1478 : }
1479 :
1480 : /* Check whether INSN matches a specific alternative of an .md pattern. */
1481 :
1482 : bool
1483 0 : valid_insn_p (rtx_insn *insn)
1484 : {
1485 0 : recog_memoized (insn);
1486 0 : if (INSN_CODE (insn) < 0)
1487 : return false;
1488 0 : extract_insn (insn);
1489 : /* We don't know whether the insn will be in code that is optimized
1490 : for size or speed, so consider all enabled alternatives. */
1491 0 : if (!constrain_operands (1, get_enabled_alternatives (insn)))
1492 : return false;
1493 : return true;
1494 : }
1495 :
/* Return true if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  /* A CONST_INT must already be in canonical (sign-extended) form
     for MODE.  */
  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return false;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && paradoxical_subreg_p (op))
	return false;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed
	  && maybe_ne (SUBREG_BYTE (op), 0)
	  && MEM_P (sub))
	return false;

      /* Reject a hard-register subreg whose mode change is invalid,
	 unless LRA deliberately created it.  */
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return false;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && paradoxical_subreg_p (op))
	return false;

      /* Continue checking the inner expression of the subreg.  */
      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      /* If -ffuse-ops-with-volatile-access is enabled, allow volatile
	 memory reference.  */
      if (!flag_fuse_ops_with_volatile_access
	  && !volatile_ok
	  && MEM_VOLATILE_P (op))
	return false;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return true;
    }

  return false;
}
1621 :
1622 : /* Return true if OP is a valid memory address for a memory reference
1623 : of mode MODE.
1624 :
1625 : The main use of this function is as a predicate in match_operand
1626 : expressions in the machine description. */
1627 :
1628 : bool
1629 112821102 : address_operand (rtx op, machine_mode mode)
1630 : {
1631 : /* Wrong mode for an address expr. */
1632 112821102 : if (GET_MODE (op) != VOIDmode
1633 100579968 : && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1634 : return false;
1635 :
1636 111975474 : return memory_address_p (mode, op);
1637 : }
1638 :
1639 : /* Return true if OP is a register reference of mode MODE.
1640 : If MODE is VOIDmode, accept a register in any mode.
1641 :
1642 : The main use of this function is as a predicate in match_operand
1643 : expressions in the machine description. */
1644 :
1645 : bool
1646 2626224721 : register_operand (rtx op, machine_mode mode)
1647 : {
1648 2626224721 : if (GET_CODE (op) == SUBREG)
1649 : {
1650 11741550 : rtx sub = SUBREG_REG (op);
1651 :
1652 : /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1653 : because it is guaranteed to be reloaded into one.
1654 : Just make sure the MEM is valid in itself.
1655 : (Ideally, (SUBREG (MEM)...) should not exist after reload,
1656 : but currently it does result from (SUBREG (REG)...) where the
1657 : reg went on the stack.) */
1658 11741550 : if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1659 : return false;
1660 : }
1661 2614483171 : else if (!REG_P (op))
1662 : return false;
1663 1918455913 : return general_operand (op, mode);
1664 : }
1665 :
1666 : /* Return true for a register in Pmode; ignore the tested mode. */
1667 :
1668 : bool
1669 0 : pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1670 : {
1671 0 : return register_operand (op, Pmode);
1672 : }
1673 :
1674 : /* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1675 : or a hard register. */
1676 :
1677 : bool
1678 738021 : scratch_operand (rtx op, machine_mode mode)
1679 : {
1680 738021 : if (GET_MODE (op) != mode && mode != VOIDmode)
1681 : return false;
1682 :
1683 699806 : return (GET_CODE (op) == SCRATCH
1684 699806 : || (REG_P (op)
1685 89344 : && (lra_in_progress
1686 71989 : || (REGNO (op) < FIRST_PSEUDO_REGISTER
1687 69899 : && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1688 : }
1689 :
/* Return true if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  /* A CONST_INT must already be in canonical (sign-extended) form
     for MODE.  */
  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  /* Otherwise, accept any constant whose mode is compatible with MODE,
     subject to the PIC and target legitimacy checks.  */
  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}
1718 :
1719 : /* Return true if OP is an operand that is a CONST_INT of mode MODE. */
1720 :
1721 : bool
1722 34237394 : const_int_operand (rtx op, machine_mode mode)
1723 : {
1724 34237394 : if (!CONST_INT_P (op))
1725 : return false;
1726 :
1727 28282126 : if (mode != VOIDmode
1728 28282126 : && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1729 : return false;
1730 :
1731 : return true;
1732 : }
1733 :
1734 : #if TARGET_SUPPORTS_WIDE_INT
/* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
bool
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return false;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      /* The constant must not need more HOST_WIDE_INT words than the
	 mode provides.  */
      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return false;

      if (prec == bitsize)
	return true;
      else
	{
	  /* Multiword partial int.  */
	  /* The bits above the precision in the top word must be a
	     sign extension of the value's significant bits.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return true;
}
1767 :
1768 : /* Return true if OP is an operand that is a constant integer or constant
1769 : floating-point number of MODE. */
1770 :
1771 : bool
1772 0 : const_double_operand (rtx op, machine_mode mode)
1773 : {
1774 0 : return (GET_CODE (op) == CONST_DOUBLE)
1775 0 : && (GET_MODE (op) == mode || mode == VOIDmode);
1776 : }
1777 : #else
/* Return true if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

bool
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  /* CONST_INT is accepted alongside CONST_DOUBLE here (this variant is
     compiled only when the target lacks wide-int support).  */
  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
1795 : #endif
1796 : /* Return true if OP is a general operand that is not an immediate
1797 : operand of mode MODE. */
1798 :
1799 : bool
1800 1904514391 : nonimmediate_operand (rtx op, machine_mode mode)
1801 : {
1802 1904514391 : return (general_operand (op, mode) && ! CONSTANT_P (op));
1803 : }
1804 :
1805 : /* Return true if OP is a register reference or
1806 : immediate value of mode MODE. */
1807 :
1808 : bool
1809 519185562 : nonmemory_operand (rtx op, machine_mode mode)
1810 : {
1811 519185562 : if (CONSTANT_P (op))
1812 31624450 : return immediate_operand (op, mode);
1813 487561112 : return register_operand (op, mode);
1814 : }
1815 :
/* Return true if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return false;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  /* The target may round push sizes up (e.g. to a word boundary).  */
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      /* An unpadded push uses the target's simple push addressing
	 code (e.g. PRE_DEC on a downward-growing stack).  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return false;
    }
  else
    {
      /* A padded push must be a PRE_MODIFY that adjusts the stack
	 pointer by exactly the rounded size, in the direction the
	 stack grows.  */
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
	  || (STACK_GROWS_DOWNWARD
	      ? maybe_ne (offset, -rounded_size)
	      : maybe_ne (offset, rounded_size)))
	return false;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
1859 :
1860 : /* Return true if OP is a valid operand that stands for popping a
1861 : value of mode MODE off the stack.
1862 :
1863 : The main use of this function is as a predicate in match_operand
1864 : expressions in the machine description. */
1865 :
1866 : bool
1867 303904371 : pop_operand (rtx op, machine_mode mode)
1868 : {
1869 303904371 : if (!MEM_P (op))
1870 : return false;
1871 :
1872 75151221 : if (mode != VOIDmode && GET_MODE (op) != mode)
1873 : return false;
1874 :
1875 75151221 : op = XEXP (op, 0);
1876 :
1877 75151221 : if (GET_CODE (op) != STACK_POP_CODE)
1878 : return false;
1879 :
1880 1360999 : return XEXP (op, 0) == stack_pointer_rtx;
1881 : }
1882 :
/* Return true if ADDR is a valid memory address
   for mode MODE in address space AS.  CH is forwarded unchanged to the
   target's legitimate_address_p hook (it is unused on targets that
   still define GO_IF_LEGITIMATE_ADDRESS).  */

bool
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED, rtx addr,
			     addr_space_t as, code_helper ch ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Legacy path: the macro jumps to WIN when the address is valid,
     otherwise falls through.  It knows nothing about address spaces,
     so only the generic one is allowed here.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return false;

 win:
  return true;
#else
  /* Modern path: ask the target hook, in non-strict mode (0).  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as, ch);
#endif
}
1901 :
1902 : /* Return true if OP is a valid memory reference with mode MODE,
1903 : including a valid address.
1904 :
1905 : The main use of this function is as a predicate in match_operand
1906 : expressions in the machine description. */
1907 :
1908 : bool
1909 1221176405 : memory_operand (rtx op, machine_mode mode)
1910 : {
1911 1221176405 : rtx inner;
1912 :
1913 1221176405 : if (! reload_completed)
1914 : /* Note that no SUBREG is a memory operand before end of reload pass,
1915 : because (SUBREG (MEM...)) forces reloading into a register. */
1916 121513873 : return MEM_P (op) && general_operand (op, mode);
1917 :
1918 1099662532 : if (mode != VOIDmode && GET_MODE (op) != mode)
1919 : return false;
1920 :
1921 804494055 : inner = op;
1922 804494055 : if (GET_CODE (inner) == SUBREG)
1923 8125 : inner = SUBREG_REG (inner);
1924 :
1925 804494055 : return (MEM_P (inner) && general_operand (op, mode));
1926 : }
1927 :
/* Return true if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

bool
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return false;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      /* The constant part of the address must exactly cancel the subreg
	 byte offset, leaving a bare operand as the effective address.  */
      return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
    }

  /* Normal case: a MEM whose address is itself a general operand.  */
  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1955 :
1956 : /* Return true if this is an ordered comparison operator (not including
1957 : ORDERED and UNORDERED). */
1958 :
1959 : bool
1960 28414214 : ordered_comparison_operator (rtx op, machine_mode mode)
1961 : {
1962 28414214 : if (mode != VOIDmode && GET_MODE (op) != mode)
1963 : return false;
1964 28414214 : switch (GET_CODE (op))
1965 : {
1966 : case EQ:
1967 : case NE:
1968 : case LT:
1969 : case LTU:
1970 : case LE:
1971 : case LEU:
1972 : case GT:
1973 : case GTU:
1974 : case GE:
1975 : case GEU:
1976 : return true;
1977 : default:
1978 : return false;
1979 : }
1980 : }
1981 :
1982 : /* Return true if this is a comparison operator. This allows the use of
1983 : MATCH_OPERATOR to recognize all the branch insns. */
1984 :
1985 : bool
1986 115045031 : comparison_operator (rtx op, machine_mode mode)
1987 : {
1988 4513762 : return ((mode == VOIDmode || GET_MODE (op) == mode)
1989 119175269 : && COMPARISON_P (op));
1990 : }
1991 :
1992 : /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1993 :
1994 : rtx
1995 1978253739 : extract_asm_operands (rtx body)
1996 : {
1997 1978253739 : rtx tmp;
1998 1978253739 : switch (GET_CODE (body))
1999 : {
2000 : case ASM_OPERANDS:
2001 : return body;
2002 :
2003 1518833965 : case SET:
2004 : /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
2005 1518833965 : tmp = SET_SRC (body);
2006 1518833965 : if (GET_CODE (tmp) == ASM_OPERANDS)
2007 : return tmp;
2008 : break;
2009 :
2010 331381919 : case PARALLEL:
2011 331381919 : tmp = XVECEXP (body, 0, 0);
2012 331381919 : if (GET_CODE (tmp) == ASM_OPERANDS)
2013 : return tmp;
2014 329124889 : if (GET_CODE (tmp) == SET)
2015 : {
2016 324938546 : tmp = SET_SRC (tmp);
2017 324938546 : if (GET_CODE (tmp) == ASM_OPERANDS)
2018 : return tmp;
2019 : }
2020 : break;
2021 :
2022 : default:
2023 : break;
2024 : }
2025 1972481099 : return NULL;
2026 : }
2027 :
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (const_cast<rtx> (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      /* No ASM_OPERANDS anywhere; the only remaining asm-like shape is
	 a basic asm with clobbers.  */
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	{
	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	  return 0;
	}
      return -1;
    }

  if (GET_CODE (body) == SET)
    /* Single output: exactly one SET wrapping the ASM_OPERANDS.  */
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))...
	      (use (reg ...))...
	      (clobber (reg ...))...].  */
	  /* Count backwards through USEs and CLOBBERs to determine
	     number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != USE
		  && GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...)
		      (use (reg ...))...
		      (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != USE
		&& GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  /* Total operand count: inputs + labels (for asm goto) + outputs.  */
  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
2113 :
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Write the location info into LOC.
   Return the assembler-template.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return the basic assembly string.

   If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  /* NBASE counts output operands already recorded; inputs and labels
     are appended after them.  */
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == USE
		    || GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;	/* Past last SET */
		gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	else if (GET_CODE (asmop) == ASM_INPUT)
	  {
	    /* Basic asm with clobbers: there are no operands to decode;
	       hand back the raw assembly string immediately.  */
	    if (loc)
	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
	    return XSTR (asmop, 0);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Record the input operands after the outputs.  */
  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  /* Finally any asm-goto labels; they have empty constraints and are
     treated as Pmode operands.  */
  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
2232 :
2233 : /* Parse inline assembly string STRING and determine which operands are
2234 : referenced by % markers. For the first NOPERANDS operands, set USED[I]
2235 : to true if operand I is referenced.
2236 :
2237 : This is intended to distinguish barrier-like asms such as:
2238 :
2239 : asm ("" : "=m" (...));
2240 :
2241 : from real references such as:
2242 :
2243 : asm ("sw\t$0, %0" : "=m" (...)); */
2244 :
2245 : void
2246 0 : get_referenced_operands (const char *string, bool *used,
2247 : unsigned int noperands)
2248 : {
2249 0 : memset (used, 0, sizeof (bool) * noperands);
2250 0 : const char *p = string;
2251 0 : while (*p)
2252 0 : switch (*p)
2253 : {
2254 0 : case '%':
2255 0 : p += 1;
2256 : /* A letter followed by a digit indicates an operand number. */
2257 0 : if (ISALPHA (p[0]) && ISDIGIT (p[1]))
2258 0 : p += 1;
2259 0 : if (ISDIGIT (*p))
2260 : {
2261 0 : char *endptr;
2262 0 : unsigned long opnum = strtoul (p, &endptr, 10);
2263 0 : if (endptr != p && opnum < noperands)
2264 0 : used[opnum] = true;
2265 0 : p = endptr;
2266 : }
2267 : else
2268 0 : p += 1;
2269 : break;
2270 :
2271 0 : default:
2272 0 : p++;
2273 0 : break;
2274 : }
2275 0 : }
2276 :
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.

   OP is the operand, CONSTRAINT its constraint string (possibly with
   several comma-separated alternatives), and CONSTRAINTS, if non-null,
   is the full array of constraint strings used to resolve matching
   (digit) constraints.  Only valid before reload; afterwards use
   constrain_operands.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  /* Alternative separator; a '-' raw-constraint marker only
	     applies within one alternative.  */
	  raw_constraint_p = false;
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	case '-':
	  /* Start of a raw (target-internal) constraint.  */
	  raw_constraint_p = true;
	  constraint++;
	  continue;

	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
	  /* FALLTHRU */
	default:
	  cn = lookup_constraint (constraint);
	  rtx mem = NULL;
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && (reg_class_for_constraint (cn) != NO_REGS
		      || constraint[0] == '{')
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	    case CT_RELAXED_MEMORY:
	      mem = op;
	      /* Fall through.  */
	    case CT_SPECIAL_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      if (!mem)
		mem = extract_mem_from_operand (op);
	      result = result || memory_operand (mem, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      /* Advance past this constraint, which may be several characters
	 long; a premature end of string (nonzero LEN left over) means
	 the constraint string itself is malformed.  */
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
	{
	  raw_constraint_p = false;
	  return 0;
	}
    }
  raw_constraint_p = false;

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }

  return result;
}
2440 :
2441 : /* Given an rtx *P, if it is a sum containing an integer constant term,
2442 : return the location (type rtx *) of the pointer to that constant term.
2443 : Otherwise, return a null pointer. */
2444 :
2445 : rtx *
2446 41715287 : find_constant_term_loc (rtx *p)
2447 : {
2448 41715287 : rtx *tem;
2449 41715287 : enum rtx_code code = GET_CODE (*p);
2450 :
2451 : /* If *P IS such a constant term, P is its location. */
2452 :
2453 41715287 : if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2454 29843641 : || code == CONST)
2455 : return p;
2456 :
2457 : /* Otherwise, if not a sum, it has no constant term. */
2458 :
2459 29796254 : if (GET_CODE (*p) != PLUS)
2460 : return 0;
2461 :
2462 : /* If one of the summands is constant, return its location. */
2463 :
2464 13934613 : if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2465 0 : && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2466 : return p;
2467 :
2468 : /* Otherwise, check each summand for containing a constant term. */
2469 :
2470 13934613 : if (XEXP (*p, 0) != 0)
2471 : {
2472 13934613 : tem = find_constant_term_loc (&XEXP (*p, 0));
2473 13934613 : if (tem != 0)
2474 : return tem;
2475 : }
2476 :
2477 13934613 : if (XEXP (*p, 1) != 0)
2478 : {
2479 13934613 : tem = find_constant_term_loc (&XEXP (*p, 1));
2480 13934613 : if (tem != 0)
2481 : return tem;
2482 : }
2483 :
2484 : return 0;
2485 : }
2486 :
2487 : /* Return true if OP is a memory reference whose address contains
2488 : no side effects and remains valid after the addition of a positive
2489 : integer less than the size of the object being referenced.
2490 :
2491 : We assume that the original address is valid and do not check it.
2492 :
2493 : This uses strict_memory_address_p as a subroutine, so
2494 : don't use it before reload. */
2495 :
2496 : bool
2497 5596310 : offsettable_memref_p (rtx op)
2498 : {
2499 5596310 : return ((MEM_P (op))
2500 11187675 : && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2501 5591365 : MEM_ADDR_SPACE (op)));
2502 : }
2503 :
2504 : /* Similar, but don't require a strictly valid mem ref:
2505 : consider pseudo-regs valid as index or base regs. */
2506 :
2507 : bool
2508 12213081 : offsettable_nonstrict_memref_p (rtx op)
2509 : {
2510 12213081 : return ((MEM_P (op))
2511 24426128 : && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2512 12213047 : MEM_ADDR_SPACE (op)));
2513 : }
2514 :
/* Return true if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.cc.  */

bool
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Choose the strict or non-strict validity test up front so the two
     probe sites below share one code path.  */
  bool (*addressp) (machine_mode, rtx, addr_space_t, code_helper) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return true;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return false;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      bool good;

      /* Temporarily bump the constant term in place, probe validity,
	 then restore the original rtx -- Y may be shared.  */
      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as, ERROR_MARK);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Auto-modifying addresses have side effects, so never qualify.  */
  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return false;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as, ERROR_MARK);
}
2609 :
2610 : /* Return true if ADDR is an address-expression whose effect depends
2611 : on the mode of the memory reference it is used in.
2612 :
2613 : ADDRSPACE is the address space associated with the address.
2614 :
2615 : Autoincrement addressing is a typical example of mode-dependence
2616 : because the amount of the increment depends on the mode. */
2617 :
2618 : bool
2619 40377513 : mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2620 : {
2621 : /* Auto-increment addressing with anything other than post_modify
2622 : or pre_modify always introduces a mode dependency. Catch such
2623 : cases now instead of deferring to the target. */
2624 40377513 : if (GET_CODE (addr) == PRE_INC
2625 40377513 : || GET_CODE (addr) == POST_INC
2626 40377507 : || GET_CODE (addr) == PRE_DEC
2627 36612887 : || GET_CODE (addr) == POST_DEC)
2628 : return true;
2629 :
2630 36612887 : return targetm.mode_dependent_address_p (addr, addrspace);
2631 : }
2632 :
2633 : /* Return true if boolean attribute ATTR is supported. */
2634 :
2635 : static bool
2636 1627951491 : have_bool_attr (bool_attr attr)
2637 : {
2638 1627951491 : switch (attr)
2639 : {
2640 : case BA_ENABLED:
2641 : return HAVE_ATTR_enabled;
2642 : case BA_PREFERRED_FOR_SIZE:
2643 : return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2644 : case BA_PREFERRED_FOR_SPEED:
2645 : return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2646 : }
2647 0 : gcc_unreachable ();
2648 : }
2649 :
2650 : /* Return the value of ATTR for instruction INSN. */
2651 :
2652 : static bool
2653 1710108703 : get_bool_attr (rtx_insn *insn, bool_attr attr)
2654 : {
2655 1710108703 : switch (attr)
2656 : {
2657 722177131 : case BA_ENABLED:
2658 722177131 : return get_attr_enabled (insn);
2659 358547440 : case BA_PREFERRED_FOR_SIZE:
2660 358547440 : return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2661 629384132 : case BA_PREFERRED_FOR_SPEED:
2662 629384132 : return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2663 : }
2664 0 : gcc_unreachable ();
2665 : }
2666 :
/* Like get_bool_attr_mask, but don't use the cache.  Compute, for each
   alternative of INSN, whether boolean attribute ATTR holds, and return
   the resulting alternative mask.  */

static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx_insn *old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      /* get_attr_* functions read the alternative from this global.  */
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
	mask &= ~ALTERNATIVE_BIT (i);
    }

  /* Restore the globals clobbered above.  */
  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}
2693 :
2694 : /* Return the mask of operand alternatives that are allowed for INSN
2695 : by boolean attribute ATTR. This mask depends only on INSN and on
2696 : the current target; it does not depend on things like the values of
2697 : operands. */
2698 :
2699 : static alternative_mask
2700 1630233700 : get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2701 : {
2702 : /* Quick exit for asms and for targets that don't use these attributes. */
2703 1630233700 : int code = INSN_CODE (insn);
2704 1630233700 : if (code < 0 || !have_bool_attr (attr))
2705 : return ALL_ALTERNATIVES;
2706 :
2707 : /* Calling get_attr_<foo> can be expensive, so cache the mask
2708 : for speed. */
2709 1627951491 : if (!this_target_recog->x_bool_attr_masks[code][attr])
2710 12793332 : this_target_recog->x_bool_attr_masks[code][attr]
2711 12793332 : = get_bool_attr_mask_uncached (insn, attr);
2712 1627951491 : return this_target_recog->x_bool_attr_masks[code][attr];
2713 : }
2714 :
2715 : /* Return the set of alternatives of INSN that are allowed by the current
2716 : target. */
2717 :
2718 : alternative_mask
2719 1165946303 : get_enabled_alternatives (rtx_insn *insn)
2720 : {
2721 1165946303 : return get_bool_attr_mask (insn, BA_ENABLED);
2722 : }
2723 :
2724 : /* Return the set of alternatives of INSN that are allowed by the current
2725 : target and are preferred for the current size/speed optimization
2726 : choice. */
2727 :
2728 : alternative_mask
2729 464198999 : get_preferred_alternatives (rtx_insn *insn)
2730 : {
2731 464198999 : if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2732 408874170 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2733 : else
2734 55324829 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2735 : }
2736 :
2737 : /* Return the set of alternatives of INSN that are allowed by the current
2738 : target and are preferred for the size/speed optimization choice
2739 : associated with BB. Passing a separate BB is useful if INSN has not
2740 : been emitted yet or if we are considering moving it to a different
2741 : block. */
2742 :
2743 : alternative_mask
2744 88398 : get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2745 : {
2746 88398 : if (optimize_bb_for_speed_p (bb))
2747 83533 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2748 : else
2749 4865 : return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2750 : }
2751 :
2752 : /* Assert that the cached boolean attributes for INSN are still accurate.
2753 : The backend is required to define these attributes in a way that only
2754 : depends on the current target (rather than operands, compiler phase,
2755 : etc.). */
2756 :
2757 : bool
2758 36098260 : check_bool_attrs (rtx_insn *insn)
2759 : {
2760 36098260 : int code = INSN_CODE (insn);
2761 36098260 : if (code >= 0)
2762 144393040 : for (int i = 0; i <= BA_LAST; ++i)
2763 : {
2764 108294780 : enum bool_attr attr = (enum bool_attr) i;
2765 108294780 : if (this_target_recog->x_bool_attr_masks[code][attr])
2766 89677720 : gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2767 : == get_bool_attr_mask_uncached (insn, attr));
2768 : }
2769 36098260 : return true;
2770 : }
2771 :
2772 : /* Like extract_insn, but save insn extracted and don't extract again, when
2773 : called again for the same insn expecting that recog_data still contain the
2774 : valid information. This is used primary by gen_attr infrastructure that
2775 : often does extract insn again and again. */
2776 : void
2777 10371906087 : extract_insn_cached (rtx_insn *insn)
2778 : {
2779 10371906087 : if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2780 : return;
2781 762371734 : extract_insn (insn);
2782 762371734 : recog_data.insn = insn;
2783 : }
2784 :
2785 : /* Do uncached extract_insn, constrain_operands and complain about failures.
2786 : This should be used when extracting a pre-existing constrained instruction
2787 : if the caller wants to know which alternative was chosen. */
2788 : void
2789 263940151 : extract_constrain_insn (rtx_insn *insn)
2790 : {
2791 263940151 : extract_insn (insn);
2792 263940151 : if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2793 0 : fatal_insn_not_found (insn);
2794 263940151 : }
2795 :
2796 : /* Do cached extract_insn, constrain_operands and complain about failures.
2797 : Used by insn_attrtab. */
2798 : void
2799 9171256787 : extract_constrain_insn_cached (rtx_insn *insn)
2800 : {
2801 9171256787 : extract_insn_cached (insn);
2802 9171256787 : if (which_alternative == -1
2803 9171256787 : && !constrain_operands (reload_completed,
2804 : get_enabled_alternatives (insn)))
2805 0 : fatal_insn_not_found (insn);
2806 9171256787 : }
2807 :
2808 : /* Do cached constrain_operands on INSN and complain about failures. */
2809 : bool
2810 336129720 : constrain_operands_cached (rtx_insn *insn, int strict)
2811 : {
2812 336129720 : if (which_alternative == -1)
2813 92225224 : return constrain_operands (strict, get_enabled_alternatives (insn));
2814 : else
2815 : return true;
2816 : }
2817 :
/* Analyze INSN and fill in recog_data: operand values/locations, constraint
   strings, modes, operand types and the alternative count.  Aborts via
   fatal_insn_not_found if INSN cannot be recognized.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    /* These patterns have no operands to extract; leave recog_data with
       the zeroed counts set above.  */
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
    case DEBUG_MARKER:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      /* An asm may appear as the first element of a PARALLEL, either as
	 a SET whose source is ASM_OPERANDS or as a bare ASM_OPERANDS /
	 ASM_INPUT.  */
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      /* The number of alternatives is one more than the number of
		 commas in the first operand's constraint string.  */
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  /* Classify each operand as output, in-out or input, keyed off the
     first character of its constraint string.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  /* Invalidate the caches used by extract_insn_cached and
     constrain_operands_cached.  */
  recog_data.insn = NULL;
  which_alternative = -1;
}
2921 :
/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
   operands, N_ALTERNATIVES alternatives and constraint strings
   CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
   and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
   if the insn is an asm statement and preprocessing should take the
   asm operands into account, e.g. to determine whether they could be
   addresses in constraints that require addresses; it should then
   point to an array of pointers to each operand.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
			operand_alternative *op_alt_base,
			rtx **oploc)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      /* OP_ALT walks one row (one alternative) at a time; slot [i] within
	 each row describes this operand for that alternative.  */
      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
	{
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].register_filters = 0;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;

	  /* An empty constraint for this alternative accepts anything.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[i].anything_ok = 1;
	      continue;
	    }

	  /* Scan the constraint letters for this alternative until the
	     terminating ',' or NUL.  */
	  for (;;)
	    {
	      char c = *p;
	      /* '#' comments out the rest of this alternative.  */
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '?':
		  /* Mildly disparage this alternative.  */
		  op_alt[i].reject += 6;
		  break;
		case '!':
		  /* Severely disparage this alternative.  */
		  op_alt[i].reject += 600;
		  break;
		case '&':
		  op_alt[i].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    /* A digit names an earlier operand that this one must
		       match; record the pairing in both directions.  */
		    char *end;
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
		    p = end;
		  }
		  continue;

		case 'X':
		  op_alt[i].anything_ok = 1;
		  break;

		case 'g':
		  op_alt[i].cl =
		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl;
		  switch (get_constraint_type (cn))
		    {
		    case CT_REGISTER:
		      cl = reg_class_for_constraint (cn);
		      if (cl != NO_REGS)
			{
			  op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
			  auto filter_id = get_register_filter_id (cn);
			  if (filter_id >= 0)
			    op_alt[i].register_filters |= 1U << filter_id;
			}
		      break;

		    case CT_CONST_INT:
		      break;

		    case CT_MEMORY:
		    case CT_SPECIAL_MEMORY:
		    case CT_RELAXED_MEMORY:
		      op_alt[i].memory_ok = 1;
		      break;

		    case CT_ADDRESS:
		      /* For asms, only treat the constraint as an address
			 if the actual operand looks like one.  */
		      if (oploc && !address_operand (*oploc[i], VOIDmode))
			break;

		      op_alt[i].is_address = 1;
		      op_alt[i].cl
			= (reg_class_subunion
			   [(int) op_alt[i].cl]
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
		      break;

		    case CT_FIXED_FORM:
		      break;
		    }
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
3050 :
3051 : /* Return an array of operand_alternative instructions for
3052 : instruction ICODE. */
3053 :
3054 : const operand_alternative *
3055 290849698 : preprocess_insn_constraints (unsigned int icode)
3056 : {
3057 290849698 : gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
3058 290849698 : if (this_target_recog->x_op_alt[icode])
3059 : return this_target_recog->x_op_alt[icode];
3060 :
3061 5403799 : int n_operands = insn_data[icode].n_operands;
3062 5403799 : if (n_operands == 0)
3063 : return 0;
3064 : /* Always provide at least one alternative so that which_op_alt ()
3065 : works correctly. If the instruction has 0 alternatives (i.e. all
3066 : constraint strings are empty) then each operand in this alternative
3067 : will have anything_ok set. */
3068 2924907 : int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
3069 2924907 : int n_entries = n_operands * n_alternatives;
3070 :
3071 2924907 : operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
3072 2924907 : const char **constraints = XALLOCAVEC (const char *, n_operands);
3073 :
3074 9926307 : for (int i = 0; i < n_operands; ++i)
3075 7001400 : constraints[i] = insn_data[icode].operand[i].constraint;
3076 2924907 : preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
3077 : NULL);
3078 :
3079 2924907 : this_target_recog->x_op_alt[icode] = op_alt;
3080 2924907 : return op_alt;
3081 : }
3082 :
3083 : /* After calling extract_insn, you can use this function to extract some
3084 : information from the constraint strings into a more usable form.
3085 : The collected data is stored in recog_op_alt. */
3086 :
3087 : void
3088 198247173 : preprocess_constraints (rtx_insn *insn)
3089 : {
3090 198247173 : int icode = INSN_CODE (insn);
3091 198247173 : if (icode >= 0)
3092 196494567 : recog_op_alt = preprocess_insn_constraints (icode);
3093 : else
3094 : {
3095 1752606 : int n_operands = recog_data.n_operands;
3096 1752606 : int n_alternatives = recog_data.n_alternatives;
3097 1752606 : int n_entries = n_operands * n_alternatives;
3098 1752606 : memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
3099 1752606 : preprocess_constraints (n_operands, n_alternatives,
3100 : recog_data.constraints, asm_op_alt,
3101 : NULL);
3102 1752606 : recog_op_alt = asm_op_alt;
3103 : }
3104 198247173 : }
3105 :
3106 : /* Check the operands of an insn against the insn's operand constraints
3107 : and return 1 if they match any of the alternatives in ALTERNATIVES.
3108 :
3109 : The information about the insn's operands, constraints, operand modes
3110 : etc. is obtained from the global variables set up by extract_insn.
3111 :
3112 : WHICH_ALTERNATIVE is set to a number which indicates which
3113 : alternative of constraints was matched: 0 for the first alternative,
3114 : 1 for the next, etc.
3115 :
3116 : In addition, when two operands are required to match
3117 : and it happens that the output operand is (reg) while the
3118 : input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
3119 : make the output operand look like the input.
3120 : This is because the output operand is the one the template will print.
3121 :
3122 : This is used in final, just before printing the assembler code and by
3123 : the routines that determine an insn's attribute.
3124 :
3125 : If STRICT is a positive nonzero value, it means that we have been
3126 : called after reload has been completed. In that case, we must
3127 : do all checks strictly. If it is zero, it means that we have been called
3128 : before reload has completed. In that case, we first try to see if we can
3129 : find an alternative that matches strictly. If not, we try again, this
3130 : time assuming that reload will fix up the insn. This provides a "best
3131 : guess" for the alternative and is used to compute attributes of insns prior
3132 : to reload. A negative value of STRICT is used for this internal call. */
3133 :
/* Record of a matched-operand pair noted by constrain_operands: when a
   matching constraint is satisfied by an output like (reg) and an input
   like a pre/post-inc/dec of that reg, the output operand is later
   rewritten to look like the input (see the funny_match handling in
   constrain_operands).  */
struct funny_match
{
  int this_op, other;	/* operand numbers of the pair to reconcile */
};
3138 :
bool
constrain_operands (int strict, alternative_mask alternatives)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return true;

  for (c = 0; c < recog_data.n_operands; c++)
    constraints[c] = recog_data.constraints[c];

  /* Try each alternative in turn; constraints[] advances through the
     comma-separated constraint strings as alternatives are consumed.  */
  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      bool lose = false;
      funny_match_index = 0;

      /* Skip alternatives excluded by the caller's mask.  */
      if (!TEST_BIT (alternatives, which_alternative))
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	matching_operands[opno] = -1;

      /* Test every operand against its constraint for this alternative.  */
      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  bool win = false;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* Look through a SUBREG of a hard register, remembering the
	     register-number offset it implies.  */
	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = true;

	  /* Process each constraint letter; WIN is set if any of them
	     accepts the operand.  */
	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;
	      case '-':
		raw_constraint_p = true;
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];
		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = true;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  We also want to make sure we have a
		   valid mode.  */
		{
		  auto mem_mode = (recog_data.is_asm
				   ? VOIDmode
				   : recog_data.operand_mode[opno]);
		  if ((GET_MODE (op) == VOIDmode
		       || SCALAR_INT_MODE_P (GET_MODE (op)))
		      && (strict <= 0
			  || strict_memory_address_p (mem_mode, op)))
		    win = true;
		  break;
		}

		/* No need to check general_operand again;
		   it was done in insn-recog.cc.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = true;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = true;
		break;

	      case '{':
		/* Constraint naming one specific hard register.  */
		if ((REG_P (op) && HARD_REGISTER_P (op)
		     && (int) REGNO (op) == decode_hard_reg_constraint (p))
		    || !reload_completed)
		  win = true;
		break;

	      default:
		{
		  /* Machine-specific constraint from the target's
		     define_constraint / define_register_constraint.  */
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl = reg_class_for_constraint (cn);
		  if (cl != NO_REGS)
		    {
		      auto *filter = get_register_filter (cn);
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)
			      && (!filter
				  || TEST_HARD_REG_BIT (*filter,
							REGNO (op) + offset))))
			win = true;
		    }

		  else if (constraint_satisfied_p (op, cn))
		    win = true;

		  else if ((insn_extra_memory_constraint (cn)
			    || insn_extra_relaxed_memory_constraint (cn))
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into a mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* Before reload, accept a pseudo or hard register,
				  since LRA can turn it into a mem.  */
			       || (strict < 0 && targetm.lra_p () && REG_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = true;
		  else if (insn_extra_address_constraint (cn)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = true;
		  /* Cater to architectures like IA-64 that define extra memory
		     constraints without using define_memory_constraint.  */
		  else if (reload_in_progress
			   && REG_P (op)
			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
			   && reg_renumber[REGNO (op)] < 0
			   && reg_equiv_mem (REGNO (op)) != 0
			   && constraint_satisfied_p
			       (reg_equiv_mem (REGNO (op)), cn))
		    win = true;
		  break;
		}
	      }
	  while (p += len, c);

	  raw_constraint_p = false;
	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = true;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = true;

	  if (! lose)
	    {
	      /* Apply the queued output-operand rewrites recorded above
		 (the *x vs *--x case).  */
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

	      /* For operands without < or > constraints reject side-effects.  */
	      if (AUTO_INC_DEC && recog_data.is_asm)
		{
		  for (opno = 0; opno < recog_data.n_operands; opno++)
		    if (MEM_P (recog_data.operand[opno]))
		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
			{
			case PRE_INC:
			case POST_INC:
			case PRE_DEC:
			case POST_DEC:
			case PRE_MODIFY:
			case POST_MODIFY:
			  if (strchr (recog_data.constraints[opno], '<') == NULL
			      && strchr (recog_data.constraints[opno], '>')
				 == NULL)
			    return false;
			  break;
			default:
			  break;
			}
		}

	      return true;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1, alternatives);
  else
    return false;
}
3465 :
3466 : /* Return true iff OPERAND (assumed to be a REG rtx)
3467 : is a hard reg in class CLASS when its regno is offset by OFFSET
3468 : and changed to mode MODE.
3469 : If REG occupies multiple hard regs, all of them must be in CLASS. */
3470 :
3471 : bool
3472 3335660895 : reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
3473 : machine_mode mode)
3474 : {
3475 3335660895 : unsigned int regno = REGNO (operand);
3476 :
3477 3335660895 : if (cl == NO_REGS)
3478 : return false;
3479 :
3480 : /* Regno must not be a pseudo register. Offset may be negative. */
3481 3240293057 : return (HARD_REGISTER_NUM_P (regno)
3482 3240215710 : && HARD_REGISTER_NUM_P (regno + offset)
3483 6480508767 : && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
3484 : regno + offset));
3485 : }
3486 :
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx_insn *
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  /* try_split returning INSN itself means no split happened.  */
  if (last == insn)
    return NULL;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  /* Prefer an existing REG_EQUAL/REG_EQUIV note; otherwise use the
	     constant source itself.  */
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL,
				 copy_rtx (SET_SRC (insn_set)));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      /* Walk the newly emitted insns (between FIRST and LAST inclusive)
	 and simplify any stray subregs of hard registers.  */
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}
3541 :
/* Split all insns in the function, rebuilding sub-basic-blocks and
   cleaning up the CFG afterwards where splitting made that necessary.
   (The old UPD_LIFE parameter no longer exists.)  */

void
split_all_insns (void)
{
  bool changed;
  bool need_cfg_cleanup = false;
  basic_block bb;

  /* Bitmap of the blocks in which at least one insn was split; those
     blocks need their sub-basic-block structure recomputed.  */
  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));

	  /* If INSN has a REG_EH_REGION note and we split INSN, the
	     resulting split may not have/need REG_EH_REGION notes.

	     If that happens and INSN was the last reference to the
	     given EH region, then the EH region will become unreachable.
	     We cannot leave the unreachable blocks in the CFG as that
	     will trigger a checking failure.

	     So track if INSN has a REG_EH_REGION note.  If so and we
	     split INSN, then trigger a CFG cleanup.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		  if (note)
		    need_cfg_cleanup = true;
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		      if (note)
			need_cfg_cleanup = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    {
      find_many_sub_basic_blocks (blocks);

      /* Splitting could drop an REG_EH_REGION if it potentially
	 trapped in its original form, but does not in its split
	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
	 store/load pair and -fnon-call-exceptions.  */
      if (need_cfg_cleanup)
	cleanup_cfg (0);
    }

  checking_verify_flow_info ();
}
3627 :
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

void
split_all_insns_noflow (void)
{
  rtx_insn *next, *insn;

  /* Walk the whole insn chain; no basic-block structure is assumed.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
}
3662 :
/* One slot of the peephole2 match window: an insn together with the set
   of registers live immediately before it.  */
struct peep2_insn_data
{
  rtx_insn *insn;
  regset live_before;
};

/* Circular buffer holding the current match window, plus one extra slot
   reserved for the end-of-block marker.  */
static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index of the first (oldest) valid slot in peep2_insn_data.  */
static int peep2_current;

/* Set when a replacement emitted a new jump insn, so jump labels must be
   rebuilt after the pass finishes.  */
static bool peep2_do_rebuild_jump_labels;
/* Set when a replacement may have left dead edges behind.  */
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2. */
int peep2_current_count;

/* A marker indicating the last insn of the block.  The live_before regset
   for this element is correct, indicating DF_LIVE_OUT for the block. */
#define PEEP2_EOB invalid_insn_rtx
3681 :
3682 : /* Wrap N to fit into the peep2_insn_data buffer. */
3683 :
3684 : static int
3685 423047422 : peep2_buf_position (int n)
3686 : {
3687 0 : if (n >= MAX_INSNS_PER_PEEP2 + 1)
3688 142974982 : n -= MAX_INSNS_PER_PEEP2 + 1;
3689 423028458 : return n;
3690 : }
3691 :
3692 : /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3693 : does not exist. Used by the recognizer to find the next insn to match
3694 : in a multi-insn pattern. */
3695 :
3696 : rtx_insn *
3697 218774223 : peep2_next_insn (int n)
3698 : {
3699 218774223 : gcc_assert (n <= peep2_current_count);
3700 :
3701 218774223 : n = peep2_buf_position (peep2_current + n);
3702 :
3703 218774223 : return peep2_insn_data[n].insn;
3704 : }
3705 :
3706 : /* Return true if REGNO is dead before the Nth non-note insn
3707 : after `current'. */
3708 :
3709 : bool
3710 12975797 : peep2_regno_dead_p (int ofs, int regno)
3711 : {
3712 12975797 : gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3713 :
3714 12975797 : ofs = peep2_buf_position (peep2_current + ofs);
3715 :
3716 12975797 : gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3717 :
3718 12975797 : return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3719 : }
3720 :
3721 : /* Similarly for a REG. */
3722 :
3723 : bool
3724 295136 : peep2_reg_dead_p (int ofs, rtx reg)
3725 : {
3726 295136 : gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3727 :
3728 295136 : ofs = peep2_buf_position (peep2_current + ofs);
3729 :
3730 295136 : gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3731 :
3732 295136 : unsigned int end_regno = END_REGNO (reg);
3733 370924 : for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3734 295136 : if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3735 : return false;
3736 : return true;
3737 : }
3738 :
/* Regno offset to be used in the register search. */
/* Rotated by peep2_find_free_register after each successful search so
   that successive searches start at different hard registers.  */
static int search_ofs;
3741 :
3742 : /* Try to find a hard register of mode MODE, matching the register class in
3743 : CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3744 : remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3745 : in which case the only condition is that the register must be available
3746 : before CURRENT_INSN.
3747 : Registers that already have bits set in REG_SET will not be considered.
3748 :
3749 : If an appropriate register is available, it will be returned and the
3750 : corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3751 : returned. */
3752 :
rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref def;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  /* Translate window offsets into circular-buffer indices.  */
  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  /* Accumulate into LIVE everything defined by the insns between FROM
     and TO, so the chosen register stays free over the whole span.  */
  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn. */
      FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));

      from = peep2_buf_position (from + 1);
    }

  cl = reg_class_for_constraint (lookup_constraint (class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, j;
      bool success;

      /* Distribute the free registers as much as possible. */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need? */
      if (!targetm.hard_regno_mode_ok (regno, mode))
	continue;

      /* Check every hard register the MODE-sized value would occupy.  */
      success = true;
      for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
	{
	  /* Don't allocate fixed registers. */
	  if (fixed_regs[regno + j])
	    {
	      success = false;
	      break;
	    }
	  /* Don't allocate global registers. */
	  if (global_regs[regno + j])
	    {
	      success = false;
	      break;
	    }
	  /* Make sure the register is of the right class. */
	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
	    {
	      success = false;
	      break;
	    }
	  /* And that we don't create an extra save/restore. */
	  if (! crtl->abi->clobbers_full_reg_p (regno + j)
	      && ! df_regs_ever_live_p (regno + j))
	    {
	      success = false;
	      break;
	    }

	  if (! targetm.hard_regno_scratch_ok (regno + j))
	    {
	      success = false;
	      break;
	    }

	  /* And we don't clobber traceback for noreturn functions. */
	  if ((regno + j == FRAME_POINTER_REGNUM
	       || regno + j == HARD_FRAME_POINTER_REGNUM)
	      && (! reload_completed || frame_pointer_needed))
	    {
	      success = false;
	      break;
	    }

	  /* Finally, reject registers the caller already claimed or
	     that are live over the requested span.  */
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = false;
	      break;
	    }
	}

      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register. */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
3871 :
3872 : /* Forget all currently tracked instructions, only remember current
3873 : LIVE regset. */
3874 :
3875 : static void
3876 10849795 : peep2_reinit_state (regset live)
3877 : {
3878 10849795 : int i;
3879 :
3880 : /* Indicate that all slots except the last holds invalid data. */
3881 75948565 : for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3882 65098770 : peep2_insn_data[i].insn = NULL;
3883 10849795 : peep2_current_count = 0;
3884 :
3885 : /* Indicate that the last slot contains live_after data. */
3886 10849795 : peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3887 10849795 : peep2_current = MAX_INSNS_PER_PEEP2;
3888 :
3889 10849795 : COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3890 10849795 : }
3891 :
3892 : /* Copies frame related info of an insn (OLD_INSN) to the single
3893 : insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3894 :
void
copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
{
  bool any_note = false;
  rtx note;

  /* Nothing to do unless the original carried frame info.  */
  if (!RTX_FRAME_RELATED_P (old_insn))
    return;

  RTX_FRAME_RELATED_P (new_insn) = 1;

  /* Allow the backend to fill in a note during the split. */
  for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
    switch (REG_NOTE_KIND (note))
      {
      case REG_FRAME_RELATED_EXPR:
      case REG_CFA_DEF_CFA:
      case REG_CFA_ADJUST_CFA:
      case REG_CFA_OFFSET:
      case REG_CFA_REGISTER:
      case REG_CFA_EXPRESSION:
      case REG_CFA_RESTORE:
      case REG_CFA_SET_VDRAP:
	any_note = true;
	break;
      default:
	break;
      }

  /* If the backend didn't supply a note, copy one over. */
  if (!any_note)
    for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
      switch (REG_NOTE_KIND (note))
	{
	case REG_FRAME_RELATED_EXPR:
	case REG_CFA_DEF_CFA:
	case REG_CFA_ADJUST_CFA:
	case REG_CFA_OFFSET:
	case REG_CFA_REGISTER:
	case REG_CFA_EXPRESSION:
	case REG_CFA_RESTORE:
	case REG_CFA_SET_VDRAP:
	  add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  any_note = true;
	  break;
	default:
	  break;
	}

  /* If there still isn't a note, make sure the unwind info sees the
     same expression as before the split. */
  if (!any_note)
    {
      rtx old_set, new_set;

      /* The old insn had better have been simple, or annotated. */
      old_set = single_set (old_insn);
      gcc_assert (old_set != NULL);

      /* Only emit an explicit note when the split changed the SET.  */
      new_set = single_set (new_insn);
      if (!new_set || !rtx_equal_p (new_set, old_set))
	add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
    }

  /* Copy prologue/epilogue status.  This is required in order to keep
     proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
  maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
}
3963 :
3964 : /* While scanning basic block BB, we found a match of length MATCH_LEN + 1,
3965 : starting at INSN. Perform the replacement, removing the old insns and
3966 : replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3967 : if the replacement is rejected. */
3968 :
static rtx_insn *
peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
{
  int i;
  rtx_insn *last, *before_try, *x;
  rtx eh_note, as_note;
  rtx_insn *old_insn;
  rtx_insn *new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn. */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      if (match_len != 0)
	return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
	 may be in the stream for the purpose of register allocation. */
      if (active_insn_p (attempt))
	new_insn = attempt;
      else
	new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
	return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info. */
      copy_frame_info_to_split_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes. */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
	continue;
      was_call = true;

      /* Find the corresponding call in the replacement sequence.  */
      new_insn = attempt;
      while (new_insn != NULL_RTX)
	{
	  if (CALL_P (new_insn))
	    break;
	  new_insn = NEXT_INSN (new_insn);
	}

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
	= CALL_INSN_FUNCTION_USAGE (old_insn);
      SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);

      /* Preserve only the call notes that affect CFG/EH semantics.  */
      for (note = REG_NOTES (old_insn);
	   note;
	   note = XEXP (note, 1))
	switch (REG_NOTE_KIND (note))
	  {
	  case REG_NORETURN:
	  case REG_SETJMP:
	  case REG_TM:
	  case REG_CALL_NOCF_CHECK:
	    add_reg_note (new_insn, REG_NOTE_KIND (note),
			  XEXP (note, 0));
	    break;
	  default:
	    /* Discard all other reg notes. */
	    break;
	  }

      /* Croak if there is another call in the sequence. */
      while (++i <= match_len)
	{
	  j = peep2_buf_position (peep2_current + i);
	  old_insn = peep2_insn_data[j].insn;
	  gcc_assert (!CALL_P (old_insn));
	}
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence. */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
	break;
    }

  /* I now indexes the last matched insn.  */
  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new. */
  rtx_insn *peepinsn = peep2_insn_data[i].insn;
  last = emit_insn_after_setloc (attempt,
				 peep2_insn_data[i].insn,
				 INSN_LOCATION (peepinsn));
  if (JUMP_P (peepinsn) && JUMP_P (last))
    CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes. */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
	  break;

      if (eh_note)
	copy_reg_eh_region_note_backward (eh_note, last, before_try);

      /* Any new insn that can throw or do a nonlocal goto needs the
	 block split after it, with an EH/abnormal edge to the old
	 handler destination.  */
      if (eh_edge)
	for (x = last; x != before_try; x = PREV_INSN (x))
	  if (x != BB_END (bb)
	      && (can_throw_internal (x)
		  || can_nonlocal_goto (x)))
	    {
	      edge nfte, nehe;
	      int flags;

	      nfte = split_block (bb, x);
	      flags = (eh_edge->flags
		       & (EDGE_EH | EDGE_ABNORMAL));
	      if (CALL_P (x))
		flags |= EDGE_ABNORMAL_CALL;
	      nehe = make_edge (nfte->src, eh_edge->dest,
				flags);

	      nehe->probability = eh_edge->probability;
	      nfte->probability = nehe->probability.invert ();

	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
	      bb = nfte->src;
	      eh_edge = nehe;
	    }

      /* Converting possibly trapping insn to non-trapping is
	 possible.  Zap dummy outgoing edges. */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes. */
  if (as_note)
    fixup_args_size_notes (before_try, last, get_args_size (as_note));

  /* Scan the new insns for embedded side effects and add appropriate
     REG_INC notes. */
  if (AUTO_INC_DEC)
    for (x = last; x != before_try; x = PREV_INSN (x))
      if (NONDEBUG_INSN_P (x))
	add_auto_inc_notes (x, PATTERN (x));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done. */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
	peep2_do_rebuild_jump_labels = true;
	break;
      }

  return last;
}
4146 :
4147 : /* After performing a replacement in basic block BB, fix up the life
4148 : information in our buffer. LAST is the last of the insns that we
4149 : emitted as a replacement. PREV is the insn before the start of
4150 : the replacement. MATCH_LEN + 1 is the number of instructions that were
4151 : matched, and which now need to be replaced in the buffer. */
4152 :
static void
peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
		   rtx_insn *prev)
{
  /* Slot just past the replaced insns; its live_before is still valid
     and gives the liveness after the new sequence.  */
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx_insn *x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  /* Drop the replaced insns from the window.  */
  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  /* Walk the new insns backwards from LAST to PREV, rescanning each for
     df and refilling the window (most recent first) with recomputed
     live-before sets.  */
  x = last;
  do
    {
      if (INSN_P (x))
	{
	  df_insn_rescan (x);
	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
	    {
	      peep2_current_count++;
	      if (--i < 0)
		i = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[i].insn = x;
	      df_simulate_one_insn_backwards (bb, x, &live);
	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
	    }
	}
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}
4190 :
4191 : /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4192 : Return true if we added it, false otherwise. The caller will try to match
4193 : peepholes against the buffer if we return false; otherwise it will try to
4194 : add more instructions to the buffer. */
4195 :
4196 : static bool
4197 82094669 : peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
4198 : {
4199 82094669 : int pos;
4200 :
4201 : /* Once we have filled the maximum number of insns the buffer can hold,
4202 : allow the caller to match the insns against peepholes. We wait until
4203 : the buffer is full in case the target has similar peepholes of different
4204 : length; we always want to match the longest if possible. */
4205 82094669 : if (peep2_current_count == MAX_INSNS_PER_PEEP2)
4206 : return false;
4207 :
4208 : /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4209 : any other pattern, lest it change the semantics of the frame info. */
4210 63384087 : if (RTX_FRAME_RELATED_P (insn))
4211 : {
4212 : /* Let the buffer drain first. */
4213 7687332 : if (peep2_current_count > 0)
4214 : return false;
4215 : /* Now the insn will be the only thing in the buffer. */
4216 : }
4217 :
4218 59099971 : pos = peep2_buf_position (peep2_current + peep2_current_count);
4219 59099971 : peep2_insn_data[pos].insn = insn;
4220 59099971 : COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4221 59099971 : peep2_current_count++;
4222 :
4223 59099971 : df_simulate_one_insn_forwards (bb, insn, live);
4224 59099971 : return true;
4225 : }
4226 :
4227 : /* Perform the peephole2 optimization pass. */
4228 :
static void
peephole2_optimize (void)
{
  rtx_insn *insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  /* Liveness is tracked with the df LR problem (plus notes), with dead
     code elimination enabled.  */
  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use. */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation. */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      /* Slide the match window over the block: fill it with insns,
	 attempt a match when it is full (or the block ends), and advance
	 by one insn when no peephole applies.  */
      insn = BB_HEAD (bb);
      for (;;)
	{
	  rtx_insn *attempt, *head;
	  int match_len;

	  if (!past_end && !NONDEBUG_INSN_P (insn))
	    {
	    next_insn:
	      insn = NEXT_INSN (insn);
	      if (insn == NEXT_INSN (BB_END (bb)))
		past_end = true;
	      continue;
	    }
	  if (!past_end && peep2_fill_buffer (bb, insn, live))
	    goto next_insn;

	  /* If we did not fill an empty buffer, it signals the end of the
	     block. */
	  if (peep2_current_count == 0)
	    break;

	  /* The buffer filled to the current maximum, so try to match. */

	  pos = peep2_buf_position (peep2_current + peep2_current_count);
	  peep2_insn_data[pos].insn = PEEP2_EOB;
	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);

	  /* Match the peephole. */
	  head = peep2_insn_data[peep2_current].insn;
	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
	  if (attempt != NULL)
	    {
	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
	      if (last)
		{
		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
		  continue;
		}
	    }

	  /* No match: advance the buffer by one insn. */
	  peep2_current = peep2_buf_position (peep2_current + 1);
	  peep2_current_count--;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
  if (peep2_do_cleanup_cfg)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
}
4318 :
4319 : /* Common predicates for use with define_bypass. */
4320 :
4321 : /* Helper function for store_data_bypass_p, handle just a single SET
4322 : IN_SET. */
4323 :
4324 : static bool
4325 0 : store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4326 : {
4327 0 : if (!MEM_P (SET_DEST (in_set)))
4328 : return false;
4329 :
4330 0 : rtx out_set = single_set (out_insn);
4331 0 : if (out_set)
4332 0 : return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4333 :
4334 0 : rtx out_pat = PATTERN (out_insn);
4335 0 : if (GET_CODE (out_pat) != PARALLEL)
4336 : return false;
4337 :
4338 0 : for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4339 : {
4340 0 : rtx out_exp = XVECEXP (out_pat, 0, i);
4341 :
4342 0 : if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4343 0 : continue;
4344 :
4345 0 : gcc_assert (GET_CODE (out_exp) == SET);
4346 :
4347 0 : if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4348 : return false;
4349 : }
4350 :
4351 : return true;
4352 : }
4353 :
4354 : /* True if the dependency between OUT_INSN and IN_INSN is on the store
4355 : data not the address operand(s) of the store. IN_INSN and OUT_INSN
4356 : must be either a single_set or a PARALLEL with SETs inside. */
4357 :
4358 : bool
4359 0 : store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4360 : {
4361 0 : rtx in_set = single_set (in_insn);
4362 0 : if (in_set)
4363 0 : return store_data_bypass_p_1 (out_insn, in_set);
4364 :
4365 0 : rtx in_pat = PATTERN (in_insn);
4366 0 : if (GET_CODE (in_pat) != PARALLEL)
4367 : return false;
4368 :
4369 0 : for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4370 : {
4371 0 : rtx in_exp = XVECEXP (in_pat, 0, i);
4372 :
4373 0 : if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4374 0 : continue;
4375 :
4376 0 : gcc_assert (GET_CODE (in_exp) == SET);
4377 :
4378 0 : if (!store_data_bypass_p_1 (out_insn, in_exp))
4379 : return false;
4380 : }
4381 :
4382 : return true;
4383 : }
4384 :
4385 : /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4386 : condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
4387 : or multiple set; IN_INSN should be single_set for truth, but for convenience
4388 : of insn categorization may be any JUMP or CALL insn. */
4389 :
4390 : bool
4391 0 : if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4392 : {
4393 0 : rtx out_set, in_set;
4394 :
4395 0 : in_set = single_set (in_insn);
4396 0 : if (! in_set)
4397 : {
4398 0 : gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
4399 : return false;
4400 : }
4401 :
4402 0 : if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
4403 : return false;
4404 0 : in_set = SET_SRC (in_set);
4405 :
4406 0 : out_set = single_set (out_insn);
4407 0 : if (out_set)
4408 : {
4409 0 : if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4410 0 : || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4411 0 : return false;
4412 : }
4413 : else
4414 : {
4415 0 : rtx out_pat;
4416 0 : int i;
4417 :
4418 0 : out_pat = PATTERN (out_insn);
4419 0 : gcc_assert (GET_CODE (out_pat) == PARALLEL);
4420 :
4421 0 : for (i = 0; i < XVECLEN (out_pat, 0); i++)
4422 : {
4423 0 : rtx exp = XVECEXP (out_pat, 0, i);
4424 :
4425 0 : if (GET_CODE (exp) == CLOBBER)
4426 0 : continue;
4427 :
4428 0 : gcc_assert (GET_CODE (exp) == SET);
4429 :
4430 0 : if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4431 0 : || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4432 0 : return false;
4433 : }
4434 : }
4435 :
4436 : return true;
4437 : }
4438 :
4439 : static unsigned int
4440 963979 : rest_of_handle_peephole2 (void)
4441 : {
4442 963979 : if (HAVE_peephole2)
4443 0 : peephole2_optimize ();
4444 :
4445 963979 : return 0;
4446 : }
4447 :
namespace {

/* Pass metadata for the "peephole2" RTL pass.  */
const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass wrapper invoking the peephole2 optimizer.  */
class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method. */
  opt_pass * clone () final override { return new pass_peephole2 (m_ctxt); }
  bool gate (function *) final override
  {
    return (optimize > 0 && flag_peephole2);
  }
  unsigned int execute (function *) final override
  {
    return rest_of_handle_peephole2 ();
  }

}; // class pass_peephole2

} // anon namespace
4486 :
/* Factory for the peephole2 pass; called from the pass manager.  */
rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}
4492 :
namespace {

/* Pass metadata for "split1", the first full insn-splitting pass.  */
const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  PROP_rtl_split_insns, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that unconditionally splits all insns.  */
class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method. */
  opt_pass * clone () final override
  {
    return new pass_split_all_insns (m_ctxt);
  }
  unsigned int execute (function *) final override
  {
    split_all_insns ();
    return 0;
  }

}; // class pass_split_all_insns

} // anon namespace
4531 :
/* Factory for the "split1" pass; called from the pass manager.  */
rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}
4537 :
namespace {

/* Pass metadata for "split2", the post-reload splitting pass.  */
const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper splitting insns after register allocation.  */
class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    /* If optimizing, then go ahead and split insns now. */
    return optimize > 0;
  }

  unsigned int execute (function *) final override
  {
    split_all_insns ();
    return 0;
  }

}; // class pass_split_after_reload

} // anon namespace
4576 :
/* Factory for the "split2" pass; called from the pass manager.  */
rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}
4582 :
/* Return true when insns should be split before the second scheduling
   pass: only when optimizing with post-reload scheduling enabled, and
   only on targets that have an insn scheduler at all.  */
static bool
enable_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return false;
#endif
}
4592 :
namespace {

/* Pass metadata for "split3", splitting just before sched2.  */
const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper splitting insns ahead of the second scheduler pass.  */
class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return enable_split_before_sched2 ();
  }

  unsigned int execute (function *) final override
  {
    split_all_insns ();
    return 0;
  }

}; // class pass_split_before_sched2

} // anon namespace
4630 :
4631 : rtl_opt_pass *
4632 285722 : make_pass_split_before_sched2 (gcc::context *ctxt)
4633 : {
4634 285722 : return new pass_split_before_sched2 (ctxt);
4635 : }
4636 :
4637 : namespace {
4638 :
4639 : const pass_data pass_data_split_before_regstack =
4640 : {
4641 : RTL_PASS, /* type */
4642 : "split4", /* name */
4643 : OPTGROUP_NONE, /* optinfo_flags */
4644 : TV_NONE, /* tv_id */
4645 : 0, /* properties_required */
4646 : 0, /* properties_provided */
4647 : 0, /* properties_destroyed */
4648 : 0, /* todo_flags_start */
4649 : 0, /* todo_flags_finish */
4650 : };
4651 :
4652 : class pass_split_before_regstack : public rtl_opt_pass
4653 : {
4654 : public:
4655 285722 : pass_split_before_regstack (gcc::context *ctxt)
4656 571444 : : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4657 : {}
4658 :
4659 : /* opt_pass methods: */
4660 : bool gate (function *) final override;
4661 507466 : unsigned int execute (function *) final override
4662 : {
4663 507466 : split_all_insns ();
4664 507466 : return 0;
4665 : }
4666 :
4667 : }; // class pass_split_before_regstack
4668 :
4669 : bool
4670 1471370 : pass_split_before_regstack::gate (function *)
4671 : {
4672 : #if HAVE_ATTR_length && defined (STACK_REGS)
4673 : /* If flow2 creates new instructions which need splitting
4674 : and scheduling after reload is not done, they might not be
4675 : split until final which doesn't allow splitting
4676 : if HAVE_ATTR_length. Selective scheduling can result in
4677 : further instructions that need splitting. */
4678 : #ifdef INSN_SCHEDULING
4679 2435355 : return !enable_split_before_sched2 () || flag_selective_scheduling2;
4680 : #else
4681 : return !enable_split_before_sched2 ();
4682 : #endif
4683 : #else
4684 : return false;
4685 : #endif
4686 : }
4687 :
4688 : } // anon namespace
4689 :
4690 : rtl_opt_pass *
4691 285722 : make_pass_split_before_regstack (gcc::context *ctxt)
4692 : {
4693 285722 : return new pass_split_before_regstack (ctxt);
4694 : }
4695 :
4696 : namespace {
4697 :
4698 : const pass_data pass_data_split_for_shorten_branches =
4699 : {
4700 : RTL_PASS, /* type */
4701 : "split5", /* name */
4702 : OPTGROUP_NONE, /* optinfo_flags */
4703 : TV_NONE, /* tv_id */
4704 : 0, /* properties_required */
4705 : 0, /* properties_provided */
4706 : 0, /* properties_destroyed */
4707 : 0, /* todo_flags_start */
4708 : 0, /* todo_flags_finish */
4709 : };
4710 :
4711 : class pass_split_for_shorten_branches : public rtl_opt_pass
4712 : {
4713 : public:
4714 285722 : pass_split_for_shorten_branches (gcc::context *ctxt)
4715 571444 : : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4716 : {}
4717 :
4718 : /* opt_pass methods: */
4719 1471370 : bool gate (function *) final override
4720 : {
4721 : /* The placement of the splitting that we do for shorten_branches
4722 : depends on whether regstack is used by the target or not. */
4723 : #if HAVE_ATTR_length && !defined (STACK_REGS)
4724 : return true;
4725 : #else
4726 1471370 : return false;
4727 : #endif
4728 : }
4729 :
4730 0 : unsigned int execute (function *) final override
4731 : {
4732 0 : split_all_insns_noflow ();
4733 0 : return 0;
4734 : }
4735 :
4736 : }; // class pass_split_for_shorten_branches
4737 :
4738 : } // anon namespace
4739 :
4740 : rtl_opt_pass *
4741 285722 : make_pass_split_for_shorten_branches (gcc::context *ctxt)
4742 : {
4743 285722 : return new pass_split_for_shorten_branches (ctxt);
4744 : }
4745 :
4746 : /* (Re)initialize the target information after a change in target. */
4747 :
4748 : void
4749 214527 : recog_init ()
4750 : {
4751 : /* The information is zero-initialized, so we don't need to do anything
4752 : first time round. */
4753 214527 : if (!this_target_recog->x_initialized)
4754 : {
4755 212397 : this_target_recog->x_initialized = true;
4756 212397 : return;
4757 : }
4758 2130 : memset (this_target_recog->x_bool_attr_masks, 0,
4759 : sizeof (this_target_recog->x_bool_attr_masks));
4760 32314230 : for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4761 32312100 : if (this_target_recog->x_op_alt[i])
4762 : {
4763 29870 : free (this_target_recog->x_op_alt[i]);
4764 29870 : this_target_recog->x_op_alt[i] = 0;
4765 : }
4766 : }
|