Line data Source code
1 : /* Store motion via Lazy Code Motion on the reverse CFG.
2 : Copyright (C) 1997-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "rtl.h"
25 : #include "tree.h"
26 : #include "predict.h"
27 : #include "df.h"
28 : #include "toplev.h"
29 :
30 : #include "cfgrtl.h"
31 : #include "cfganal.h"
32 : #include "lcm.h"
33 : #include "cfgcleanup.h"
34 : #include "expr.h"
35 : #include "tree-pass.h"
36 : #include "dbgcnt.h"
37 : #include "rtl-iter.h"
38 : #include "print-rtl.h"
39 :
40 : /* This pass implements downward store motion.
41 : As of May 1, 2009, the pass is not enabled by default on any target,
42 : but bootstrap completes on ia64 and x86_64 with the pass enabled. */
43 :
44 : /* TODO:
45 : - remove_reachable_equiv_notes is an incomprehensible pile of goo and
46 : a compile time hog that needs a rewrite (maybe cache st_exprs to
47 : invalidate REG_EQUAL/REG_EQUIV notes for?).
48 : - pattern_regs in st_expr should be a regset (on its own obstack).
49 : - store_motion_mems should be a vec instead of a list.
50 : - there should be an alloc pool for struct st_expr objects.
51 : - investigate whether it is helpful to make the address of an st_expr
52 : a cselib VALUE.
53 : - when GIMPLE alias information is exported, the effectiveness of this
54 : pass should be re-evaluated.
55 : */
56 :
/* This is a list of store expressions (MEMs).  The structure is used
   as an expression table to track stores which look interesting, and
   might be moveable towards the exit block.  The head of the list is
   the global store_motion_mems below.  */

struct st_expr
{
  /* Pattern of this mem (the MEM rtx being stored to).  */
  rtx pattern;
  /* List of registers mentioned by the mem; filled lazily by
     extract_mentioned_regs.  */
  vec<rtx> pattern_regs;
  /* INSN list of stores that are locally anticipatable.  */
  vec<rtx_insn *> antic_stores;
  /* INSN list of stores that are locally available.  */
  vec<rtx_insn *> avail_stores;
  /* Next in the list.  */
  struct st_expr * next;
  /* Store ID in the dataflow bitmaps.  */
  int index;
  /* Hash value for the hash table; cached so the entry can be removed
     from the table without rehashing (see compute_store_table).  */
  unsigned int hash_index;
  /* Register holding the stored expression when a store is moved.
     This field is also used as a cache in find_moveable_store, see
     LAST_AVAIL_CHECK_FAILURE below.  */
  rtx reaching_reg;
};
82 :
83 : /* Head of the list of load/store memory refs. */
84 : static struct st_expr * store_motion_mems = NULL;
85 :
86 : /* These bitmaps will hold the local dataflow properties per basic block. */
87 : static sbitmap *st_kill, *st_avloc, *st_antloc, *st_transp;
88 :
89 : /* Nonzero for expressions which should be inserted on a specific edge. */
90 : static sbitmap *st_insert_map;
91 :
92 : /* Nonzero for expressions which should be deleted in a specific block. */
93 : static sbitmap *st_delete_map;
94 :
95 : /* Global holding the number of store expressions we are dealing with. */
96 : static int num_stores;
97 :
98 : /* Contains the edge_list returned by pre_edge_lcm. */
99 : static struct edge_list *edge_list;
100 :
/* Hashtable helpers.  */

/* Hasher for st_expr entries: entries hash and compare by their MEM
   pattern.  nofree_ptr_hash means the table does not own the entries;
   they are freed separately (see free_store_motion_mems).  */

struct st_expr_hasher : nofree_ptr_hash <st_expr>
{
  static inline hashval_t hash (const st_expr *);
  static inline bool equal (const st_expr *, const st_expr *);
};
108 :
109 : inline hashval_t
110 510 : st_expr_hasher::hash (const st_expr *x)
111 : {
112 510 : int do_not_record_p = 0;
113 510 : return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
114 : }
115 :
116 : inline bool
117 518 : st_expr_hasher::equal (const st_expr *ptr1, const st_expr *ptr2)
118 : {
119 518 : return exp_equiv_p (ptr1->pattern, ptr2->pattern, 0, true);
120 : }
121 :
122 : /* Hashtable for the load/store memory refs. */
123 : static hash_table<st_expr_hasher> *store_motion_mems_table;
124 :
125 : /* This will search the st_expr list for a matching expression. If it
126 : doesn't find one, we create one and initialize it. */
127 :
128 : static struct st_expr *
129 163 : st_expr_entry (rtx x)
130 : {
131 163 : int do_not_record_p = 0;
132 163 : struct st_expr * ptr;
133 163 : unsigned int hash;
134 163 : st_expr **slot;
135 163 : struct st_expr e;
136 :
137 163 : hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
138 : NULL, /*have_reg_qty=*/false);
139 :
140 163 : e.pattern = x;
141 163 : slot = store_motion_mems_table->find_slot_with_hash (&e, hash, INSERT);
142 163 : if (*slot)
143 : return *slot;
144 :
145 136 : ptr = XNEW (struct st_expr);
146 :
147 136 : ptr->next = store_motion_mems;
148 136 : ptr->pattern = x;
149 136 : ptr->pattern_regs.create (0);
150 136 : ptr->antic_stores.create (0);
151 136 : ptr->avail_stores.create (0);
152 136 : ptr->reaching_reg = NULL_RTX;
153 136 : ptr->index = 0;
154 136 : ptr->hash_index = hash;
155 136 : store_motion_mems = ptr;
156 136 : *slot = ptr;
157 :
158 136 : return ptr;
159 : }
160 :
161 : /* Free up an individual st_expr entry. */
162 :
163 : static void
164 136 : free_st_expr_entry (struct st_expr * ptr)
165 : {
166 136 : ptr->antic_stores.release ();
167 136 : ptr->avail_stores.release ();
168 136 : ptr->pattern_regs.release ();
169 :
170 136 : free (ptr);
171 136 : }
172 :
173 : /* Free up all memory associated with the st_expr list. */
174 :
175 : static void
176 32 : free_store_motion_mems (void)
177 : {
178 32 : delete store_motion_mems_table;
179 32 : store_motion_mems_table = NULL;
180 :
181 146 : while (store_motion_mems)
182 : {
183 114 : struct st_expr * tmp = store_motion_mems;
184 114 : store_motion_mems = store_motion_mems->next;
185 114 : free_st_expr_entry (tmp);
186 : }
187 32 : store_motion_mems = NULL;
188 32 : }
189 :
190 : /* Assign each element of the list of mems a monotonically increasing value. */
191 :
192 : static int
193 53 : enumerate_store_motion_mems (void)
194 : {
195 53 : struct st_expr * ptr;
196 53 : int n = 0;
197 :
198 167 : for (ptr = store_motion_mems; ptr != NULL; ptr = ptr->next)
199 114 : ptr->index = n++;
200 :
201 53 : return n;
202 : }
203 :
/* Return the first item in the store list (the head of
   store_motion_mems), or NULL if the list is empty.  */

static inline struct st_expr *
first_st_expr (void)
{
  return store_motion_mems;
}
211 :
/* Return the next item in the store list after PTR, or NULL when PTR
   is the last item.  */

static inline struct st_expr *
next_st_expr (struct st_expr * ptr)
{
  return ptr->next;
}
219 :
220 : /* Dump debugging info about the store_motion_mems list. */
221 :
222 : static void
223 2 : print_store_motion_mems (FILE * file)
224 : {
225 2 : struct st_expr * ptr;
226 :
227 2 : fprintf (dump_file, "STORE_MOTION list of MEM exprs considered:\n");
228 :
229 3 : for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
230 : {
231 1 : fprintf (file, " Pattern (%3d): ", ptr->index);
232 :
233 1 : print_rtl (file, ptr->pattern);
234 :
235 1 : fprintf (file, "\n ANTIC stores : ");
236 1 : print_rtx_insn_vec (file, ptr->antic_stores);
237 :
238 1 : fprintf (file, "\n AVAIL stores : ");
239 :
240 1 : print_rtx_insn_vec (file, ptr->avail_stores);
241 :
242 1 : fprintf (file, "\n\n");
243 : }
244 :
245 2 : fprintf (file, "\n");
246 2 : }
247 :
248 : /* Return zero if some of the registers in list X are killed
249 : due to set of registers in bitmap REGS_SET. */
250 :
251 : static bool
252 1198 : store_ops_ok (const vec<rtx> &x, int *regs_set)
253 : {
254 3512 : for (rtx temp : x)
255 809 : if (regs_set[REGNO (temp)])
256 : return false;
257 :
258 : return true;
259 : }
260 :
261 : /* Returns a list of registers mentioned in X.
262 : FIXME: A regset would be prettier and less expensive. */
263 :
264 : static void
265 143 : extract_mentioned_regs (rtx x, vec<rtx> *mentioned_regs)
266 : {
267 143 : subrtx_var_iterator::array_type array;
268 589 : FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
269 : {
270 446 : rtx x = *iter;
271 446 : if (REG_P (x))
272 92 : mentioned_regs->safe_push (x);
273 : }
274 143 : }
275 :
276 : /* Check to see if the load X is aliased with STORE_PATTERN.
277 : AFTER is true if we are checking the case when STORE_PATTERN occurs
278 : after the X. */
279 :
280 : static bool
281 727 : load_kills_store (const_rtx x, const_rtx store_pattern, int after)
282 : {
283 727 : if (after)
284 133 : return anti_dependence (x, store_pattern);
285 : else
286 594 : return true_dependence (store_pattern, GET_MODE (store_pattern), x);
287 : }
288 :
289 : /* Go through the entire rtx X, looking for any loads which might alias
290 : STORE_PATTERN. Return true if found.
291 : AFTER is true if we are checking the case when STORE_PATTERN occurs
292 : after the insn X. */
293 :
294 : static bool
295 13332 : find_loads (const_rtx x, const_rtx store_pattern, int after)
296 : {
297 13332 : const char * fmt;
298 13332 : int i, j;
299 13332 : int ret = false;
300 :
301 13332 : if (!x)
302 : return false;
303 :
304 13332 : if (GET_CODE (x) == SET)
305 4620 : x = SET_SRC (x);
306 :
307 13332 : if (MEM_P (x))
308 : {
309 727 : if (load_kills_store (x, store_pattern, after))
310 : return true;
311 : }
312 :
313 : /* Recursively process the insn. */
314 13150 : fmt = GET_RTX_FORMAT (GET_CODE (x));
315 :
316 30221 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
317 : {
318 17071 : if (fmt[i] == 'e')
319 7633 : ret |= find_loads (XEXP (x, i), store_pattern, after);
320 9438 : else if (fmt[i] == 'E')
321 147 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
322 86 : ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
323 : }
324 13150 : return ret;
325 : }
326 :
327 : /* Go through pattern PAT looking for any loads which might kill the
328 : store in X. Return true if found.
329 : AFTER is true if we are checking the case when loads kill X occurs
330 : after the insn for PAT. */
331 :
332 : static inline bool
333 5421 : store_killed_in_pat (const_rtx x, const_rtx pat, int after)
334 : {
335 5421 : if (GET_CODE (pat) == SET)
336 : {
337 4649 : rtx dest = SET_DEST (pat);
338 :
339 4649 : if (GET_CODE (dest) == ZERO_EXTRACT)
340 0 : dest = XEXP (dest, 0);
341 :
342 : /* Check for memory stores to aliased objects. */
343 4649 : if (MEM_P (dest)
344 4649 : && !exp_equiv_p (dest, x, 0, true))
345 : {
346 1330 : if (after)
347 : {
348 197 : if (output_dependence (dest, x))
349 : return true;
350 : }
351 : else
352 : {
353 1133 : if (output_dependence (x, dest))
354 : return true;
355 : }
356 : }
357 : }
358 :
359 5392 : if (find_loads (pat, x, after))
360 : return true;
361 :
362 : return false;
363 : }
364 :
/* Check if INSN kills the store pattern X (is aliased with it).
   X_REGS is the list of registers mentioned in X (as computed by
   extract_mentioned_regs).  AFTER is true if we are checking the case
   when store X occurs after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (const_rtx x, const vec<rtx> &x_regs,
		      const rtx_insn *insn, int after)
{
  const_rtx note, pat;

  /* Notes, labels, barriers and debug insns cannot kill a store.  */
  if (! NONDEBUG_INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      if (!RTL_CONST_CALL_P (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of some of registers used in mem is stack pointer.  */
      for (rtx temp : x_regs)
	if (may_be_sp_based_p (temp))
	  return true;

      return false;
    }

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
    {
      if (store_killed_in_pat (x, pat, after))
	return true;
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      /* Any arm of the PARALLEL may kill the store.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
	  return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (exp_equiv_p (note, x, 0, true))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}
426 :
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed and FAIL_INSN is non-NULL, store there the insn in which
   the kill occurs, or NULL_RTX when a register of X was clobbered so the
   exact location is unknown.  */

static bool
store_killed_after (const_rtx x, const vec<rtx> &x_regs,
		    const rtx_insn *insn, const_basic_block bb,
		    int *regs_set_after, rtx *fail_insn)
{
  rtx_insn *last = BB_END (bb), *act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly
     (it must be the LAST insn in the block that kills the store).  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}
458 :
459 : /* Returns true if the expression X is loaded or clobbered on or before INSN
460 : within basic block BB. X_REGS is list of registers mentioned in X.
461 : REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
462 : static bool
463 151 : store_killed_before (const_rtx x, const vec<rtx> &x_regs,
464 : const rtx_insn *insn, const_basic_block bb,
465 : int *regs_set_before)
466 : {
467 151 : rtx_insn *first = BB_HEAD (bb);
468 :
469 151 : if (!store_ops_ok (x_regs, regs_set_before))
470 : return true;
471 :
472 886 : for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
473 779 : if (store_killed_in_insn (x, x_regs, insn, true))
474 : return true;
475 :
476 : return false;
477 : }
478 :
479 : /* The last insn in the basic block that compute_store_table is processing,
480 : where store_killed_after is true for X.
481 : Since we go through the basic block from BB_END to BB_HEAD, this is
482 : also the available store at the end of the basic block. Therefore
483 : this is in effect a cache, to avoid calling store_killed_after for
484 : equivalent aliasing store expressions.
485 : This value is only meaningful during the computation of the store
486 : table. We hi-jack the REACHING_REG field of struct st_expr to save
487 : a bit of memory. */
488 : #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
489 :
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB
   from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORES
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORES;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn due to that it fails to be
      available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

   The things are complicated a bit by fact that there already may be stores
   to the same MEM from other blocks; also caller must take care of the
   necessary cleanup of the temporary markers after end of the basic block.
   */

static void
find_moveable_store (rtx_insn *insn, int *regs_set_before, int *regs_set_after)
{
  struct st_expr * ptr;
  rtx dest, set;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  /* Only simple single-set insns are candidates.  */
  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  /* Reject non-MEM destinations, volatile MEMs and block-mode MEMs.  */
  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  /* The address itself must be free of side effects (auto-inc etc.).  */
  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insns can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instruction.  See for example
     PR24257.  */
  if (!can_assign_to_reg_without_clobbers_p (SET_SRC (set),
					     GET_MODE (SET_SRC (set))))
    return;

  /* Find (or create) the table entry for this MEM and lazily compute
     the registers its address mentions.  */
  ptr = st_expr_entry (dest);
  if (ptr->pattern_regs.is_empty ())
    extract_mentioned_regs (dest, &ptr->pattern_regs);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (ptr->antic_stores.is_empty ())
    check_anticipatable = 1;
  else
    {
      /* A NULL last element is the "no more anticipatable stores"
	 marker; a non-NULL element from another block means this is
	 the first candidate in BB.  */
      rtx_insn *tmp = ptr->antic_stores.last ();
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      rtx_insn *tmp;
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL;
      else
	tmp = insn;
      ptr->antic_stores.safe_push (tmp);
    }

  /* It is not necessary to check whether store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (ptr->avail_stores.is_empty ())
    check_available = 1;
  else
    {
      rtx_insn *tmp = ptr->avail_stores.last ();
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check that we have already reached the insn at that the check
	 failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  /* Scan backwards from BB_END; if we hit INSN before the cached
	     failure point, the failing insn is behind us and the store
	     is available after all.  */
	  rtx_insn *tmp;
	  for (tmp = BB_END (bb);
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    ptr->avail_stores.safe_push (insn);
}
613 :
/* Find available and anticipatable stores.  Builds the st_expr list,
   prunes entries that are available nowhere, and returns the number of
   surviving store expressions.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
  rtx_insn *insn;
  rtx_insn *tmp;
  df_ref def;
  int *last_set_in, *already_set;
  struct st_expr * ptr, **prev_next_ptr_ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  store_motion_mems = NULL;
  store_motion_mems_table = new hash_table<st_expr_hasher> (13);
  /* last_set_in must start zeroed (XCNEWVEC); it is re-zeroed
     incrementally at the end of each block's scan below.  */
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* First compute the registers set in this block.  */
      FOR_BB_INSNS (bb, insn)
	{

	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  FOR_EACH_INSN_DEF (def, insn)
	    last_set_in[DF_REF_REGNO (def)] = INSN_UID (insn);
	}

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      FOR_BB_INSNS (bb, insn)
	{
	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  FOR_EACH_INSN_DEF (def, insn)
	    already_set[DF_REF_REGNO (def)] = INSN_UID (insn);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  FOR_EACH_INSN_DEF (def, insn)
	    if (last_set_in[DF_REF_REGNO (def)] == INSN_UID (insn))
	      last_set_in[DF_REF_REGNO (def)] = 0;
	}

      if (flag_checking)
	{
	  /* last_set_in should now be all-zero.  */
	  for (unsigned regno = 0; regno < max_gcse_regno; regno++)
	    gcc_assert (!last_set_in[regno]);
	}

      /* Clear temporary marks left by find_moveable_store: the cached
	 availability-failure insn, and a trailing NULL "no more
	 anticipatable stores" marker in ANTIC_STORES.  */
      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
	{
	  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
	  if (!ptr->antic_stores.is_empty ()
	      && (tmp = ptr->antic_stores.last ()) == NULL)
	    ptr->antic_stores.pop ();
	}
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = store_motion_mems, prev_next_ptr_ptr = &store_motion_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (ptr->avail_stores.is_empty ())
	{
	  *prev_next_ptr_ptr = ptr->next;
	  store_motion_mems_table->remove_elt_with_hash (ptr, ptr->hash_index);
	  free_st_expr_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_store_motion_mems ();

  if (dump_file)
    print_store_motion_mems (dump_file);

  free (last_set_in);
  free (already_set);
  return ret;
}
708 :
709 : /* In all code following after this, REACHING_REG has its original
710 : meaning again. Avoid confusion, and undef the accessor macro for
711 : the temporary marks usage in compute_store_table. */
712 : #undef LAST_AVAIL_CHECK_FAILURE
713 :
/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_basic_block (rtx_insn *insn, basic_block bb)
{
  /* Insert at start of successor block.  Skip past the block's label
     and basic-block note: PREV tracks the last insn INSN must follow.  */
  rtx_insn *prev = PREV_INSN (BB_HEAD (bb));
  rtx_insn *before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
	  && !NOTE_INSN_BASIC_BLOCK_P (before))
	break;
      prev = before;
      /* Don't walk past the end of the block.  */
      if (prev == BB_END (bb))
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}
744 :
/* This routine will insert a store on an edge.  EXPR is the st_expr entry
   for the memory reference, and E is the edge to insert it on.  Returns
   nonzero if an edge insertion was performed (i.e. commit_edge_insertions
   will be needed).  */

static int
insert_store (struct st_expr * expr, edge e)
{
  rtx reg;
  rtx_insn *insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deleted before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
	int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

	gcc_assert (index != EDGE_INDEX_NO_EDGE);
	if (! bitmap_bit_p (st_insert_map[index], expr->index))
	  break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  bitmap_clear_bit (st_insert_map[index], expr->index);
	}
      insert_insn_start_basic_block (insn, bb);
      /* Return 0: no edge insertion happened in this case.  */
      return 0;
    }

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one didn't used to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}
812 :
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.  Walks all blocks
   reachable from BB via a DFS over successor edges, scanning each block
   up to (but not including) the first anticipatable store of SMEXPR in
   that block, if any.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  rtx note;
  rtx_insn *insn;
  rtx mem = smexpr->pattern;

  /* Explicit DFS stack: one saved edge iterator per block on the
     current search path.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun));
  sp = 0;
  ei = ei_start (bb->succs);

  bitmap_clear (visited);

  /* ACT is the edge currently being followed; NULL means the current
     iterator is exhausted.  */
  act = (EDGE_COUNT (ei_container (ei))
	 ? EDGE_I (ei_container (ei), 0)
	 : NULL);
  for (;;)
    {
      if (!act)
	{
	  /* No more successors here; pop, or finish when the stack
	     is empty.  */
	  if (!sp)
	    {
	      free (stack);
	      return;
	    }
	  act = ei_edge (stack[--sp]);
	}
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bitmap_bit_p (visited, bb->index))
	{
	  if (!ei_end_p (ei))
	    ei_next (&ei);
	  act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
	  continue;
	}
      bitmap_set_bit (visited, bb->index);

      /* Scan only up to the anticipatable store of SMEXPR in this block,
	 if there is one; past it the old value is dead anyway.  */
      rtx_insn *last;
      if (bitmap_bit_p (st_antloc[bb->index], smexpr->index))
	{
	  unsigned int i;
	  FOR_EACH_VEC_ELT_REVERSE (smexpr->antic_stores, i, last)
	    if (BLOCK_FOR_INSN (last) == bb)
	      break;
	}
      else
	last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
	if (NONDEBUG_INSN_P (insn))
	  {
	    note = find_reg_equal_equiv_note (insn);
	    if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	      continue;

	    if (dump_file)
	      fprintf (dump_file,
		       "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		       INSN_UID (insn));
	    remove_note (insn, note);
	  }

      /* Advance past ACT, then descend into this block's successors,
	 saving the parent's iterator if it still has edges left.  */
      if (!ei_end_p (ei))
	ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
	{
	  if (act)
	    stack[sp++] = ei;
	  ei = ei_start (bb->succs);
	  act = (EDGE_COUNT (ei_container (ei))
		 ? EDGE_I (ei_container (ei), 0)
		 : NULL);
	}
    }
}
901 :
/* This routine will replace the store DEL in block BB with a SET from
   its source to the register REG, and clean up REG_EQUAL/REG_EQUIV
   notes made stale by the removal.  SMEXPR is the st_expr for the
   stored MEM.  */

static void
replace_store_insn (rtx reg, rtx_insn *del, basic_block bb,
		    struct st_expr *smexpr)
{
  rtx_insn *insn;
  rtx mem, note, set;

  insn = prepare_copy_insn (reg, SET_SRC (single_set (del)));

  /* If DEL is recorded as an anticipatable store, point that record
     at the replacement insn instead.  */
  unsigned int i;
  rtx_insn *temp;
  FOR_EACH_VEC_ELT_REVERSE (smexpr->antic_stores, i, temp)
    if (temp == del)
      {
	smexpr->antic_stores[i] = insn;
	break;
      }

  /* Move the notes from the deleted insn to its replacement.  */
  REG_NOTES (insn) = REG_NOTES (del);

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
	       "STORE_MOTION  delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE_MOTION  replaced with insn:\n      ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
     they are no longer accurate provided that they are reached by this
     definition, so drop them.  */
  mem = smexpr->pattern;
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
	set = single_set (insn);
	if (!set)
	  continue;
	/* A new store to the same MEM cuts off the reach of the old
	   definition; notes past it are accurate again.  */
	if (exp_equiv_p (SET_DEST (set), mem, 0, true))
	  return;
	note = find_reg_equal_equiv_note (insn);
	if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	  continue;

	if (dump_file)
	  fprintf (dump_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		   INSN_UID (insn));
	remove_note (insn, note);
      }
  /* The rest of this block was clean; continue into reachable blocks.  */
  remove_reachable_equiv_notes (bb, smexpr);
}
963 : }
964 :
965 :
966 : /* Delete a store, but copy the value that would have been stored into
967 : the reaching_reg for later storing. */
968 :
969 : static void
970 14 : delete_store (struct st_expr * expr, basic_block bb)
971 : {
972 14 : rtx reg;
973 :
974 14 : if (expr->reaching_reg == NULL_RTX)
975 13 : expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);
976 :
977 14 : reg = expr->reaching_reg;
978 :
979 14 : unsigned int len = expr->avail_stores.length ();
980 18 : for (unsigned int i = len - 1; i < len; i--)
981 : {
982 18 : rtx_insn *del = expr->avail_stores[i];
983 18 : if (BLOCK_FOR_INSN (del) == bb)
984 : {
985 : /* We know there is only one since we deleted redundant
986 : ones during the available computation. */
987 14 : replace_store_insn (reg, del, bb, expr);
988 14 : break;
989 : }
990 : }
991 14 : }
992 :
/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.

   On exit, for every basic block B and store expression E:
     - st_avloc[B] has E set if E is available at the end of B,
     - st_antloc[B] has E set if E is anticipatable at the start of B,
     - st_kill[B] has E set if something in B kills E,
     - st_transp[B] has E set if B is transparent for E.
   As a side effect, stores that are redundant within their own block
   (a later available store to the same location exists) are replaced
   by copies into a fresh pseudo and removed.  */
static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx_insn *insn;
  struct st_expr * ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  /* Build the gen_vector. This is any store in the table which is not killed
     by aliasing later in its block.  */
  st_avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				   num_stores);
  bitmap_vector_clear (st_avloc, last_basic_block_for_fn (cfun));

  st_antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				    num_stores);
  bitmap_vector_clear (st_antloc, last_basic_block_for_fn (cfun));

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      /* Walk avail_stores from last to first; the unsigned index wraps
	 around after reaching zero, which terminates the loop via the
	 I < LEN test.  Walking backwards means the latest store in each
	 block is seen first and survives; earlier ones are redundant.  */
      unsigned int len = ptr->avail_stores.length ();
      for (unsigned int i = len - 1; i < len; i--)
	{
	  insn = ptr->avail_stores[i];
	  bb = BLOCK_FOR_INSN (insn);

	  /* If we've already seen an available expression in this block,
	     we can delete this one (It occurs earlier in the block). We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
	  if (bitmap_bit_p (st_avloc[bb->index], ptr->index))
	    {
	      rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
	      if (dump_file)
		fprintf (dump_file, "Removing redundant store:\n");
	      replace_store_insn (r, insn, bb, ptr);
	      continue;
	    }
	  bitmap_set_bit (st_avloc[bb->index], ptr->index);
	}

      /* Mark every block containing an anticipatable occurrence.  */
      unsigned int i;
      FOR_EACH_VEC_ELT_REVERSE (ptr->antic_stores, i, insn)
	{
	  bb = BLOCK_FOR_INSN (insn);
	  bitmap_set_bit (st_antloc[bb->index], ptr->index);
	}
    }

  st_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
  bitmap_vector_clear (st_kill, last_basic_block_for_fn (cfun));

  st_transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
  bitmap_vector_clear (st_transp, last_basic_block_for_fn (cfun));
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Collect, per block, which registers are set by any non-debug
	 insn; store_killed_after consults this to decide whether a
	 store's address or value is clobbered within the block.  */
      memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);

      FOR_BB_INSNS (bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  {
	    df_ref def;
	    FOR_EACH_INSN_DEF (def, insn)
	      {
		unsigned int ref_regno = DF_REF_REGNO (def);
		if (ref_regno < max_gcse_regno)
		  regs_set_in_block[DF_REF_REGNO (def)] = 1;
	      }
	  }

      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
	{
	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
				  bb, regs_set_in_block, NULL))
	    {
	      /* It should not be necessary to consider the expression
		 killed if it is both anticipatable and available.  */
	      if (!bitmap_bit_p (st_antloc[bb->index], ptr->index)
		  || !bitmap_bit_p (st_avloc[bb->index], ptr->index))
		bitmap_set_bit (st_kill[bb->index], ptr->index);
	    }
	  else
	    bitmap_set_bit (st_transp[bb->index], ptr->index);
	}
    }

  free (regs_set_in_block);

  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_kill", "", st_kill,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_transp", "", st_transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
			  last_basic_block_for_fn (cfun));
    }
}
1098 :
1099 : /* Free memory used by store motion. */
1100 :
1101 : static void
1102 32 : free_store_memory (void)
1103 : {
1104 32 : free_store_motion_mems ();
1105 :
1106 32 : if (st_avloc)
1107 32 : sbitmap_vector_free (st_avloc);
1108 32 : if (st_kill)
1109 32 : sbitmap_vector_free (st_kill);
1110 32 : if (st_transp)
1111 32 : sbitmap_vector_free (st_transp);
1112 32 : if (st_antloc)
1113 32 : sbitmap_vector_free (st_antloc);
1114 32 : if (st_insert_map)
1115 32 : sbitmap_vector_free (st_insert_map);
1116 32 : if (st_delete_map)
1117 32 : sbitmap_vector_free (st_delete_map);
1118 :
1119 32 : st_avloc = st_kill = st_transp = st_antloc = NULL;
1120 32 : st_insert_map = st_delete_map = NULL;
1121 32 : }
1122 :
/* Perform store motion. Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.
   Return non-zero if transformations are performed by the pass.  */

static int
one_store_motion_pass (void)
{
  basic_block bb;
  int x;
  struct st_expr * ptr;
  int did_edge_inserts = 0;
  int n_stores_deleted = 0;
  int n_stores_created = 0;

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      /* Nothing to move: release the (empty) table and bail out.  */
      delete store_motion_mems_table;
      store_motion_mems_table = NULL;
      end_alias_analysis ();
      return 0;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  /* LCM on the reverse CFG needs every block to reach the exit; give
     infinite loops a fake edge to it.  */
  connect_infinite_loops_to_exit ();

  /* Run LCM on the reverse flowgraph; the result tells us, per store
     expression, on which edges to insert copies (st_insert_map) and in
     which blocks to delete the original store (st_delete_map).  */
  edge_list = pre_edge_rev_lcm (num_stores, st_transp, st_avloc,
				st_antloc, st_kill, &st_insert_map,
				&st_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
	 store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
	if (bitmap_bit_p (st_insert_map[x], ptr->index)
	    && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
	  break;

      if (x >= 0)
	{
	  if (dump_file != NULL)
	    fprintf (dump_file,
		     "Can't replace store %d: abnormal edge from %d to %d\n",
		     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
		     INDEX_EDGE (edge_list, x)->dest->index);
	  continue;
	}

      /* Delete the original stores in the blocks LCM marked for
	 deletion; the stored value is preserved in the reaching_reg.  */

      FOR_EACH_BB_FN (bb, cfun)
	if (bitmap_bit_p (st_delete_map[bb->index], ptr->index))
	  {
	    delete_store (ptr, bb);
	    n_stores_deleted++;
	  }

      /* Insert the sunken stores on the edges LCM chose.  */
      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (bitmap_bit_p (st_insert_map[x], ptr->index))
	  {
	    did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
	    n_stores_created++;
	  }
    }

  if (did_edge_inserts)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  /* Undo connect_infinite_loops_to_exit.  */
  remove_fake_exit_edges ();
  end_alias_analysis ();

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION of %s, %d basic blocks, ",
	       current_function_name (), n_basic_blocks_for_fn (cfun));
      fprintf (dump_file, "%d insns deleted, %d insns created\n",
	       n_stores_deleted, n_stores_created);
    }

  return (n_stores_deleted > 0 || n_stores_created > 0);
}
1212 :
1213 :
1214 : static unsigned int
1215 53 : execute_rtl_store_motion (void)
1216 : {
1217 53 : delete_unreachable_blocks ();
1218 53 : df_analyze ();
1219 53 : flag_rerun_cse_after_global_opts |= one_store_motion_pass ();
1220 53 : return 0;
1221 : }
1222 :
namespace {

/* Pass descriptor for the RTL store motion pass.  Requires cfglayout
   mode and asks for a dataflow finish afterwards.  */
const pass_data pass_data_rtl_store_motion =
{
  RTL_PASS, /* type */
  "store_motion", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_LSM, /* tv_id */
  PROP_cfglayout, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_store_motion : public rtl_opt_pass
{
public:
  pass_rtl_store_motion (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_store_motion, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override;
  unsigned int execute (function *) final override
    {
      return execute_rtl_store_motion ();
    }

}; // class pass_rtl_store_motion

/* Gate: run only when optimizing with -fgcse-sm, in functions that do
   not call setjmp, when optimizing the function for speed, and when
   the store_motion debug counter permits it.  */
bool
pass_rtl_store_motion::gate (function *fun)
{
  return optimize > 0 && flag_gcse_sm
    && !fun->calls_setjmp
    && optimize_function_for_speed_p (fun)
    && dbg_cnt (store_motion);
}

} // anon namespace
1264 :
/* Factory used by the pass manager to create the store motion pass.  */

rtl_opt_pass *
make_pass_rtl_store_motion (gcc::context *ctxt)
{
  return new pass_rtl_store_motion (ctxt);
}
|