Line data Source code
1 : /* Harden conditionals.
2 : Copyright (C) 2021-2026 Free Software Foundation, Inc.
3 : Contributed by Alexandre Oliva <oliva@adacore.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "fold-const.h"
29 : #include "gimple.h"
30 : #include "gimplify.h"
31 : #include "tree-pass.h"
32 : #include "ssa.h"
33 : #include "gimple-iterator.h"
34 : #include "tree-cfg.h"
35 : #include "basic-block.h"
36 : #include "cfghooks.h"
37 : #include "cfgloop.h"
38 : #include "tree-eh.h"
39 : #include "sbitmap.h"
40 : #include "diagnostic.h"
41 : #include "intl.h"
42 :
43 : namespace {
44 :
/* These passes introduce redundant, but reversed conditionals at
   compares, such as those used in conditional branches, and those
   that compute boolean results.  This doesn't make much sense for
   abstract CPUs, but this kind of hardening may avoid undesirable
   execution paths on actual CPUs under such attacks as of power
   deprivation.  */
51 :
52 : /* Define a pass to harden conditionals other than branches. */
53 :
/* Descriptor for the hardcmp pass: a GIMPLE pass that requires a CFG
   in SSA form, and requests SSA update and CFG cleanup afterwards,
   since it splits blocks and adds edges.  */
const pass_data pass_data_harden_compares = {
  GIMPLE_PASS,		// type
  "hardcmp",		// name (used in dump-file names and -fdump options)
  OPTGROUP_NONE,	// optinfo_flags
  TV_NONE,		// tv_id (no dedicated timevar)
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg, // properties_finish
};
66 :
/* Pass object for hardcmp: hardens boolean-yielding compare stmts by
   recomputing them reversed and trapping if both results agree.  */
class pass_harden_compares : public gimple_opt_pass
{
public:
  pass_harden_compares (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_compares, ctxt)
  {}
  /* Standard cloning hook, so the pass may appear more than once in
     the pass list.  */
  opt_pass *clone () final override
  {
    return new pass_harden_compares (m_ctxt);
  }
  /* Run only when -fharden-compares is enabled.  */
  bool gate (function *) final override
  {
    return flag_harden_compares;
  }
  /* The transformation proper; defined later in this file.  */
  unsigned int execute (function *) final override;
};
83 :
84 : /* Define a pass to harden conditionals in branches. This pass must
85 : run after the above, otherwise it will re-harden the checks
86 : introduced by the above. */
87 :
/* Descriptor for the hardcbr pass: like hardcmp, a GIMPLE pass that
   requires a CFG in SSA form, and requests SSA update and CFG
   cleanup afterwards, since it splits edges and adds trap blocks.  */
const pass_data pass_data_harden_conditional_branches = {
  GIMPLE_PASS,		// type
  "hardcbr",		// name (used in dump-file names and -fdump options)
  OPTGROUP_NONE,	// optinfo_flags
  TV_NONE,		// tv_id (no dedicated timevar)
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg, // properties_finish
};
100 :
/* Pass object for hardcbr: hardens conditional branches by re-testing
   the reversed condition on each outgoing edge and trapping on
   disagreement.  */
class pass_harden_conditional_branches : public gimple_opt_pass
{
public:
  pass_harden_conditional_branches (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_conditional_branches, ctxt)
  {}
  /* Standard cloning hook, so the pass may appear more than once in
     the pass list.  */
  opt_pass *clone () final override
  {
    return new pass_harden_conditional_branches (m_ctxt);
  }
  /* Run only when -fharden-conditional-branches is enabled.  */
  bool gate (function *) final override
  {
    return flag_harden_conditional_branches;
  }
  /* The transformation proper; defined later in this file.  */
  unsigned int execute (function *) final override;
};
117 :
118 : }
119 :
120 : /* If VAL is an SSA name, return an SSA name holding the same value,
121 : but without the compiler's knowing that it holds the same value, so
122 : that uses thereof can't be optimized the way VAL might. Insert
123 : stmts that initialize it before *GSIP, with LOC.
124 :
125 : Otherwise, VAL must be an invariant, returned unchanged. */
126 :
static inline tree
detach_value (location_t loc, gimple_stmt_iterator *gsip, tree val)
{
  /* Invariants are returned unchanged; only SSA names need (or can
     take) detaching.  */
  if (TREE_CONSTANT (val) || TREE_CODE (val) != SSA_NAME)
    {
      gcc_checking_assert (is_gimple_min_invariant (val));
      return val;
    }

  /* Create a SSA "copy" of VAL.  It would be nice to have it named
     after the corresponding variable, but sharing the same decl is
     problematic when VAL is a DECL_BY_REFERENCE RESULT_DECL, and
     copying just the identifier hits -fcompare-debug failures.  */
  tree ret = make_ssa_name (TREE_TYPE (val));

  /* Some modes won't fit in general regs, so we fall back to memory
     for them.  ??? It would be ideal to try to identify an alternate,
     wider or more suitable register class, and use the corresponding
     constraint, but there's no logic to go from register class to
     constraint, even if there is a corresponding constraint, and even
     if we could enumerate constraints, we can't get to their string
     either.  So this will do for now.  */
  bool need_memory = true;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (val));
  if (mode != BLKmode)
    for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i)
	  && targetm.hard_regno_mode_ok (i, mode))
	{
	  need_memory = false;
	  break;
	}

  tree asminput = val;
  tree asmoutput = ret;
  /* "=g"/"0" tie the output to a general operand matching the input;
     "=m"/"m" route the value through memory instead.  */
  const char *constraint_out = need_memory ? "=m" : "=g";
  const char *constraint_in = need_memory ? "m" : "0";

  if (need_memory)
    {
      /* Copy VAL into an addressable temporary, used as both the
	 asm's input and its output memory operand.  */
      tree temp = create_tmp_var (TREE_TYPE (val), "dtch");
      mark_addressable (temp);

      gassign *copyin = gimple_build_assign (temp, asminput);
      gimple_set_location (copyin, loc);
      gsi_insert_before (gsip, copyin, GSI_SAME_STMT);

      asminput = asmoutput = temp;
    }

  /* Output an asm statement with matching input and output.  It does
     nothing, but after it the compiler no longer knows the output
     still holds the same value as the input.  */
  vec<tree, va_gc> *inputs = NULL;
  vec<tree, va_gc> *outputs = NULL;
  vec_safe_push (outputs,
		 build_tree_list
		 (build_tree_list
		  (NULL_TREE, build_string (strlen (constraint_out),
					    constraint_out)),
		  asmoutput));
  vec_safe_push (inputs,
		 build_tree_list
		 (build_tree_list
		  (NULL_TREE, build_string (strlen (constraint_in),
					    constraint_in)),
		  asminput));
  gasm *detach = gimple_build_asm_vec ("", inputs, outputs,
				       NULL, NULL);
  gimple_set_location (detach, loc);
  gsi_insert_before (gsip, detach, GSI_SAME_STMT);

  if (need_memory)
    {
      /* Load the detached value back from the temporary into the SSA
	 result, which is thus defined by the copy-out...  */
      gassign *copyout = gimple_build_assign (ret, asmoutput);
      gimple_set_location (copyout, loc);
      gsi_insert_before (gsip, copyout, GSI_SAME_STMT);
      SSA_NAME_DEF_STMT (ret) = copyout;

      /* ... and mark the temporary as dead past this point.  */
      gassign *clobber = gimple_build_assign (asmoutput,
					      build_clobber
					      (TREE_TYPE (asmoutput)));
      gimple_set_location (clobber, loc);
      gsi_insert_before (gsip, clobber, GSI_SAME_STMT);
    }
  else
    /* In the register case, the asm itself defines RET.  */
    SSA_NAME_DEF_STMT (ret) = detach;

  return ret;
}
217 :
/* Build a cond stmt out of COP, LHS, RHS, insert it before *GSIP with
   location LOC.  *GSIP must be at the end of a basic block.  The succ
   edge out of the block becomes the true or false edge opposite to
   that in FLAGS.  Create a new block with a single trap stmt, in the
   cold partition if the function is partitioned, and a new edge to
   it as the other edge for the cond.  */
224 :
static inline void
insert_check_and_trap (location_t loc, gimple_stmt_iterator *gsip,
		       int flags, enum tree_code cop, tree lhs, tree rhs)
{
  basic_block chk = gsi_bb (*gsip);

  /* Insert the checking (reversed) compare at the end of CHK.  */
  gcond *cond = gimple_build_cond (cop, lhs, rhs, NULL, NULL);
  gimple_set_location (cond, loc);
  gsi_insert_before (gsip, cond, GSI_SAME_STMT);

  /* The trap block is never expected to run; give it a zero
     execution count.  */
  basic_block trp = create_empty_bb (chk);
  trp->count = profile_count::zero ();

  gimple_stmt_iterator gsit = gsi_after_labels (trp);
  gcall *trap = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_call_set_ctrl_altering (trap, true);
  gimple_set_location (trap, loc);
  gsi_insert_before (&gsit, trap, GSI_SAME_STMT);

  if (dump_file)
    fprintf (dump_file,
	     "Adding reversed compare to block %i, and trap to block %i\n",
	     chk->index, trp->index);

  /* In a partitioned function, keep the (never-taken) trap block in
     the cold section.  */
  if (BB_PARTITION (chk))
    BB_SET_PARTITION (trp, BB_COLD_PARTITION);

  int true_false_flag = flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
  gcc_assert (true_false_flag);
  int neg_true_false_flag = (~flags) & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

  /* Remove the fallthru bit, and set the truth value for the
     preexisting edge and for the newly-created one.  In hardcbr,
     FLAGS is taken from the edge of the original cond expr that we're
     dealing with, so the reversed compare is expected to yield the
     negated result, and the same result calls for a trap.  In
     hardcmp, we're comparing the boolean results of the original and
     of the reversed compare, so we're passed FLAGS to trap on
     equality.  */
  single_succ_edge (chk)->flags &= ~EDGE_FALLTHRU;
  single_succ_edge (chk)->flags |= neg_true_false_flag;
  single_succ_edge (chk)->probability = profile_probability::always ();
  edge e = make_edge (chk, trp, true_false_flag);
  e->goto_locus = loc;
  e->probability = profile_probability::never ();

  /* Keep dominance and loop information up to date for the new
     block, when available.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, trp, chk);
  if (current_loops)
    add_bb_to_loop (trp, current_loops->tree_root);
}
276 :
277 : /* Split edge E, and insert_check_and_trap (see above) in the
278 : newly-created block, using already-detached copies of LHS's and
279 : RHS's values (see detach_value above) for the COP compare. */
280 :
static inline void
insert_edge_check_and_trap (location_t loc, edge e,
			    enum tree_code cop, tree lhs, tree rhs)
{
  /* Save everything we need from E before split_edge; E is nulled
     right after the split so it can't be reused by mistake.  */
  int flags = e->flags;
  basic_block src = e->src;
  basic_block dest = e->dest;
  location_t eloc = e->goto_locus;

  basic_block chk = split_edge (e);
  e = NULL;

  /* Attach the check's location to the incoming edge, and restore
     the original branch location on the outgoing one.  */
  single_pred_edge (chk)->goto_locus = loc;
  single_succ_edge (chk)->goto_locus = eloc;

  if (dump_file)
    fprintf (dump_file,
	     "Splitting edge %i->%i into block %i\n",
	     src->index, dest->index, chk->index);

  gimple_stmt_iterator gsik = gsi_after_labels (chk);

  insert_check_and_trap (loc, &gsik, flags, cop, lhs, rhs);
}
305 :
306 : /* Harden cond stmts at the end of FUN's blocks. */
307 :
unsigned int
pass_harden_conditional_branches::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);

  sbitmap_iterator it;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      /* Only blocks that end in a cond stmt are of interest.  */
      gimple_stmt_iterator gsi = gsi_last_bb (bb);

      if (gsi_end_p (gsi))
	continue;

      gcond *cond = dyn_cast <gcond *> (gsi_stmt (gsi));
      if (!cond)
	continue;

      /* Turn:

	 if (x op y) goto l1; else goto l2;

	 into:

	 if (x op y) goto l1'; else goto l2';
	 l1': if (x' cop y') goto l1'trap; else goto l1;
	 l1'trap: __builtin_trap ();
	 l2': if (x' cop y') goto l2; else goto l2'trap;
	 l2'trap: __builtin_trap ();

	 where cop is a complementary boolean operation to op; l1', l1'trap,
	 l2' and l2'trap are newly-created labels; and x' and y' hold the same
	 value as x and y, but in a way that does not enable the compiler to
	 optimize the redundant compare away.
      */

      enum tree_code op = gimple_cond_code (cond);
      tree lhs = gimple_cond_lhs (cond);
      tree rhs = gimple_cond_rhs (cond);
      location_t loc = gimple_location (cond);

      enum tree_code cop = invert_tree_comparison (op, HONOR_NANS (lhs));

      if (cop == ERROR_MARK)
	/* ??? Can we do better?  */
	continue;

      /* Detach the values before the compares.  If we do so later,
	 the compiler may use values inferred from the compares.  */
      bool same_p = (lhs == rhs);
      lhs = detach_value (loc, &gsi, lhs);
      rhs = same_p ? lhs : detach_value (loc, &gsi, rhs);

      /* Insert the reversed-compare check on both outgoing edges.  */
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 0), cop, lhs, rhs);
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 1), cop, lhs, rhs);
    }

  return 0;
}
376 :
377 : /* Instantiate a hardcbr pass. */
378 :
gimple_opt_pass *
make_pass_harden_conditional_branches (gcc::context *ctxt)
{
  /* Allocate a fresh pass instance associated with CTXT.  */
  return new pass_harden_conditional_branches (ctxt);
}
384 :
385 : /* Return the fallthru edge of a block whose other edge is an EH
386 : edge. If EHP is not NULL, store the EH edge in it. */
static inline edge
non_eh_succ_edge (basic_block bb, edge *ehp = NULL)
{
  /* BB must have exactly two successors: the fallthru edge and the
     EH edge.  */
  gcc_checking_assert (EDGE_COUNT (bb->succs) == 2);

  edge ret = find_fallthru_edge (bb->succs);

  /* The EH edge is whichever of the two successors the fallthru edge
     is not: index 1 if the fallthru is at index 0, else index 0.  */
  int eh_idx = EDGE_SUCC (bb, 0) == ret;
  edge eh = EDGE_SUCC (bb, eh_idx);

  gcc_checking_assert (!(ret->flags & EDGE_EH)
		       && (eh->flags & EDGE_EH));

  if (ehp)
    *ehp = eh;

  return ret;
}
405 :
406 : /* Harden boolean-yielding compares in FUN. */
407 :
unsigned int
pass_harden_compares::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);

  sbitmap_iterator it;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      /* Walk the stmts backwards from the end of the block.  */
      for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
	   !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  gassign *asgn = dyn_cast <gassign *> (gsi_stmt (gsi));
	  if (!asgn)
	    continue;

	  /* Turn:

	     z = x op y;

	     into:

	     z = x op y;
	     z' = x' cop y';
	     if (z == z') __builtin_trap ();

	     where cop is a complementary boolean operation to op; and x'
	     and y' hold the same value as x and y, but in a way that does
	     not enable the compiler to optimize the redundant compare
	     away.
	  */

	  enum tree_code op = gimple_assign_rhs_code (asgn);

	  enum tree_code cop;

	  /* Only boolean-yielding comparisons are hardened.  */
	  switch (op)
	    {
	    case EQ_EXPR:
	    case NE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LT_EXPR:
	    case LE_EXPR:
	    case LTGT_EXPR:
	    case UNEQ_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case ORDERED_EXPR:
	    case UNORDERED_EXPR:
	      cop = invert_tree_comparison (op,
					    HONOR_NANS
					    (gimple_assign_rhs1 (asgn)));

	      if (cop == ERROR_MARK)
		/* ??? Can we do better?  */
		continue;

	      break;

	      /* ??? Maybe handle these too?  */
	    case TRUTH_NOT_EXPR:
	      /* ??? The code below assumes binary ops, it would have to
		 be adjusted for TRUTH_NOT_EXPR, since it's unary.  */
	    case TRUTH_ANDIF_EXPR:
	    case TRUTH_ORIF_EXPR:
	    case TRUTH_AND_EXPR:
	    case TRUTH_OR_EXPR:
	    case TRUTH_XOR_EXPR:
	    default:
	      continue;
	    }

	  /* These are the operands for the verification.  */
	  tree lhs = gimple_assign_lhs (asgn);
	  tree op1 = gimple_assign_rhs1 (asgn);
	  tree op2 = gimple_assign_rhs2 (asgn);
	  location_t loc = gimple_location (asgn);

	  /* Vector booleans can't be used in conditional branches.  ???
	     Can we do better?  How to reduce compare and
	     reversed-compare result vectors to a single boolean?  */
	  if (VECTOR_TYPE_P (TREE_TYPE (op1)))
	    continue;

	  /* useless_type_conversion_p enables conversions from 1-bit
	     integer types to boolean to be discarded.  */
	  gcc_checking_assert (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE
			       || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
				   && TYPE_PRECISION (TREE_TYPE (lhs)) == 1));

	  /* RHS will hold the reversed compare's result, with the
	     same type as the original result LHS.  */
	  tree rhs = copy_ssa_name (lhs);

	  /* Detach the values before the compares, so that the
	     compiler infers nothing from them, not even from a
	     throwing compare that didn't throw.  */
	  bool same_p = (op1 == op2);
	  op1 = detach_value (loc, &gsi, op1);
	  op2 = same_p ? op1 : detach_value (loc, &gsi, op2);

	  gimple_stmt_iterator gsi_split = gsi;
	  /* Don't separate the original assignment from debug stmts
	     that might be associated with it, and arrange to split the
	     block after debug stmts, so as to make sure the split block
	     won't be debug stmts only.  */
	  gsi_next_nondebug (&gsi_split);

	  /* A throwing compare ends its block, so the check has to go
	     into a block split off the non-EH successor edge.  */
	  bool throwing_compare_p = stmt_ends_bb_p (asgn);
	  if (throwing_compare_p)
	    {
	      basic_block nbb = split_edge (non_eh_succ_edge
					    (gimple_bb (asgn)));
	      gsi_split = gsi_start_bb (nbb);

	      if (dump_file)
		fprintf (dump_file,
			 "Splitting non-EH edge from block %i into %i"
			 " after a throwing compare\n",
			 gimple_bb (asgn)->index, nbb->index);
	    }

	  /* Emit the reversed compare: rhs = op1 cop op2.  */
	  gassign *asgnck = gimple_build_assign (rhs, cop, op1, op2);
	  gimple_set_location (asgnck, loc);
	  gsi_insert_before (&gsi_split, asgnck, GSI_SAME_STMT);

	  /* We wish to insert a cond_expr after the compare, so arrange
	     for it to be at the end of a block if it isn't, and for it
	     to have a single successor in case there's more than
	     one, as in PR104975.  */
	  if (!gsi_end_p (gsi_split)
	      || !single_succ_p (gsi_bb (gsi_split)))
	    {
	      if (!gsi_end_p (gsi_split))
		gsi_prev (&gsi_split);
	      else
		gsi_split = gsi_last_bb (gsi_bb (gsi_split));
	      basic_block obb = gsi_bb (gsi_split);
	      basic_block nbb = split_block (obb, gsi_stmt (gsi_split))->dest;
	      gsi_next (&gsi_split);
	      gcc_checking_assert (gsi_end_p (gsi_split));

	      single_succ_edge (bb)->goto_locus = loc;

	      if (dump_file)
		fprintf (dump_file,
			 "Splitting block %i into %i"
			 " before the conditional trap branch\n",
			 obb->index, nbb->index);
	    }

	  /* If the check assignment must end a basic block, we can't
	     insert the conditional branch in the same block, so split
	     the block again, and prepare to insert the conditional
	     branch in the new block.

	     Also assign an EH region to the compare.  Even though it's
	     unlikely that the hardening compare will throw after the
	     original compare didn't, the compiler won't even know that
	     it's the same compare operands, so add the EH edge anyway.  */
	  if (throwing_compare_p)
	    {
	      add_stmt_to_eh_lp (asgnck, lookup_stmt_eh_lp (asgn));
	      edge eh = make_eh_edge (asgnck);
	      /* This compare looks like it could raise an exception,
		 but it's dominated by the original compare, that
		 would raise an exception first, so the EH edge from
		 this one is never really taken.  */
	      eh->probability = profile_probability::never ();
	      if (eh->dest->count.initialized_p ())
		eh->dest->count += eh->count ();
	      else
		eh->dest->count = eh->count ();

	      edge ckeh;
	      basic_block nbb = split_edge (non_eh_succ_edge
					    (gimple_bb (asgnck), &ckeh));
	      gcc_checking_assert (eh == ckeh);
	      gsi_split = gsi_start_bb (nbb);

	      if (dump_file)
		fprintf (dump_file,
			 "Splitting non-EH edge from block %i into %i after"
			 " the newly-inserted reversed throwing compare\n",
			 gimple_bb (asgnck)->index, nbb->index);

	      /* If the shared EH landing block has PHI nodes, give the
		 new EH edge the same arguments the original compare's
		 EH edge carries.  */
	      if (!gimple_seq_empty_p (phi_nodes (ckeh->dest)))
		{
		  edge aseh;
		  non_eh_succ_edge (gimple_bb (asgn), &aseh);

		  gcc_checking_assert (aseh->dest == ckeh->dest);

		  for (gphi_iterator psi = gsi_start_phis (ckeh->dest);
		       !gsi_end_p (psi); gsi_next (&psi))
		    {
		      gphi *phi = psi.phi ();
		      add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (phi, aseh), ckeh,
				   gimple_phi_arg_location_from_edge (phi, aseh));
		    }

		  if (dump_file)
		    fprintf (dump_file,
			     "Copying PHI args in EH block %i from %i to %i\n",
			     aseh->dest->index, aseh->src->index,
			     ckeh->src->index);
		}
	    }

	  gcc_checking_assert (single_succ_p (gsi_bb (gsi_split)));

	  /* Trap when the original and the reversed compare agree
	     (LHS == RHS), which never happens in a correct run.  */
	  insert_check_and_trap (loc, &gsi_split, EDGE_TRUE_VALUE,
				 EQ_EXPR, lhs, rhs);
	}
    }

  return 0;
}
636 :
637 : /* Instantiate a hardcmp pass. */
638 :
gimple_opt_pass *
make_pass_harden_compares (gcc::context *ctxt)
{
  /* Allocate a fresh pass instance associated with CTXT.  */
  return new pass_harden_compares (ctxt);
}
|