Line data Source code
1 : /* Optimize and expand sanitizer functions.
2 : Copyright (C) 2014-2026 Free Software Foundation, Inc.
3 : Contributed by Marek Polacek <polacek@redhat.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "ssa.h"
28 : #include "tree-pass.h"
29 : #include "tree-ssa-operands.h"
30 : #include "gimple-pretty-print.h"
31 : #include "fold-const.h"
32 : #include "gimple-iterator.h"
33 : #include "stringpool.h"
34 : #include "attribs.h"
35 : #include "asan.h"
36 : #include "ubsan.h"
37 : #include "tree-hash-traits.h"
38 : #include "gimple-ssa.h"
39 : #include "tree-phinodes.h"
40 : #include "ssa-iterators.h"
41 : #include "gimplify.h"
42 : #include "gimple-iterator.h"
43 : #include "gimple-walk.h"
44 : #include "cfghooks.h"
45 : #include "tree-dfa.h"
46 : #include "tree-ssa.h"
47 : #include "varasm.h"
48 :
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct sanopt_info
{
  /* True if this BB might call (directly or indirectly) free/munmap
     or similar operation.  */
  bool has_freeing_call_p;

  /* True if HAS_FREEING_CALL_P flag has been computed.  */
  bool has_freeing_call_computed_p;

  /* True if there is a block with HAS_FREEING_CALL_P flag set
     on any path between an immediate dominator of BB, denoted
     imm(BB), and BB.  */
  bool imm_dom_path_with_freeing_call_p;

  /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed.  */
  bool imm_dom_path_with_freeing_call_computed_p;

  /* Number of possibly freeing calls encountered in this bb
     (so far).  Used as a timestamp: statements record the current
     value via gimple_set_uid so later code can tell whether a
     freeing call happened after them in the same block.  */
  uint64_t freeing_call_events;

  /* True if BB is currently being visited during computation
     of IMM_DOM_PATH_WITH_FREEING_CALL_P flag.  Acts as a cycle
     guard for the recursive walk.  */
  bool being_visited_p;

  /* True if this BB has been visited in the dominator walk, i.e.
     the walk has already left it, so it no longer dominates the
     block currently being processed.  */
  bool visited_p;
};
80 :
81 : /* If T has a single definition of form T = T2, return T2. */
82 :
83 : static gimple *
84 11812 : maybe_get_single_definition (tree t)
85 : {
86 11812 : if (TREE_CODE (t) == SSA_NAME)
87 : {
88 6752 : gimple *g = SSA_NAME_DEF_STMT (t);
89 6752 : if (gimple_assign_single_p (g))
90 4151 : return g;
91 : }
92 : return NULL;
93 : }
94 :
/* Tree triplet for vptr_check_map.  Holds the three UBSAN_VPTR call
   arguments (first, second and fourth) that identify a vptr check.  */
struct sanopt_tree_triplet
{
  tree t1, t2, t3;
};
100 :
/* Traits class for tree triplet hash maps below.  */

struct sanopt_tree_triplet_hash : typed_noop_remove <sanopt_tree_triplet>
{
  typedef sanopt_tree_triplet value_type;
  typedef sanopt_tree_triplet compare_type;

  /* Combine the hashes of all three trees.  */
  static hashval_t
  hash (const sanopt_tree_triplet &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.t1, hstate);
    inchash::add_expr (ref.t2, hstate);
    inchash::add_expr (ref.t3, hstate);
    return hstate.end ();
  }

  /* Triplets are equal iff all three components are operand-equal.  */
  static bool
  equal (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2)
  {
    return operand_equal_p (ref1.t1, ref2.t1, 0)
	   && operand_equal_p (ref1.t2, ref2.t2, 0)
	   && operand_equal_p (ref1.t3, ref2.t3, 0);
  }

  /* Deleted slots are marked with the sentinel pointer value 1 in T1.  */
  static void
  mark_deleted (sanopt_tree_triplet &ref)
  {
    ref.t1 = reinterpret_cast<tree> (1);
  }

  /* All-zero bits is a valid empty entry (T1 == NULL).  */
  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_triplet &ref)
  {
    ref.t1 = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == NULL;
  }
};
152 :
/* Tree couple for ptr_check_map.  Pairs the checked pointer with the
   sign (non-negative vs. negative) of the checked offset.  */
struct sanopt_tree_couple
{
  tree ptr;
  bool pos_p;
};
159 :
/* Traits class for tree couple hash maps below.  */

struct sanopt_tree_couple_hash : typed_noop_remove <sanopt_tree_couple>
{
  typedef sanopt_tree_couple value_type;
  typedef sanopt_tree_couple compare_type;

  /* Mix the pointer expression's hash with the offset-sign flag.  */
  static hashval_t
  hash (const sanopt_tree_couple &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.ptr, hstate);
    hstate.add_int (ref.pos_p);
    return hstate.end ();
  }

  /* Couples are equal iff pointers are operand-equal and signs match.  */
  static bool
  equal (const sanopt_tree_couple &ref1, const sanopt_tree_couple &ref2)
  {
    return operand_equal_p (ref1.ptr, ref2.ptr, 0)
	   && ref1.pos_p == ref2.pos_p;
  }

  /* Deleted slots are marked with the sentinel pointer value 1 in PTR.  */
  static void
  mark_deleted (sanopt_tree_couple &ref)
  {
    ref.ptr = reinterpret_cast<tree> (1);
  }

  /* All-zero bits is a valid empty entry (PTR == NULL).  */
  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_couple &ref)
  {
    ref.ptr = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_couple &ref)
  {
    return ref.ptr == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_couple &ref)
  {
    return ref.ptr == NULL;
  }
};
209 :
/* This is used to carry various hash maps and variables used
   in sanopt_optimize_walker.  */

class sanopt_ctx
{
public:
  /* This map maps a pointer (the first argument of UBSAN_NULL) to
     a vector of UBSAN_NULL call statements that check this pointer.  */
  hash_map<tree, auto_vec<gimple *> > null_check_map;

  /* This map maps a pointer (the second argument of ASAN_CHECK) to
     a vector of ASAN_CHECK call statements that check the access.  */
  hash_map<tree_operand_hash, auto_vec<gimple *> > asan_check_map;

  /* This map maps a tree triplet (the first, second and fourth argument
     of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
     that virtual table pointer.  */
  hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > vptr_check_map;

  /* This map maps a couple (tree and boolean) to a vector of UBSAN_PTR
     call statements that check that pointer overflow.  */
  hash_map<sanopt_tree_couple_hash, auto_vec<gimple *> > ptr_check_map;

  /* Number of IFN_ASAN_CHECK statements remaining after optimization.  */
  int asan_num_accesses;

  /* True when the current function contains an ASAN_MARK.  */
  bool contains_asan_mark;
};
239 :
/* Return true if there might be any call to free/munmap operation
   on any path in between DOM (which should be imm(BB)) and BB.
   The result is memoized in BB's sanopt_info; BEING_VISITED_P guards
   against infinite recursion on CFG cycles.  */

static bool
imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
{
  sanopt_info *info = (sanopt_info *) bb->aux;
  edge e;
  edge_iterator ei;

  /* Use the cached answer if we've been here before.  */
  if (info->imm_dom_path_with_freeing_call_computed_p)
    return info->imm_dom_path_with_freeing_call_p;

  info->being_visited_p = true;

  /* Pass 1: look for predecessors whose answer is already known.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if ((pred_info->imm_dom_path_with_freeing_call_computed_p
	   && pred_info->imm_dom_path_with_freeing_call_p)
	  || (pred_info->has_freeing_call_computed_p
	      && pred_info->has_freeing_call_p))
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  /* Pass 2: scan the statements of predecessors whose HAS_FREEING_CALL_P
     has not been computed yet.  A call that isn't known to be nonfreeing,
     or an asm that clobbers memory or is volatile, counts as freeing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if (pred_info->has_freeing_call_computed_p)
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gasm *asm_stmt;

	  if ((is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
	      || ((asm_stmt = dyn_cast <gasm *> (stmt))
		  && (gimple_asm_clobbers_memory_p (asm_stmt)
		      || gimple_asm_volatile_p (asm_stmt))))
	    {
	      pred_info->has_freeing_call_p = true;
	      break;
	    }
	}

      pred_info->has_freeing_call_computed_p = true;
      if (pred_info->has_freeing_call_p)
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  /* Pass 3: recurse along the dominator chain from each predecessor up
     to DOM, stopping at blocks already on the current recursion stack.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->src == dom)
	continue;

      basic_block src;
      for (src = e->src; src != dom; )
	{
	  sanopt_info *pred_info = (sanopt_info *) src->aux;
	  if (pred_info->being_visited_p)
	    break;
	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src);
	  if (imm_dom_path_with_freeing_call (src, imm))
	    {
	      info->imm_dom_path_with_freeing_call_computed_p = true;
	      info->imm_dom_path_with_freeing_call_p = true;
	      info->being_visited_p = false;
	      return true;
	    }
	  src = imm;
	}
    }

  /* No freeing call found on any path; cache the negative answer.  */
  info->imm_dom_path_with_freeing_call_computed_p = true;
  info->imm_dom_path_with_freeing_call_p = false;
  info->being_visited_p = false;
  return false;
}
338 :
339 : /* Get the first dominating check from the list of stored checks.
340 : Non-dominating checks are silently dropped. */
341 :
342 : static gimple *
343 19773 : maybe_get_dominating_check (auto_vec<gimple *> &v)
344 : {
345 21170 : for (; !v.is_empty (); v.pop ())
346 : {
347 5532 : gimple *g = v.last ();
348 5532 : sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
349 5532 : if (!si->visited_p)
350 : /* At this point we shouldn't have any statements
351 : that aren't dominating the current BB. */
352 : return g;
353 : }
354 : return NULL;
355 : }
356 :
/* Optimize away redundant UBSAN_NULL calls.  Returns true if STMT
   (a UBSAN_NULL internal call) can be removed because a recorded
   dominating check already covers it.  */

static bool
maybe_optimize_ubsan_null_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree cur_align = gimple_call_arg (stmt, 2);
  gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
  bool remove = false;

  auto_vec<gimple *> &v = ctx->null_check_map.get_or_insert (ptr);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
	 nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  /* We already have recorded a UBSAN_NULL check for this pointer.  Perhaps we
     can drop this one.  But only if this check doesn't specify stricter
     alignment.  */

  tree align = gimple_call_arg (g, 2);
  int kind = tree_to_shwi (gimple_call_arg (g, 1));
  /* If this is a NULL pointer check where we had segv anyway, we can
     remove it.  */
  if (integer_zerop (align)
      && (kind == UBSAN_LOAD_OF
	  || kind == UBSAN_STORE_OF
	  || kind == UBSAN_MEMBER_ACCESS))
    remove = true;
  /* Otherwise remove the check in non-recovering mode, or if the
     stmts have same location.  */
  else if (integer_zerop (align))
    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
	     || (flag_sanitize_trap & SANITIZE_NULL) != 0
	     || gimple_location (g) == gimple_location (stmt);
  /* The dominating check's alignment is at least as strict, so this
     alignment check is redundant under the same removal conditions.  */
  else if (tree_int_cst_le (cur_align, align))
    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
	     || (flag_sanitize_trap & SANITIZE_ALIGNMENT) != 0
	     || gimple_location (g) == gimple_location (stmt);

  /* If both checks are in the same block with the same alignment but the
     old one couldn't be removed, prefer recording the newer statement.  */
  if (!remove && gimple_bb (g) == gimple_bb (stmt)
      && tree_int_cst_compare (cur_align, align) == 0)
    v.pop ();

  if (!remove)
    v.safe_push (stmt);
  return remove;
}
410 :
411 : /* Return true when pointer PTR for a given CUR_OFFSET is already sanitized
412 : in a given sanitization context CTX. */
413 :
414 : static bool
415 1507 : has_dominating_ubsan_ptr_check (sanopt_ctx *ctx, tree ptr,
416 : offset_int &cur_offset)
417 : {
418 1507 : bool pos_p = !wi::neg_p (cur_offset);
419 1507 : sanopt_tree_couple couple;
420 1507 : couple.ptr = ptr;
421 1507 : couple.pos_p = pos_p;
422 :
423 1507 : auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
424 1507 : gimple *g = maybe_get_dominating_check (v);
425 1507 : if (!g)
426 : return false;
427 :
428 : /* We already have recorded a UBSAN_PTR check for this pointer. Perhaps we
429 : can drop this one. But only if this check doesn't specify larger offset.
430 : */
431 659 : tree offset = gimple_call_arg (g, 1);
432 659 : gcc_assert (TREE_CODE (offset) == INTEGER_CST);
433 1318 : offset_int ooffset = wi::sext (wi::to_offset (offset), POINTER_SIZE);
434 :
435 659 : if (pos_p)
436 : {
437 564 : if (wi::les_p (cur_offset, ooffset))
438 : return true;
439 : }
440 95 : else if (!pos_p && wi::les_p (ooffset, cur_offset))
441 : return true;
442 :
443 : return false;
444 : }
445 :
446 : /* Record UBSAN_PTR check of given context CTX. Register pointer PTR on
447 : a given OFFSET that it's handled by GIMPLE STMT. */
448 :
449 : static void
450 733 : record_ubsan_ptr_check_stmt (sanopt_ctx *ctx, gimple *stmt, tree ptr,
451 : const offset_int &offset)
452 : {
453 733 : sanopt_tree_couple couple;
454 733 : couple.ptr = ptr;
455 733 : couple.pos_p = !wi::neg_p (offset);
456 :
457 733 : auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
458 733 : v.safe_push (stmt);
459 733 : }
460 :
/* Optimize away redundant UBSAN_PTR calls.  Returns true if STMT
   (a UBSAN_PTR internal call) is provably redundant, either because
   of a dominating check or because the offset stays within the bounds
   of a known-size local/global variable.  */

static bool
maybe_optimize_ubsan_ptr_ifn (sanopt_ctx *ctx, gimple *stmt)
{
  poly_int64 bitsize, pbitpos;
  machine_mode mode;
  int volatilep = 0, reversep, unsignedp = 0;
  tree offset;

  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree off = gimple_call_arg (stmt, 1);

  /* Variable offsets can't be reasoned about here.  */
  if (TREE_CODE (off) != INTEGER_CST)
    return false;

  /* Adding zero can never overflow the pointer.  */
  if (integer_zerop (off))
    return true;

  offset_int cur_offset = wi::sext (wi::to_offset (off), POINTER_SIZE);
  if (has_dominating_ubsan_ptr_check (ctx, ptr, cur_offset))
    return true;

  tree base = ptr;
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base = TREE_OPERAND (base, 0);

      HOST_WIDE_INT bitpos;
      base = get_inner_reference (base, &bitsize, &pbitpos, &offset, &mode,
				  &unsignedp, &reversep, &volatilep);
      /* Only handle decls (not register-allocated) with a constant
	 position within the containing object.  */
      if ((offset == NULL_TREE || TREE_CODE (offset) == INTEGER_CST)
	  && DECL_P (base)
	  && ((!VAR_P (base)
	       && TREE_CODE (base) != PARM_DECL
	       && TREE_CODE (base) != RESULT_DECL)
	      || !DECL_REGISTER (base))
	  && pbitpos.is_constant (&bitpos))
	{
	  /* EXPR_OFFSET is the byte offset of PTR within BASE.  */
	  offset_int expr_offset;
	  if (offset)
	    expr_offset = wi::to_offset (offset) + bitpos / BITS_PER_UNIT;
	  else
	    expr_offset = bitpos / BITS_PER_UNIT;
	  expr_offset = wi::sext (expr_offset, POINTER_SIZE);
	  offset_int total_offset = expr_offset + cur_offset;
	  /* If the combined offset itself wraps in a pointer-sized
	     signed value, nothing more can be concluded; just record
	     the check for PTR.  */
	  if (total_offset != wi::sext (total_offset, POINTER_SIZE))
	    {
	      record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);
	      return false;
	    }

	  /* If BASE is a fixed size automatic variable or
	     global variable defined in the current TU, we don't have
	     to instrument anything if offset is within address
	     of the variable.  */
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && DECL_SIZE_UNIT (base)
	      && TREE_CODE (DECL_SIZE_UNIT (base)) == INTEGER_CST
	      && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
	    {
	      offset_int base_size = wi::to_offset (DECL_SIZE_UNIT (base));
	      if (!wi::neg_p (expr_offset)
		  && wi::les_p (total_offset, base_size))
		{
		  if (!wi::neg_p (total_offset)
		      && wi::les_p (total_offset, base_size))
		    return true;
		}
	    }

	  /* Following expression: UBSAN_PTR (&MEM_REF[ptr + x], y) can be
	     handled as follows:

	     1) sign (x) == sign (y), then check for dominating check of (x + y)
	     2) sign (x) != sign (y), then first check if we have a dominating
		check for ptr + x.  If so, then we have 2 situations:
		a) sign (x) == sign (x + y), here we are done, example:
		   UBSAN_PTR (&MEM_REF[ptr + 100], -50)
		b) check for dominating check of ptr + x + y.
	     */

	  bool sign_cur_offset = !wi::neg_p (cur_offset);
	  bool sign_expr_offset = !wi::neg_p (expr_offset);

	  tree base_addr
	    = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (base)), base);

	  bool add = false;
	  if (sign_cur_offset == sign_expr_offset)
	    {
	      /* Case 1: same signs, total offset can't cross zero.  */
	      if (has_dominating_ubsan_ptr_check (ctx, base_addr, total_offset))
		return true;
	      else
		add = true;
	    }
	  else
	    {
	      /* Case 2: opposite signs; need a dominating check on the
		 base offset first.  */
	      if (!has_dominating_ubsan_ptr_check (ctx, base_addr, expr_offset))
		; /* Don't record base_addr + expr_offset, it's not a guarding
		     check.  */
	      else
		{
		  bool sign_total_offset = !wi::neg_p (total_offset);
		  if (sign_expr_offset == sign_total_offset)
		    /* Case 2a: the addition stays on the checked side.  */
		    return true;
		  else
		    {
		      /* Case 2b: look for a check on the full offset.  */
		      if (has_dominating_ubsan_ptr_check (ctx, base_addr,
							  total_offset))
			return true;
		      else
			add = true;
		    }
		}
	    }

	  /* Record a new dominating check for base_addr + total_offset.  */
	  if (add && !operand_equal_p (base, base_addr, 0))
	    record_ubsan_ptr_check_stmt (ctx, stmt, base_addr,
					 total_offset);
	}
    }

  /* For this PTR we don't have any UBSAN_PTR stmts recorded, so there's
     nothing to optimize yet.  */
  record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);

  return false;
}
594 :
595 : /* Optimize away redundant UBSAN_VPTR calls. The second argument
596 : is the value loaded from the virtual table, so rely on FRE to find out
597 : when we can actually optimize. */
598 :
599 : static bool
600 76 : maybe_optimize_ubsan_vptr_ifn (class sanopt_ctx *ctx, gimple *stmt)
601 : {
602 76 : gcc_assert (gimple_call_num_args (stmt) == 5);
603 76 : sanopt_tree_triplet triplet;
604 76 : triplet.t1 = gimple_call_arg (stmt, 0);
605 76 : triplet.t2 = gimple_call_arg (stmt, 1);
606 76 : triplet.t3 = gimple_call_arg (stmt, 3);
607 :
608 76 : auto_vec<gimple *> &v = ctx->vptr_check_map.get_or_insert (triplet);
609 76 : gimple *g = maybe_get_dominating_check (v);
610 76 : if (!g)
611 : {
612 : /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
613 : nothing to optimize yet. */
614 68 : v.safe_push (stmt);
615 68 : return false;
616 : }
617 :
618 : return true;
619 : }
620 :
621 : /* Checks whether value of T in CHECK and USE is the same. */
622 :
623 : static bool
624 23 : same_value_p (gimple *check, gimple *use, tree t)
625 : {
626 23 : tree check_vuse = gimple_vuse (check);
627 23 : tree use_vuse = gimple_vuse (use);
628 :
629 23 : if (TREE_CODE (t) == SSA_NAME
630 23 : || is_gimple_min_invariant (t)
631 46 : || ! use_vuse)
632 9 : return true;
633 :
634 14 : if (check_vuse == use_vuse)
635 : return true;
636 :
637 : return false;
638 : }
639 :
/* Returns TRUE if ASan check of length LEN in block BB can be removed
   if preceded by checks in V.  BASE_STMT/BASE_ADDR, when non-null,
   identify the definition the checked pointer was loaded from, used
   for an aliasing sanity check.  As a side effect, prunes from V
   checks that can no longer be used (invalidated by freeing calls or
   superseded by the current one).  */

static bool
can_remove_asan_check (auto_vec<gimple *> &v, tree len, basic_block bb,
		       gimple *base_stmt, tree base_addr)
{
  unsigned int i;
  gimple *g;
  gimple *to_pop = NULL;
  bool remove = false;
  basic_block last_bb = bb;
  bool cleanup = false;

  FOR_EACH_VEC_ELT_REVERSE (v, i, g)
    {
      basic_block gbb = gimple_bb (g);
      sanopt_info *si = (sanopt_info *) gbb->aux;
      /* gimple_uid holds the freeing-event counter at the time G was
	 recorded; a larger current counter means a freeing call
	 happened after G in its block.  */
      if (gimple_uid (g) < si->freeing_call_events)
	{
	  /* If there is a potentially freeing call after g in gbb, we should
	     remove it from the vector, can't use in optimization.  */
	  cleanup = true;
	  continue;
	}

      tree glen = gimple_call_arg (g, 2);
      gcc_assert (TREE_CODE (glen) == INTEGER_CST);

      /* If we've checked only smaller length than we want to check now,
	 we can't remove the current stmt.  If g is in the same basic block,
	 we want to remove it though, as the current stmt is better.  */
      if (tree_int_cst_lt (glen, len))
	{
	  if (gbb == bb)
	    {
	      to_pop = g;
	      cleanup = true;
	    }
	  continue;
	}

      /* Walk up the dominator chain from BB towards GBB, stopping if a
	 freeing call might occur on the way.  */
      while (last_bb != gbb)
	{
	  /* Paths from last_bb to bb have been checked before.
	     gbb is necessarily a dominator of last_bb, but not necessarily
	     immediate dominator.  */
	  if (((sanopt_info *) last_bb->aux)->freeing_call_events)
	    break;

	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb);
	  gcc_assert (imm);
	  if (imm_dom_path_with_freeing_call (last_bb, imm))
	    break;

	  last_bb = imm;
	}
      if (last_bb != gbb)
	break;
      // In case of base_addr residing in memory we also need to check aliasing
      remove = ! base_addr || same_value_p (g, base_stmt, base_addr);
      break;
    }

  if (cleanup)
    {
      /* Compact V in place, dropping TO_POP and any check recorded
	 before a subsequent freeing call in its block.  */
      unsigned int j = 0, l = v.length ();
      for (i = 0; i < l; i++)
	if (v[i] != to_pop
	    && (gimple_uid (v[i])
		== ((sanopt_info *)
		    gimple_bb (v[i])->aux)->freeing_call_events))
	  {
	    if (i != j)
	      v[j] = v[i];
	    j++;
	  }
      v.truncate (j);
    }

  return remove;
}
722 :
/* Optimize away redundant ASAN_CHECK calls.  Returns true if STMT
   (an ASAN_CHECK/HWASAN_CHECK internal call) is covered by a recorded
   dominating check, either on the same pointer or on its base
   address.  */

static bool
maybe_optimize_asan_check_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 4);
  tree ptr = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  basic_block bb = gimple_bb (stmt);
  sanopt_info *info = (sanopt_info *) bb->aux;

  if (TREE_CODE (len) != INTEGER_CST)
    return false;
  if (integer_zerop (len))
    return false;

  /* Timestamp the check with the current freeing-event counter so
     later calls can tell whether a freeing call followed it.  */
  gimple_set_uid (stmt, info->freeing_call_events);

  auto_vec<gimple *> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);

  gimple *base_stmt = maybe_get_single_definition (ptr);
  tree base_addr = base_stmt ? gimple_assign_rhs1 (base_stmt) : NULL_TREE;
  auto_vec<gimple *> *base_checks = NULL;
  if (base_addr)
    {
      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
      /* Original pointer might have been invalidated.  The second
	 get_or_insert may have resized the hash map, so re-fetch.  */
      ptr_checks = ctx->asan_check_map.get (ptr);
    }

  gimple *g = maybe_get_dominating_check (*ptr_checks);
  gimple *g2 = NULL;

  if (base_checks)
    /* Try with base address as well.  */
    g2 = maybe_get_dominating_check (*base_checks);

  if (g == NULL && g2 == NULL)
    {
      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
	 nothing to optimize yet.  */
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
      return false;
    }

  bool remove = false;

  if (ptr_checks)
    remove = can_remove_asan_check (*ptr_checks, len, bb, NULL, NULL);

  if (!remove && base_checks)
    /* Try with base address as well.  */
    remove = can_remove_asan_check (*base_checks, len, bb, base_stmt,
				    base_addr);

  if (!remove)
    {
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
    }

  return remove;
}
789 :
/* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls.

   We walk blocks in the CFG via a depth first search of the dominator
   tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector
   in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the
   blocks.  When leaving a block, we mark the block as visited; then
   when checking the statements in the vector, we ignore statements that
   are coming from already visited blocks, because these cannot dominate
   anything anymore.  CTX is a sanopt context.  */

static void
sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  sanopt_info *info = (sanopt_info *) bb->aux;
  bool asan_check_optimize
    = ((flag_sanitize & (SANITIZE_ADDRESS | SANITIZE_HWADDRESS)) != 0);

  /* Note: gsi_next is only called when the statement is kept; removal
     advances the iterator implicitly.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      bool remove = false;

      if (!is_gimple_call (stmt))
	{
	  /* Handle asm volatile or asm with "memory" clobber
	     the same as potentially freeing call.  */
	  gasm *asm_stmt = dyn_cast <gasm *> (stmt);
	  if (asm_stmt
	      && asan_check_optimize
	      && (gimple_asm_clobbers_memory_p (asm_stmt)
		  || gimple_asm_volatile_p (asm_stmt)))
	    info->freeing_call_events++;
	  gsi_next (&gsi);
	  continue;
	}

      if (asan_check_optimize && !nonfreeing_call_p (stmt))
	info->freeing_call_events++;

      /* If __asan_before_dynamic_init ("module"); is followed by
	 __asan_after_dynamic_init (); without intervening memory loads/stores,
	 there is nothing to guard, so optimize both away.  */
      if (asan_check_optimize
	  && gimple_call_builtin_p (stmt, BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT))
	{
	  gcc_assert (!hwasan_sanitize_p ());
	  use_operand_p use;
	  gimple *use_stmt;
	  if (single_imm_use (gimple_vdef (stmt), &use, &use_stmt))
	    {
	      if (is_gimple_call (use_stmt)
		  && gimple_call_builtin_p (use_stmt,
					    BUILT_IN_ASAN_AFTER_DYNAMIC_INIT))
		{
		  unlink_stmt_vdef (use_stmt);
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_remove (&gsi2, true);
		  remove = true;
		}
	    }
	}

      /* Dispatch each sanitizer internal function to its optimizer.  */
      if (gimple_call_internal_p (stmt))
	switch (gimple_call_internal_fn (stmt))
	  {
	  case IFN_UBSAN_NULL:
	    remove = maybe_optimize_ubsan_null_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_VPTR:
	    remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_PTR:
	    remove = maybe_optimize_ubsan_ptr_ifn (ctx, stmt);
	    break;
	  case IFN_HWASAN_CHECK:
	  case IFN_ASAN_CHECK:
	    if (asan_check_optimize)
	      remove = maybe_optimize_asan_check_ifn (ctx, stmt);
	    if (!remove)
	      ctx->asan_num_accesses++;
	    break;
	  case IFN_ASAN_MARK:
	    ctx->contains_asan_mark = true;
	    break;
	  default:
	    break;
	  }

      if (remove)
	{
	  /* Drop this check.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimizing out: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Leaving: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  gsi_next (&gsi);
	}
    }

  if (asan_check_optimize)
    {
      info->has_freeing_call_p = info->freeing_call_events != 0;
      info->has_freeing_call_computed_p = true;
    }

  /* Recurse into dominated blocks.  */
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    sanopt_optimize_walker (son, ctx);

  /* We're leaving this BB, so mark it to that effect.  */
  info->visited_p = true;
}
917 :
/* Try to remove redundant sanitizer checks in function FUN.  Sets
   *CONTAINS_ASAN_MARK when an ASAN_MARK call was seen, and returns the
   number of IFN_ASAN_CHECK statements left after optimization.  */

static int
sanopt_optimize (function *fun, bool *contains_asan_mark)
{
  class sanopt_ctx ctx;
  ctx.asan_num_accesses = 0;
  ctx.contains_asan_mark = false;

  /* Set up block info for each basic block.  Attaches a zeroed
     sanopt_info to every block's AUX field.  */
  alloc_aux_for_blocks (sizeof (sanopt_info));

  /* We're going to do a dominator walk, so ensure that we have
     dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Recursively walk the dominator tree optimizing away
     redundant checks.  */
  sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx);

  free_aux_for_blocks ();

  *contains_asan_mark = ctx.contains_asan_mark;
  return ctx.asan_num_accesses;
}
943 :
944 : /* Perform optimization of sanitize functions. */
945 :
946 : namespace {
947 :
/* Pass descriptor for the sanopt GIMPLE pass.  */
const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
960 :
/* The sanopt pass: lowers and optimizes sanitizer internal functions.  */
class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  /* Run whenever any sanitizer not fully handled elsewhere is active.  */
  bool gate (function *) final override
  {
    /* SANITIZE_RETURN is handled in the front-end.  When trapping,
       SANITIZE_UNREACHABLE is handled by builtin_decl_unreachable.  */
    unsigned int mask = SANITIZE_RETURN;
    if (flag_sanitize_trap & SANITIZE_UNREACHABLE)
      mask |= SANITIZE_UNREACHABLE;
    return flag_sanitize & ~mask;
  }
  unsigned int execute (function *) final override;

}; // class pass_sanopt
981 :
982 : /* Sanitize all ASAN_MARK unpoison calls that are not reachable by a BB
983 : that contains an ASAN_MARK poison. All these ASAN_MARK unpoison call
984 : can be removed as all variables are unpoisoned in a function prologue. */
985 :
986 : static void
987 1436 : sanitize_asan_mark_unpoison (void)
988 : {
989 : /* 1) Find all BBs that contain an ASAN_MARK poison call. */
990 1436 : auto_bitmap with_poison;
991 1436 : basic_block bb;
992 :
993 14966 : FOR_EACH_BB_FN (bb, cfun)
994 : {
995 13530 : gimple_stmt_iterator gsi;
996 163516 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
997 : {
998 71292 : gimple *stmt = gsi_stmt (gsi);
999 71292 : if (asan_mark_p (stmt, ASAN_MARK_POISON))
1000 : {
1001 3064 : bitmap_set_bit (with_poison, bb->index);
1002 3064 : break;
1003 : }
1004 : }
1005 : }
1006 :
1007 1436 : auto_sbitmap poisoned (last_basic_block_for_fn (cfun) + 1);
1008 1436 : bitmap_clear (poisoned);
1009 : /* We now treat with_poison as worklist. */
1010 1436 : bitmap worklist = with_poison;
1011 :
1012 : /* 2) Propagate the information to all reachable blocks. */
 : /* Forward dataflow over successor edges: any block reachable from a
 : block containing a poison is itself marked poisoned. */
1013 13572 : while (!bitmap_empty_p (worklist))
1014 : {
1015 10700 : unsigned i = bitmap_clear_first_set_bit (worklist);
1016 10700 : basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1017 10700 : gcc_assert (bb);
1018 :
1019 10700 : edge e;
1020 10700 : edge_iterator ei;
1021 22110 : FOR_EACH_EDGE (e, ei, bb->succs)
1022 11410 : if (!bitmap_bit_p (poisoned, e->dest->index))
1023 : {
1024 8346 : bitmap_set_bit (poisoned, e->dest->index);
1025 8346 : bitmap_set_bit (worklist, e->dest->index);
1026 : }
1027 : }
1028 :
1029 : /* 3) Iterate all BBs not included in POISONED BBs and remove unpoison
1030 : ASAN_MARK preceding an ASAN_MARK poison (which can still happen). */
1031 14966 : FOR_EACH_BB_FN (bb, cfun)
1032 : {
1033 13530 : if (bitmap_bit_p (poisoned, bb->index))
1034 6939 : continue;
1035 :
1036 6591 : gimple_stmt_iterator gsi;
 : /* Scan forward; stop at the first poison, since anything after it
 : is again governed by that poison. */
1037 55909 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1038 : {
1039 44454 : gimple *stmt = gsi_stmt (gsi);
1040 44454 : if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1041 : {
1042 3972 : if (asan_mark_p (stmt, ASAN_MARK_POISON))
1043 : break;
1044 : else
1045 : {
1046 2245 : if (dump_file)
1047 2 : fprintf (dump_file, "Removing ASAN_MARK unpoison\n");
1048 2245 : unlink_stmt_vdef (stmt);
1049 2245 : release_defs (stmt);
1050 2245 : gsi_remove (&gsi, true);
1051 2245 : continue;
1052 : }
1053 : }
1054 :
1055 40482 : gsi_next (&gsi);
1056 : }
1057 : }
1058 1436 : }
1059 :
1060 : /* Return true when STMT is either ASAN_CHECK call or a call of a function
1061 : that can contain an ASAN_CHECK. */
1062 :
1063 : static bool
1064 59755 : maybe_contains_asan_check (gimple *stmt)
1065 : {
1066 59755 : if (is_gimple_call (stmt))
1067 : {
 : /* An ASAN_MARK itself never expands to an access check. */
1068 14057 : if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1069 : return false;
1070 : else
 : /* ECF_CONST calls cannot contain an ASAN_CHECK; everything
 : else is conservatively assumed to. */
1071 7473 : return !(gimple_call_flags (stmt) & ECF_CONST);
1072 : }
 : /* Conservatively treat inline asm as if it could contain a check. */
1073 45698 : else if (is_a<gasm *> (stmt))
1074 86 : return true;
1075 :
1076 : return false;
1077 : }
1078 :
1079 : /* Sanitize all ASAN_MARK poison calls that are not followed by an ASAN_CHECK
1080 : call. These calls can be removed. */
1081 :
1082 : static void
1083 1436 : sanitize_asan_mark_poison (void)
1084 : {
1085 : /* 1) Find all BBs that possibly contain an ASAN_CHECK. */
1086 1436 : auto_bitmap with_check;
1087 1436 : basic_block bb;
1088 :
1089 14966 : FOR_EACH_BB_FN (bb, cfun)
1090 : {
1091 13530 : gimple_stmt_iterator gsi;
1092 104438 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
1093 : {
1094 44472 : gimple *stmt = gsi_stmt (gsi);
1095 44472 : if (maybe_contains_asan_check (stmt))
1096 : {
1097 5783 : bitmap_set_bit (with_check, bb->index);
1098 5783 : break;
1099 : }
1100 : }
1101 : }
1102 :
1103 1436 : auto_sbitmap can_reach_check (last_basic_block_for_fn (cfun) + 1);
1104 1436 : bitmap_clear (can_reach_check);
1105 : /* We now treat with_check as worklist. */
1106 1436 : bitmap worklist = with_check;
1107 :
1108 : /* 2) Propagate the information to all definitions blocks. */
 : /* Backward dataflow over predecessor edges: a predecessor of a block
 : that can reach a check can reach one too. */
1109 18557 : while (!bitmap_empty_p (worklist))
1110 : {
1111 15685 : unsigned i = bitmap_clear_first_set_bit (worklist);
1112 15685 : basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1113 15685 : gcc_assert (bb);
1114 :
1115 15685 : edge e;
1116 15685 : edge_iterator ei;
1117 33995 : FOR_EACH_EDGE (e, ei, bb->preds)
1118 18310 : if (!bitmap_bit_p (can_reach_check, e->src->index))
1119 : {
1120 10011 : bitmap_set_bit (can_reach_check, e->src->index);
1121 10011 : bitmap_set_bit (worklist, e->src->index);
1122 : }
1123 : }
1124 :
1125 : /* 3) Iterate all BBs not included in CAN_REACH_CHECK BBs and remove poison
1126 : ASAN_MARK not followed by a call to function having an ASAN_CHECK. */
1127 14966 : FOR_EACH_BB_FN (bb, cfun)
1128 : {
1129 13530 : if (bitmap_bit_p (can_reach_check, bb->index))
1130 8651 : continue;
1131 :
1132 4879 : gimple_stmt_iterator gsi;
 : /* Scan backwards so a poison is removed only when no later statement
 : in the block might contain a check. */
1133 4879 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1134 : {
1135 15283 : gimple *stmt = gsi_stmt (gsi);
1136 15283 : if (maybe_contains_asan_check (stmt))
1137 : break;
1138 13541 : else if (asan_mark_p (stmt, ASAN_MARK_POISON))
1139 : {
1140 2783 : if (dump_file)
1141 2 : fprintf (dump_file, "Removing ASAN_MARK poison\n");
1142 2783 : unlink_stmt_vdef (stmt);
1143 2783 : release_defs (stmt);
 : /* Step the iterator before removal so it stays valid. */
1144 2783 : gimple_stmt_iterator gsi2 = gsi;
1145 2783 : gsi_prev (&gsi);
1146 2783 : gsi_remove (&gsi2, true);
1147 2783 : continue;
1148 2783 : }
1149 :
1150 29178 : gsi_prev (&gsi);
1151 : }
1152 : }
1153 1436 : }
1154 :
1155 : /* Rewrite all usages of tree OP which is a PARM_DECL with a VAR_DECL
1156 : that is it's DECL_VALUE_EXPR. */
1157 :
 : /* walk_tree / walk_gimple_stmt callback.  Returns NULL so the walk
 : always continues over the whole statement. */
1158 : static tree
1159 6567 : rewrite_usage_of_param (tree *op, int *walk_subtrees, void *)
1160 : {
1161 6567 : if (TREE_CODE (*op) == PARM_DECL && DECL_HAS_VALUE_EXPR_P (*op))
1162 : {
 : /* Substitute the value expr and do not walk into the replacement. */
1163 251 : *op = DECL_VALUE_EXPR (*op);
1164 251 : *walk_subtrees = 0;
1165 : }
1166 :
1167 6567 : return NULL;
1168 : }
1169 :
1170 : /* For a given function FUN, rewrite all addressable parameters so that
1171 : a new automatic variable is introduced. Right after function entry
1172 : a parameter is assigned to the variable. */
1173 :
1174 : static void
1175 6080 : sanitize_rewrite_addressable_params (function *fun)
1176 : {
1177 6080 : gimple *g;
1178 6080 : gimple_seq stmts = NULL;
1179 6080 : bool has_any_addressable_param = false;
1180 6080 : auto_vec<tree> clear_value_expr_list;
1181 :
 : /* Pass 1: for each suitable addressable parameter, create a local
 : VAR_DECL replacement and queue the copy-in assignment. */
1182 6080 : for (tree arg = DECL_ARGUMENTS (current_function_decl);
1183 12290 : arg; arg = DECL_CHAIN (arg))
1184 : {
1185 6210 : tree type = TREE_TYPE (arg);
 : /* Only rewrite params with a constant-size, non-volatile,
 : non-addressable type whose address is taken. */
1186 6210 : if (TREE_ADDRESSABLE (arg)
1187 135 : && !TREE_ADDRESSABLE (type)
1188 135 : && !TREE_THIS_VOLATILE (arg)
1189 6339 : && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
1190 : {
1191 123 : TREE_ADDRESSABLE (arg) = 0;
1192 123 : DECL_NOT_GIMPLE_REG_P (arg) = 0;
1193 : /* The parameter is no longer addressable. */
1194 123 : has_any_addressable_param = true;
1195 :
1196 : /* Create a new automatic variable. */
1197 123 : tree var = build_decl (DECL_SOURCE_LOCATION (arg),
1198 123 : VAR_DECL, DECL_NAME (arg), type);
1199 123 : TREE_ADDRESSABLE (var) = 1;
1200 123 : DECL_IGNORED_P (var) = 1;
1201 :
1202 123 : gimple_add_tmp_var (var);
1203 :
1204 : /* We skip parameters that have a DECL_VALUE_EXPR. */
1205 123 : if (DECL_HAS_VALUE_EXPR_P (arg))
1206 1 : continue;
1207 :
1208 122 : if (dump_file)
1209 : {
1210 0 : fprintf (dump_file,
1211 : "Rewriting parameter whose address is taken: ");
1212 0 : print_generic_expr (dump_file, arg, dump_flags);
1213 0 : fputc ('\n', dump_file);
1214 : }
1215 :
1216 122 : SET_DECL_PT_UID (var, DECL_PT_UID (arg));
1217 :
1218 : /* Assign value of parameter to newly created variable. */
1219 122 : if ((TREE_CODE (type) == COMPLEX_TYPE
1220 122 : || TREE_CODE (type) == VECTOR_TYPE))
1221 : {
1222 : /* We need to create a SSA name that will be used for the
1223 : assignment. */
1224 20 : tree tmp = get_or_create_ssa_default_def (cfun, arg);
1225 20 : g = gimple_build_assign (var, tmp);
1226 20 : gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
1227 20 : gimple_seq_add_stmt (&stmts, g);
1228 : }
1229 : else
1230 : {
1231 102 : g = gimple_build_assign (var, arg);
1232 102 : gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
1233 102 : gimple_seq_add_stmt (&stmts, g);
1234 : }
1235 :
 : /* Keep debug info usable: bind the old param to the new var. */
1236 122 : if (target_for_debug_bind (arg))
1237 : {
1238 38 : g = gimple_build_debug_bind (arg, var, NULL);
1239 38 : gimple_seq_add_stmt (&stmts, g);
1240 38 : clear_value_expr_list.safe_push (arg);
1241 : }
1242 :
1243 122 : DECL_HAS_VALUE_EXPR_P (arg) = 1;
1244 122 : SET_DECL_VALUE_EXPR (arg, var);
1245 : }
1246 : }
1247 :
1248 6080 : if (!has_any_addressable_param)
1249 5984 : return;
1250 :
1251 : /* Replace all usages of PARM_DECLs with the newly
1252 : created variable VAR. */
1253 96 : basic_block bb;
1254 438 : FOR_EACH_BB_FN (bb, fun)
1255 : {
1256 342 : gimple_stmt_iterator gsi;
1257 3189 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1258 : {
1259 2505 : gimple *stmt = gsi_stmt (gsi);
1260 2505 : gimple_stmt_iterator it = gsi_for_stmt (stmt);
1261 2505 : walk_gimple_stmt (&it, NULL, rewrite_usage_of_param, NULL);
1262 : }
 : /* PHI arguments are not visited by walk_gimple_stmt; walk them
 : explicitly. */
1263 438 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1264 : {
1265 96 : gphi *phi = dyn_cast<gphi *> (gsi_stmt (gsi));
1266 310 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
1267 : {
1268 214 : hash_set<tree> visited_nodes;
1269 214 : walk_tree (gimple_phi_arg_def_ptr (phi, i),
1270 : rewrite_usage_of_param, NULL, &visited_nodes);
1271 214 : }
1272 : }
1273 : }
1274 :
1275 : /* Unset value expr for parameters for which we created debug bind
1276 : expressions. */
1277 210 : for (tree arg : clear_value_expr_list)
1278 : {
1279 38 : DECL_HAS_VALUE_EXPR_P (arg) = 0;
1280 38 : SET_DECL_VALUE_EXPR (arg, NULL_TREE);
1281 : }
1282 :
1283 : /* Insert default assignments at the beginning of a function. */
1284 96 : basic_block entry_bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
1285 96 : entry_bb = split_edge (single_succ_edge (entry_bb));
1286 :
1287 96 : gimple_stmt_iterator gsi = gsi_start_bb (entry_bb);
1288 96 : gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
1289 6080 : }
1290 :
1291 : unsigned int
1292 19346 : pass_sanopt::execute (function *fun)
1293 : {
1294 : /* n.b. ASAN_MARK is used for both HWASAN and ASAN.
1295 : asan_num_accesses is hence used to count either HWASAN_CHECK or ASAN_CHECK
1296 : stuff. This is fine because you can only have one of these active at a
1297 : time. */
1298 19346 : basic_block bb;
1299 19346 : int asan_num_accesses = 0;
1300 19346 : bool contains_asan_mark = false;
1301 19346 : int ret = 0;
1302 :
1303 : /* Try to remove redundant checks. */
1304 19346 : if (optimize
1305 13930 : && (flag_sanitize
1306 13930 : & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_HWADDRESS
1307 : | SANITIZE_ADDRESS | SANITIZE_VPTR | SANITIZE_POINTER_OVERFLOW)))
1308 7517 : asan_num_accesses = sanopt_optimize (fun, &contains_asan_mark);
1309 11829 : else if (flag_sanitize & (SANITIZE_ADDRESS | SANITIZE_HWADDRESS))
1310 : {
1311 1365 : gimple_stmt_iterator gsi;
1312 6369 : FOR_EACH_BB_FN (bb, fun)
1313 30283 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1314 : {
1315 20275 : gimple *stmt = gsi_stmt (gsi);
1316 20275 : if (gimple_call_internal_p (stmt, IFN_ASAN_CHECK))
1317 2071 : ++asan_num_accesses;
1318 18204 : else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1319 728 : contains_asan_mark = true;
1320 : }
1321 : }
1322 :
1323 16702 : if (asan_num_accesses || contains_asan_mark || asan_sanitize_stack_p ()
1324 22309 : || hwasan_sanitize_stack_p ())
1325 6171 : asan_maybe_insert_dynamic_shadow_at_function_entry (fun);
1326 :
1327 19346 : if (contains_asan_mark)
1328 : {
1329 1436 : sanitize_asan_mark_unpoison ();
1330 1436 : sanitize_asan_mark_poison ();
1331 : }
1332 :
1333 19346 : if (asan_sanitize_stack_p () || hwassist_sanitize_stack_p ())
1334 6080 : sanitize_rewrite_addressable_params (fun);
1335 :
1336 38692 : bool use_calls = param_asan_instrumentation_with_call_threshold < INT_MAX
1337 19346 : && asan_num_accesses >= param_asan_instrumentation_with_call_threshold;
1338 :
1339 19346 : hash_map<tree, tree> shadow_vars_mapping;
1340 19346 : bool need_commit_edge_insert = false;
1341 192692 : FOR_EACH_BB_FN (bb, fun)
1342 : {
1343 173346 : gimple_stmt_iterator gsi;
1344 1003738 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1345 : {
1346 657046 : gimple *stmt = gsi_stmt (gsi);
1347 657046 : bool no_next = false;
1348 :
1349 657046 : if (!is_gimple_call (stmt))
1350 : {
1351 518502 : gsi_next (&gsi);
1352 518502 : continue;
1353 : }
1354 :
1355 138544 : if (gimple_call_internal_p (stmt))
1356 : {
1357 30025 : enum internal_fn ifn = gimple_call_internal_fn (stmt);
1358 30025 : int this_ret = TODO_cleanup_cfg;
1359 30025 : switch (ifn)
1360 : {
1361 2942 : case IFN_UBSAN_NULL:
1362 2942 : no_next = ubsan_expand_null_ifn (&gsi);
1363 2942 : break;
1364 1561 : case IFN_UBSAN_BOUNDS:
1365 1561 : no_next = ubsan_expand_bounds_ifn (&gsi);
1366 1561 : break;
1367 368 : case IFN_UBSAN_OBJECT_SIZE:
1368 368 : no_next = ubsan_expand_objsize_ifn (&gsi);
1369 368 : break;
1370 1558 : case IFN_UBSAN_PTR:
1371 1558 : no_next = ubsan_expand_ptr_ifn (&gsi);
1372 1558 : break;
1373 498 : case IFN_UBSAN_VPTR:
1374 498 : no_next = ubsan_expand_vptr_ifn (&gsi);
1375 498 : break;
1376 381 : case IFN_HWASAN_CHECK:
1377 381 : no_next = hwasan_expand_check_ifn (&gsi, use_calls);
1378 381 : break;
1379 13094 : case IFN_ASAN_CHECK:
1380 13094 : no_next = asan_expand_check_ifn (&gsi, use_calls);
1381 13094 : break;
1382 1902 : case IFN_ASAN_MARK:
1383 1902 : no_next = asan_expand_mark_ifn (&gsi);
1384 1902 : break;
1385 36 : case IFN_ASAN_POISON:
1386 36 : no_next = asan_expand_poison_ifn (&gsi,
1387 : &need_commit_edge_insert,
1388 : shadow_vars_mapping);
1389 36 : break;
1390 0 : case IFN_HWASAN_MARK:
1391 0 : no_next = hwasan_expand_mark_ifn (&gsi);
1392 0 : break;
1393 : default:
1394 : this_ret = 0;
1395 : break;
1396 : }
1397 30025 : ret |= this_ret;
1398 : }
1399 108519 : else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1400 : {
1401 35056 : tree callee = gimple_call_fndecl (stmt);
1402 35056 : switch (DECL_FUNCTION_CODE (callee))
1403 : {
1404 25 : case BUILT_IN_UNREACHABLE:
1405 25 : if (sanitize_flags_p (SANITIZE_UNREACHABLE))
1406 0 : no_next = ubsan_instrument_unreachable (&gsi);
1407 : break;
1408 : default:
1409 : break;
1410 : }
1411 : }
1412 :
1413 138544 : if (dump_file && (dump_flags & TDF_DETAILS))
1414 : {
1415 331 : fprintf (dump_file, "Expanded: ");
1416 331 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1417 : }
1418 :
1419 138544 : if (!no_next)
1420 123070 : gsi_next (&gsi);
1421 : }
1422 : }
1423 :
1424 19346 : if (need_commit_edge_insert)
1425 0 : gsi_commit_edge_inserts ();
1426 :
1427 19346 : return ret;
1428 19346 : }
1429 :
1430 : } // anon namespace
1431 :
 : /* Factory: create a fresh instance of the sanopt pass for CTXT. */
1432 : gimple_opt_pass *
1433 285722 : make_pass_sanopt (gcc::context *ctxt)
1434 : {
1435 285722 : return new pass_sanopt (ctxt);
1436 : }
|