Line data Source code
1 : /* AddressSanitizer, a fast memory error detector.
2 : Copyright (C) 2012-2026 Free Software Foundation, Inc.
3 : Contributed by Kostya Serebryany <kcc@google.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 :
22 : #include "config.h"
23 : #include "system.h"
24 : #include "coretypes.h"
25 : #include "backend.h"
26 : #include "target.h"
27 : #include "rtl.h"
28 : #include "tree.h"
29 : #include "gimple.h"
30 : #include "cfghooks.h"
31 : #include "alloc-pool.h"
32 : #include "tree-pass.h"
33 : #include "memmodel.h"
34 : #include "tm_p.h"
35 : #include "ssa.h"
36 : #include "stringpool.h"
37 : #include "tree-ssanames.h"
38 : #include "optabs.h"
39 : #include "emit-rtl.h"
40 : #include "cgraph.h"
41 : #include "gimple-pretty-print.h"
42 : #include "alias.h"
43 : #include "fold-const.h"
44 : #include "cfganal.h"
45 : #include "gimplify.h"
46 : #include "gimple-iterator.h"
47 : #include "varasm.h"
48 : #include "stor-layout.h"
49 : #include "tree-iterator.h"
50 : #include "stringpool.h"
51 : #include "attribs.h"
52 : #include "asan.h"
53 : #include "dojump.h"
54 : #include "explow.h"
55 : #include "expr.h"
56 : #include "output.h"
57 : #include "langhooks.h"
58 : #include "cfgloop.h"
59 : #include "gimple-builder.h"
60 : #include "gimple-fold.h"
61 : #include "ubsan.h"
62 : #include "builtins.h"
63 : #include "fnmatch.h"
64 : #include "tree-inline.h"
65 : #include "tree-ssa.h"
66 : #include "tree-eh.h"
67 : #include "diagnostic-core.h"
68 :
69 : /* AddressSanitizer finds out-of-bounds and use-after-free bugs
70 : with <2x slowdown on average.
71 :
72 : The tool consists of two parts:
73 : instrumentation module (this file) and a run-time library.
74 : The instrumentation module adds a run-time check before every memory insn.
75 : For a 8- or 16- byte load accessing address X:
76 : ShadowAddr = (X >> 3) + Offset
77 : ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
78 : if (ShadowValue)
79 : __asan_report_load8(X);
80 : For a load of N bytes (N=1, 2 or 4) from address X:
81 : ShadowAddr = (X >> 3) + Offset
82 : ShadowValue = *(char*)ShadowAddr;
83 : if (ShadowValue)
84 : if ((X & 7) + N - 1 > ShadowValue)
85 : __asan_report_loadN(X);
86 : Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted to the list of module CTORs.
88 : N is the version number of the AddressSanitizer API. The changes between the
89 : API versions are listed in libsanitizer/asan/asan_interface_internal.h.
90 :
91 : The run-time library redefines malloc (so that redzone are inserted around
92 : the allocated memory) and free (so that reuse of free-ed memory is delayed),
93 : provides __asan_report* and __asan_init_vN functions.
94 :
95 : Read more:
96 : http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
97 :
98 : The current implementation supports detection of out-of-bounds and
99 : use-after-free in the heap, on the stack and for global variables.
100 :
101 : [Protection of stack variables]
102 :
103 : To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
105 : stack grows downward:
106 :
107 : int
108 : foo ()
109 : {
110 : char a[24] = {0};
111 : int b[2] = {0};
112 :
113 : a[5] = 1;
114 : b[1] = 2;
115 :
116 : return a[5] + b[1];
117 : }
118 :
119 : For this function, the stack protected by asan will be organized as
120 : follows, from the top of the stack to the bottom:
121 :
122 : Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
123 :
124 : Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
125 : the next slot be 32 bytes aligned; this one is called Partial
126 : Redzone; this 32 bytes alignment is an asan constraint]
127 :
128 : Slot 3/ [24 bytes for variable 'a']
129 :
130 : Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
131 :
132 : Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
133 :
134 : Slot 6/ [8 bytes for variable 'b']
135 :
136 : Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
137 : 'LEFT RedZone']
138 :
139 : The 32 bytes of LEFT red zone at the bottom of the stack can be
140 : decomposed as such:
141 :
142 : 1/ The first 8 bytes contain a magical asan number that is always
143 : 0x41B58AB3.
144 :
145 : 2/ The following 8 bytes contains a pointer to a string (to be
146 : parsed at runtime by the runtime asan library), which format is
147 : the following:
148 :
149 : "<function-name> <space> <num-of-variables-on-the-stack>
150 : (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151 : <length-of-var-in-bytes> ){n} "
152 :
153 : where '(...){n}' means the content inside the parenthesis occurs 'n'
154 : times, with 'n' being the number of variables on the stack.
155 :
156 : 3/ The following 8 bytes contain the PC of the current function which
157 : will be used by the run-time library to print an error message.
158 :
159 : 4/ The following 8 bytes are reserved for internal use by the run-time.
160 :
161 : The shadow memory for that stack layout is going to look like this:
162 :
163 : - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
164 : The F1 byte pattern is a magic number called
165 : ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
        the memory for that shadow byte is part of the LEFT red zone
167 : intended to seat at the bottom of the variables on the stack.
168 :
169 : - content of shadow memory 8 bytes for slots 6 and 5:
170 : 0xF4F4F400. The F4 byte pattern is a magic number
171 : called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
172 : memory region for this shadow byte is a PARTIAL red zone
173 : intended to pad a variable A, so that the slot following
174 : {A,padding} is 32 bytes aligned.
175 :
176 : Note that the fact that the least significant byte of this
177 : shadow memory content is 00 means that 8 bytes of its
178 : corresponding memory (which corresponds to the memory of
179 : variable 'b') is addressable.
180 :
181 : - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
182 : The F2 byte pattern is a magic number called
183 : ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
184 : region for this shadow byte is a MIDDLE red zone intended to
185 : seat between two 32 aligned slots of {variable,padding}.
186 :
187 : - content of shadow memory 8 bytes for slot 3 and 2:
        0xF4000000.  This represents the concatenation of
189 : variable 'a' and the partial red zone following it, like what we
190 : had for variable 'b'. The least significant 3 bytes being 00
191 : means that the 3 bytes of variable 'a' are addressable.
192 :
193 : - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
194 : The F3 byte pattern is a magic number called
195 : ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
196 : region for this shadow byte is a RIGHT red zone intended to seat
197 : at the top of the variables of the stack.
198 :
199 : Note that the real variable layout is done in expand_used_vars in
200 : cfgexpand.cc. As far as Address Sanitizer is concerned, it lays out
201 : stack variables as well as the different red zones, emits some
202 : prologue code to populate the shadow memory as to poison (mark as
203 : non-accessible) the regions of the red zones and mark the regions of
204 : stack variables as accessible, and emit some epilogue code to
205 : un-poison (mark as accessible) the regions of red zones right before
206 : the function exits.
207 :
208 : [Protection of global variables]
209 :
210 : The basic idea is to insert a red zone between two global variables
211 : and install a constructor function that calls the asan runtime to do
212 : the populating of the relevant shadow memory regions at load time.
213 :
214 : So the global variables are laid out as to insert a red zone between
215 : them. The size of the red zones is so that each variable starts on a
216 : 32 bytes boundary.
217 :
218 : Then a constructor function is installed so that, for each global
219 : variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:
221 :
222 : struct __asan_global
223 : {
224 : // Address of the beginning of the global variable.
225 : const void *__beg;
226 :
227 : // Initial size of the global variable.
228 : uptr __size;
229 :
230 : // Size of the global variable + size of the red zone. This
231 : // size is 32 bytes aligned.
232 : uptr __size_with_redzone;
233 :
234 : // Name of the global variable.
235 : const void *__name;
236 :
237 : // Name of the module where the global variable is declared.
238 : const void *__module_name;
239 :
240 : // 1 if it has dynamic initialization, 0 otherwise.
241 : uptr __has_dynamic_init;
242 :
243 : // A pointer to struct that contains source location, could be NULL.
244 : __asan_global_source_location *__location;
245 : }
246 :
247 : A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
249 :
250 : static unsigned HOST_WIDE_INT asan_shadow_offset_value;
251 : static bool asan_shadow_offset_computed;
252 : static vec<char *> sanitized_sections;
253 : static tree last_alloca_addr;
254 :
255 : /* Set of variable declarations that are going to be guarded by
256 : use-after-scope sanitizer. */
257 :
258 : hash_set<tree> *asan_handled_variables = NULL;
259 :
260 : hash_set <tree> *asan_used_labels = NULL;
261 :
262 : /* Global variables for HWASAN stack tagging. */
263 : /* hwasan_frame_tag_offset records the offset from the frame base tag that the
264 : next object should have. */
265 : static uint8_t hwasan_frame_tag_offset = 0;
266 : /* hwasan_frame_base_ptr is a pointer with the same address as
267 : `virtual_stack_vars_rtx` for the current frame, and with the frame base tag
268 : stored in it. N.b. this global RTX does not need to be marked GTY, but is
269 : done so anyway. The need is not there since all uses are in just one pass
270 : (cfgexpand) and there are no calls to ggc_collect between the uses. We mark
271 : it GTY(()) anyway to allow the use of the variable later on if needed by
272 : future features. */
273 : static GTY(()) rtx hwasan_frame_base_ptr = NULL_RTX;
274 : /* hwasan_frame_base_init_seq is the sequence of RTL insns that will initialize
275 : the hwasan_frame_base_ptr. When the hwasan_frame_base_ptr is requested, we
276 : generate this sequence but do not emit it. If the sequence was created it
277 : is emitted once the function body has been expanded.
278 :
279 : This delay is because the frame base pointer may be needed anywhere in the
280 : function body, or needed by the expand_used_vars function. Emitting once in
281 : a known place is simpler than requiring the emission of the instructions to
282 : be know where it should go depending on the first place the hwasan frame
283 : base is needed. */
284 : static GTY(()) rtx_insn *hwasan_frame_base_init_seq = NULL;
285 :
286 : /* Structure defining the extent of one object on the stack that HWASAN needs
287 : to tag in the corresponding shadow stack space.
288 :
289 : The range this object spans on the stack is between `untagged_base +
290 : nearest_offset` and `untagged_base + farthest_offset`.
291 : `tagged_base` is an rtx containing the same value as `untagged_base` but
292 : with a random tag stored in the top byte. We record both `untagged_base`
293 : and `tagged_base` so that `hwasan_emit_prologue` can use both without having
294 : to emit RTL into the instruction stream to re-calculate one from the other.
295 : (`hwasan_emit_prologue` needs to use both bases since the
296 : __hwasan_tag_memory call it emits uses an untagged value, and it calculates
297 : the tag to store in shadow memory based on the tag_offset plus the tag in
298 : tagged_base). */
struct hwasan_stack_var
{
  /* Base address of the object with no tag in the top byte.  */
  rtx untagged_base;
  /* Same address as UNTAGGED_BASE but carrying the random frame tag in
     the top byte (see the comment preceding this struct).  */
  rtx tagged_base;
  /* Offsets from the bases delimiting the object's extent on the stack:
     the object spans [base + nearest_offset, base + farthest_offset).  */
  poly_int64 nearest_offset;
  poly_int64 farthest_offset;
  /* Offset from the frame base tag assigned to this object.  */
  uint8_t tag_offset;
};
307 :
308 : /* Variable recording all stack variables that HWASAN needs to tag.
309 : Does not need to be marked as GTY(()) since every use is in the cfgexpand
   pass and ggc_collect is not called in the middle of that pass.  */
311 : static vec<hwasan_stack_var> hwasan_tagged_stack_vars;
312 :
313 :
314 : /* Sets shadow offset to value in string VAL. */
315 :
316 : bool
317 11 : set_asan_shadow_offset (const char *val)
318 : {
319 11 : char *endp;
320 :
321 11 : errno = 0;
322 : #ifdef HAVE_LONG_LONG
323 11 : asan_shadow_offset_value = strtoull (val, &endp, 0);
324 : #else
325 : asan_shadow_offset_value = strtoul (val, &endp, 0);
326 : #endif
327 11 : if (!(*val != '\0' && *endp == '\0' && errno == 0))
328 : return false;
329 :
330 11 : asan_shadow_offset_computed = true;
331 :
332 11 : return true;
333 : }
334 :
335 : /* Set list of user-defined sections that need to be sanitized. */
336 :
337 : void
338 40 : set_sanitized_sections (const char *sections)
339 : {
340 40 : char *pat;
341 40 : unsigned i;
342 50 : FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
343 10 : free (pat);
344 40 : sanitized_sections.truncate (0);
345 :
346 90 : for (const char *s = sections; *s; )
347 : {
348 : const char *end;
349 220 : for (end = s; *end && *end != ','; ++end);
350 50 : size_t len = end - s;
351 50 : sanitized_sections.safe_push (xstrndup (s, len));
352 50 : s = *end ? end + 1 : end;
353 : }
354 40 : }
355 :
/* Return true if STMT is an IFN_ASAN_MARK internal call whose first
   argument (the mark kind) equals FLAG.  */

bool
asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
{
  return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
}
362 :
/* Return true if stack variables should be protected by ASan, i.e.
   -fsanitize=address is active and the asan-stack parameter is set.  */

bool
asan_sanitize_stack_p (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
}
368 :
/* Return true if dynamic alloca regions should be protected by ASan
   (requires stack sanitization plus the asan-protect-allocas param).  */

bool
asan_sanitize_allocas_p (void)
{
  return (asan_sanitize_stack_p () && param_asan_protect_allocas);
}
374 :
/* Return true if memory loads should be instrumented by ASan.  */

bool
asan_instrument_reads (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_reads);
}
380 :
/* Return true if memory stores should be instrumented by ASan.  */

bool
asan_instrument_writes (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_writes);
}
386 :
/* Return true if memory intrinsics (memcpy and friends) should be
   instrumented by ASan (asan-memintrin parameter).  */

bool
asan_memintrin (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_memintrin);
}
392 :
393 :
394 : /* Support for --param asan-kernel-mem-intrinsic-prefix=1. */
395 : static GTY(()) rtx asan_memfn_rtls[3];
396 :
/* Return (and cache in asan_memfn_rtls) the RTL for the sanitizer
   runtime replacement of the mem* builtin FNDECL: __asan_memcpy /
   __asan_memset / __asan_memmove, or the __hwasan_ flavor when
   compiling for kernel hwaddress sanitization.  */

rtx
asan_memfn_rtl (tree fndecl)
{
  int i;
  const char *f, *p;
  /* Large enough for the longest replacement name.  */
  char buf[sizeof ("__hwasan_memmove")];

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_MEMCPY: i = 0; f = "memcpy"; break;
    case BUILT_IN_MEMSET: i = 1; f = "memset"; break;
    case BUILT_IN_MEMMOVE: i = 2; f = "memmove"; break;
    default: gcc_unreachable ();
    }
  if (asan_memfn_rtls[i] == NULL_RTX)
    {
      /* Temporarily rename FNDECL and clear its assembler name and RTL
	 so that recomputing DECL_RTL yields a reference to the runtime
	 entry point; then restore everything we clobbered.  */
      tree save_name = DECL_NAME (fndecl);
      tree save_assembler_name = DECL_ASSEMBLER_NAME (fndecl);
      rtx save_rtl = DECL_RTL (fndecl);
      if (flag_sanitize & SANITIZE_KERNEL_HWADDRESS)
	p = "__hwasan_";
      else
	p = "__asan_";
      strcpy (buf, p);
      strcat (buf, f);
      DECL_NAME (fndecl) = get_identifier (buf);
      DECL_ASSEMBLER_NAME_RAW (fndecl) = NULL_TREE;
      SET_DECL_RTL (fndecl, NULL_RTX);
      /* DECL_RTL recomputes the RTL lazily from the new name.  */
      asan_memfn_rtls[i] = DECL_RTL (fndecl);
      DECL_NAME (fndecl) = save_name;
      DECL_ASSEMBLER_NAME_RAW (fndecl) = save_assembler_name;
      SET_DECL_RTL (fndecl, save_rtl);
    }
  return asan_memfn_rtls[i];
}
432 :
433 :
434 : /* Checks whether section SEC should be sanitized. */
435 :
436 : static bool
437 270 : section_sanitized_p (const char *sec)
438 : {
439 270 : char *pat;
440 270 : unsigned i;
441 420 : FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
442 330 : if (fnmatch (pat, sec, FNM_PERIOD) == 0)
443 : return true;
444 : return false;
445 : }
446 :
447 : /* Returns Asan shadow offset. */
448 :
/* Returns Asan shadow offset, computing the target's default lazily
   unless one was already recorded (e.g. by set_asan_shadow_offset).  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}
459 :
/* Return true if the shadow memory base address is resolved
   dynamically at runtime; only possible while no non-zero static
   shadow offset has been recorded and the target supports it.  */

static bool
asan_dynamic_shadow_offset_p ()
{
  return (asan_shadow_offset_value == 0)
	 && targetm.asan_dynamic_shadow_offset_p ();
}
466 :
/* Return true iff the Asan shadow offset has already been set (either
   explicitly by the user or computed from the target hook).  */
bool
asan_shadow_offset_set_p ()
{
  return asan_shadow_offset_computed;
}
473 :
474 : alias_set_type asan_shadow_set = -1;
475 :
476 : /* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
477 : alias set is used for all shadow memory accesses. */
478 : static GTY(()) tree shadow_ptr_types[3];
479 :
480 : /* Decl for __asan_option_detect_stack_use_after_return. */
481 : static GTY(()) tree asan_detect_stack_use_after_return;
482 :
483 : static GTY (()) tree asan_shadow_memory_dynamic_address;
484 :
485 : /* Local copy for the asan_shadow_memory_dynamic_address within the
486 : function. */
487 : static GTY (()) tree asan_local_shadow_memory_dynamic_address;
488 :
/* Return the extern declaration of the runtime-provided global
   __asan_shadow_memory_dynamic_address, creating and caching it on
   first use.  */

static tree
get_asan_shadow_memory_dynamic_address_decl ()
{
  if (asan_shadow_memory_dynamic_address == NULL_TREE)
    {
      tree id, decl;
      id = get_identifier ("__asan_shadow_memory_dynamic_address");
      decl
	= build_decl (BUILTINS_LOCATION, VAR_DECL, id, pointer_sized_int_node);
      SET_DECL_ASSEMBLER_NAME (decl, id);
      /* The variable is defined by the sanitizer runtime, so mark it
	 extern, public and artificial.  */
      TREE_ADDRESSABLE (decl) = 1;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      DECL_EXTERNAL (decl) = 1;
      TREE_STATIC (decl) = 1;
      TREE_PUBLIC (decl) = 1;
      TREE_USED (decl) = 1;
      asan_shadow_memory_dynamic_address = decl;
    }

  return asan_shadow_memory_dynamic_address;
}
511 :
/* If the target uses a dynamic shadow base, load
   __asan_shadow_memory_dynamic_address into a function-local temporary
   on the single edge out of FUN's entry block, and remember that
   temporary in asan_local_shadow_memory_dynamic_address (left as
   NULL_TREE otherwise).  */

void
asan_maybe_insert_dynamic_shadow_at_function_entry (function *fun)
{
  asan_local_shadow_memory_dynamic_address = NULL_TREE;
  if (!asan_dynamic_shadow_offset_p ())
    return;

  gimple *g;

  tree lhs = create_tmp_var (pointer_sized_int_node,
			     "__local_asan_shadow_memory_dynamic_address");

  g = gimple_build_assign (lhs, get_asan_shadow_memory_dynamic_address_decl ());
  gimple_set_location (g, fun->function_start_locus);
  /* Insert the load on the entry edge so it dominates every use.  */
  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_on_edge_immediate (e, g);

  asan_local_shadow_memory_dynamic_address = lhs;
}
531 :
532 : /* Hashtable support for memory references used by gimple
533 : statements. */
534 :
535 : /* This type represents a reference to a memory region. */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access in bytes; -1 when the size is not a
     compile-time constant (see has_mem_ref_been_instrumented).  */
  HOST_WIDE_INT access_size;
};
544 :
545 : object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
546 :
547 : /* Initializes an instance of asan_mem_ref. */
548 :
/* REF is the object to fill in, START the address expression of the
   access and ACCESS_SIZE its size in bytes.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}
555 :
556 : /* Allocates memory for an instance of asan_mem_ref into the memory
557 : pool returned by asan_mem_ref_get_alloc_pool and initialize it.
558 : START is the address of (or the expression pointing to) the
559 : beginning of memory reference. ACCESS_SIZE is the size of the
560 : access to the referenced memory. */
561 :
static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  /* Storage comes from asan_mem_ref_pool and is reclaimed wholesale in
     free_mem_ref_resources; callers never free individual entries.  */
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
570 :
571 : /* This builds and returns a pointer to the end of the memory region
572 : that starts at START and of length LEN. */
573 :
574 : tree
575 0 : asan_mem_ref_get_end (tree start, tree len)
576 : {
577 0 : if (len == NULL_TREE || integer_zerop (len))
578 0 : return start;
579 :
580 0 : if (!ptrofftype_p (len))
581 0 : len = convert_to_ptrofftype (len);
582 :
583 0 : return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
584 : }
585 :
586 : /* Return a tree expression that represents the end of the referenced
587 : memory region. Beware that this function can actually build a new
588 : tree expression. */
589 :
/* Overload taking the start address from REF; simply forwards to the
   (start, len) variant above.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
595 :
/* Hash traits for asan_mem_ref entries; the table does not own its
   elements (nofree_ptr_hash) — they live in asan_mem_ref_pool.  */
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};
601 :
602 : /* Hash a memory reference. */
603 :
/* Only the START expression participates in the hash, consistent with
   equal () below; ACCESS_SIZE is deliberately ignored.  */
inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}
609 :
/* Compare two memory references by their START expressions only;
   the recorded access sizes are deliberately not compared. */
612 :
/* Two references are equal iff their start expressions are
   structurally identical.  */
inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}
619 :
620 : static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
621 :
622 : /* Returns a reference to the hash table containing memory references.
623 : This function ensures that the hash table is created. Note that
624 : this hash table is updated by the function
625 : update_mem_ref_hash_table. */
626 :
627 : static hash_table<asan_mem_ref_hasher> *
628 62659 : get_mem_ref_hash_table ()
629 : {
630 62659 : if (!asan_mem_ref_ht)
631 3331 : asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
632 :
633 62659 : return asan_mem_ref_ht;
634 : }
635 :
636 : /* Clear all entries from the memory references hash table. */
637 :
638 : static void
639 34253 : empty_mem_ref_hash_table ()
640 : {
641 34253 : if (asan_mem_ref_ht)
642 18104 : asan_mem_ref_ht->empty ();
643 34253 : }
644 :
645 : /* Free the memory references hash table. */
646 :
/* Free the memory references hash table and the backing object
   pool.  */

static void
free_mem_ref_resources ()
{
  /* `delete NULL' is harmless, so no guard is needed.  */
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  /* The pool owns every asan_mem_ref the table pointed at.  */
  asan_mem_ref_pool.release ();
}
655 :
656 : /* Return true iff the memory reference REF has been instrumented. */
657 :
658 : static bool
659 34993 : has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
660 : {
661 34993 : asan_mem_ref r;
662 34993 : asan_mem_ref_init (&r, ref, access_size);
663 :
664 34993 : asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
665 34993 : return saved_ref && saved_ref->access_size >= access_size;
666 : }
667 :
668 : /* Return true iff the memory reference REF has been instrumented. */
669 :
/* Convenience overload: unpack REF's start and access size and query
   the hash table.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
675 :
676 : /* Return true iff access to memory region starting at REF and of
677 : length LEN has been instrumented. */
678 :
679 : static bool
680 821 : has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
681 : {
682 821 : HOST_WIDE_INT size_in_bytes
683 821 : = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
684 :
685 238 : return size_in_bytes != -1
686 238 : && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
687 : }
688 :
689 : /* Set REF to the memory reference present in a gimple assignment
690 : ASSIGNMENT. Return true upon successful completion, false
691 : otherwise. */
692 :
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  On success, *REF_IS_STORE is true for a store and false
   for a load.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  /* Clobbers only mark end of life of a variable; they are not real
     stores and must not be instrumented.  */
  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  /* int_size_in_bytes yields -1 for variable-sized types.  */
  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
717 :
718 : /* Return address of last allocated dynamic alloca. */
719 :
static tree
get_last_alloca_addr ()
{
  if (last_alloca_addr)
    return last_alloca_addr;

  /* First request in this function: create the temporary and
     zero-initialize it on the edge out of the entry block so the
     initialization dominates all uses.  */
  last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
  gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_on_edge_immediate (e, g);
  return last_alloca_addr;
}
732 :
733 : /* Insert __asan_allocas_unpoison (top, bottom) call before
734 : __builtin_stack_restore (new_sp) call.
735 : The pseudocode of this routine should look like this:
736 : top = last_alloca_addr;
737 : bot = new_sp;
738 : __asan_allocas_unpoison (top, bot);
739 : last_alloca_addr = new_sp;
740 : __builtin_stack_restore (new_sp);
741 : In general, we can't use new_sp as bot parameter because on some
742 : architectures SP has non zero offset from dynamic stack area. Moreover, on
743 : some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each
744 : particular function only after all callees were expanded to rtl.
745 : The most noticeable example is PowerPC{,64}, see
746 : http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
747 : To overcome the issue we use following trick: pass new_sp as a second
748 : parameter to __asan_allocas_unpoison and rewrite it during expansion with
749 : new_sp + (virtual_dynamic_stack_rtx - sp) later in
750 : expand_asan_emit_allocas_unpoison function.
751 :
752 : HWASAN needs to do very similar, the eventual pseudocode should be:
753 : __hwasan_tag_memory (virtual_stack_dynamic_rtx,
754 : 0,
755 : new_sp - sp);
756 : __builtin_stack_restore (new_sp)
757 :
758 : Need to use the same trick to handle STACK_DYNAMIC_OFFSET as described
759 : above. */
760 :
static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
  /* Nothing to do without an insertion point or when no alloca
     sanitization mode is active.  */
  if (!iter
      || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()
	   || memtag_sanitize_allocas_p ()))
    return;

  /* The new stack pointer passed to __builtin_stack_restore.  */
  tree restored_stack = gimple_call_arg (call, 0);

  gimple *g;

  if (hwasan_sanitize_allocas_p () || memtag_sanitize_allocas_p ())
    {
      enum internal_fn fn = IFN_HWASAN_ALLOCA_UNPOISON;
      /* There is only one piece of information `expand_HWASAN_ALLOCA_UNPOISON`
	 needs to work.  This is the length of the area that we're
	 deallocating.  Since the stack pointer is known at expand time, the
	 position of the new stack pointer after deallocation is enough
	 information to calculate this length.  */
      g = gimple_build_call_internal (fn, 1, restored_stack);
    }
  else
    {
      /* Emit __asan_allocas_unpoison (last_alloca_addr, restored_stack)
	 and then record restored_stack as the new last alloca address;
	 see the function comment above for why restored_stack is passed
	 instead of the exact bottom address.  */
      tree last_alloca = get_last_alloca_addr ();
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
      g = gimple_build_call (fn, 2, last_alloca, restored_stack);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      g = gimple_build_assign (last_alloca, restored_stack);
    }

  /* Insert the remaining statement before the stack-restore call.  */
  gsi_insert_before (iter, g, GSI_SAME_STMT);
}
794 :
795 : /* Deploy and poison redzones around __builtin_alloca call. To do this, we
796 : should replace this call with another one with changed parameters and
797 : replace all its uses with new address, so
798 : addr = __builtin_alloca (old_size, align);
799 : is replaced by
800 : left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
801 : Following two statements are optimized out if we know that
802 : old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. alloca doesn't need partial
803 : redzone.
804 : misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
805 : partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
806 : right_redzone_size = ASAN_RED_ZONE_SIZE;
807 : additional_size = left_redzone_size + partial_redzone_size +
808 : right_redzone_size;
809 : new_size = old_size + additional_size;
810 : new_alloca = __builtin_alloca (new_size, max (align, 32))
811 : __asan_alloca_poison (new_alloca, old_size)
812 : addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
813 : last_alloca_addr = new_alloca;
814 : ADDITIONAL_SIZE is added to make new memory allocation contain not only
815 : requested memory, but also left, partial and right redzones as well as some
816 : additional space, required by alignment. */
817 :
818 : static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
  /* Nothing to do unless we have an insertion point and at least one of the
     alloca-sanitizing modes (ASAN, HWASAN or MEMTAG) is enabled.  */
  if (!iter
      || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()
	   || memtag_sanitize_allocas_p ()))
    return;

  gassign *g;
  gcall *gg;
  tree callee = gimple_call_fndecl (call);
  tree lhs = gimple_call_lhs (call);
  tree old_size = gimple_call_arg (call, 0);
  tree ptr_type = lhs ? TREE_TYPE (lhs) : ptr_type_node;
  tree partial_size = NULL_TREE;
  /* __builtin_alloca_with_align carries the requested alignment as its
     second argument; plain __builtin_alloca has no alignment argument.
     NOTE(review): this value is used below both as-is (when passed back to
     BUILT_IN_ALLOCA_WITH_ALIGN) and divided by BITS_PER_UNIT for pointer
     offsets, so it is in bits here — confirm against builtins.def.  */
  unsigned int align
    = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
      ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));

  /* If the alloca call can throw internally, statements consuming its
     result cannot simply follow it in the same block; they are emitted on
     the fallthru edge E instead.  */
  bool throws = false;
  edge e = NULL;
  if (stmt_can_throw_internal (cfun, call))
    {
      if (!lhs)
	return;
      throws = true;
      e = find_fallthru_edge (gsi_bb (*iter)->succs);
    }

  if (hwasan_sanitize_allocas_p () || memtag_sanitize_allocas_p ())
    {
      gimple_seq stmts = NULL;
      location_t loc = gimple_location (gsi_stmt (*iter));
      /* HWASAN and MEMTAG need a different expansion.

	 addr = __builtin_alloca (size, align);

	 in case of HWASAN, should be replaced by

	 new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
	 untagged_addr = __builtin_alloca (new_size, align);
	 tag = __hwasan_choose_alloca_tag ();
	 addr = ifn_HWASAN_SET_TAG (untagged_addr, tag);
	 __hwasan_tag_memory (untagged_addr, tag, new_size);

	 in case of MEMTAG, should be replaced by

	 new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
	 untagged_addr = __builtin_alloca (new_size, align);
	 addr = ifn_HWASAN_ALLOCA_POISON (untagged_addr, new_size);

	 where a new tag is chosen and set on untagged_addr when
	 HWASAN_ALLOCA_POISON is expanded.  */

      /* Ensure alignment at least HWASAN_TAG_GRANULE_SIZE bytes so we start on
	 a tag granule.  */
      align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE;

      /* Round the size up so the allocation covers whole tag granules.  */
      tree old_size = gimple_call_arg (call, 0);
      tree new_size = gimple_build_round_up (&stmts, loc, size_type_node,
					     old_size,
					     HWASAN_TAG_GRANULE_SIZE);

      /* Make the alloca call */
      tree untagged_addr
	= gimple_build (&stmts, loc,
			as_combined_fn (BUILT_IN_ALLOCA_WITH_ALIGN), ptr_type,
			new_size, build_int_cst (size_type_node, align));

      tree addr;

      if (memtag_sanitize_p ())
	/* For MEMTAG a single internal function both picks the tag and
	   poisons the region at expand time.  */
	addr = gimple_build (&stmts, loc, CFN_HWASAN_ALLOCA_POISON, ptr_type,
			     untagged_addr, new_size);
      else
	{
	  /* Choose the tag.
	     Here we use an internal function so we can choose the tag at expand
	     time.  We need the decision to be made after stack variables have been
	     assigned their tag (i.e. once the hwasan_frame_tag_offset variable has
	     been set to one after the last stack variables tag).  */
	  tree tag = gimple_build (&stmts, loc, CFN_HWASAN_CHOOSE_TAG,
				   unsigned_char_type_node);

	  /* Add tag to pointer.  */
	  addr = gimple_build (&stmts, loc, CFN_HWASAN_SET_TAG, ptr_type,
			       untagged_addr, tag);

	  /* Tag shadow memory.
	     NOTE: require using `untagged_addr` here for libhwasan API.  */
	  gimple_build (&stmts, loc, as_combined_fn (BUILT_IN_HWASAN_TAG_MEM),
			void_type_node, untagged_addr, tag, new_size);
	}

      /* Insert the built up code sequence into the original instruction stream
	 the iterator points to.  */
      gsi_insert_seq_before (iter, stmts, GSI_SAME_STMT);

      /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
      replace_call_with_value (iter, addr);
      return;
    }

  /* ASAN expansion: surround the allocation with red zones and remember the
     raw pointer so the matching stack_restore can unpoison it.  */
  tree last_alloca = get_last_alloca_addr ();
  const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;

  /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
     bytes of allocated space.  Otherwise, align alloca to ASAN_RED_ZONE_SIZE
     manually.  */
  align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);

  tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
  tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);

  /* Extract lower bits from old_size.  */
  wide_int size_nonzero_bits = get_nonzero_bits (old_size);
  wide_int rz_mask
    = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
  wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);

  /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
     redzone.  Otherwise, compute its size here.  */
  if (wi::ne_p (old_size_lower_bits, 0))
    {
      /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
	 partial_size = ASAN_RED_ZONE_SIZE - misalign.  */
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
			       BIT_AND_EXPR, old_size, alloca_rz_mask);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree misalign = gimple_assign_lhs (g);
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
			       redzone_size, misalign);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      partial_size = gimple_assign_lhs (g);
    }

  /* additional_size = align + ASAN_RED_ZONE_SIZE.  */
  tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
					+ ASAN_RED_ZONE_SIZE);
  /* If alloca has partial redzone, include it to additional_size too.  */
  if (partial_size)
    {
      /* additional_size += partial_size.  */
      g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
			       partial_size, additional_size);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      additional_size = gimple_assign_lhs (g);
    }

  /* new_size = old_size + additional_size.  */
  g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
			   additional_size);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_size = gimple_assign_lhs (g);

  /* Build new __builtin_alloca call:
     new_alloca_with_rz = __builtin_alloca (new_size, align).  */
  tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  gg = gimple_build_call (fn, 2, new_size,
			  build_int_cst (size_type_node, align));
  tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
  gimple_call_set_lhs (gg, new_alloca_with_rz);
  if (throws)
    {
      /* The replacement alloca takes over the throwing call's position; the
	 original lhs is cleared because the result is rewritten below on the
	 fallthru edge.  */
      gimple_call_set_lhs (call, NULL);
      gsi_replace (iter, gg, true);
    }
  else
    gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* new_alloca = new_alloca_with_rz + align.  */
  g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
			   new_alloca_with_rz,
			   build_int_cst (size_type_node,
					  align / BITS_PER_UNIT));
  gimple_stmt_iterator gsi = gsi_none ();
  if (throws)
    {
      /* Statements after a throwing call must go on the fallthru edge.  */
      gsi_insert_on_edge_immediate (e, g);
      gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_alloca = gimple_assign_lhs (g);

  /* Poison newly created alloca redzones:
     __asan_alloca_poison (new_alloca, old_size).  */
  fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
  gg = gimple_build_call (fn, 2, new_alloca, old_size);
  if (throws)
    gsi_insert_after (&gsi, gg, GSI_NEW_STMT);
  else
    gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* Save new_alloca_with_rz value into last_alloca to use it during
     allocas unpoisoning.  */
  g = gimple_build_assign (last_alloca, new_alloca_with_rz);
  if (throws)
    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  else
    gsi_insert_before (iter, g, GSI_SAME_STMT);

  /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
  if (throws)
    {
      g = gimple_build_assign (lhs, new_alloca);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
    }
  else
    replace_call_with_value (iter, new_alloca);
}
1029 :
1030 : /* Return the memory references contained in a gimple statement
1031 : representing a builtin call that has to do with memory access. */
1032 :
static bool
get_mem_refs_of_builtin_call (gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p,
			      gimple_stmt_iterator *iter = NULL)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  /* Up to two source regions and one destination region are collected into
     the local SOURCE0/SOURCE1/DEST/LEN variables by the switch below and
     copied into the output parameters afterwards.  */
  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops*/
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      /* Special case strlen here since its length is taken from its return
	 value.

	 The approach taken by the sanitizers is to check a memory access
	 before it's taken.  For ASAN strlen is intercepted by libasan, so no
	 check is inserted by the compiler.

	 This function still returns `true` and provides a length to the rest
	 of the ASAN pass in order to record what areas have been checked,
	 avoiding superfluous checks later on.

	 HWASAN does not intercept any of these internal functions.
	 This means that checks for memory accesses must be inserted by the
	 compiler.
	 strlen is a special case, because we can tell the length from the
	 return of the function, but that is not known until after the function
	 has returned.

	 Hence we can't check the memory access before it happens.
	 We could check the memory access after it has already happened, but
	 for now we choose to just ignore `strlen` calls.
	 This decision was simply made because that means the special case is
	 limited to this one case of this one function.  */
      if (hwassist_sanitize_p ())
	return false;
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* These two builtins do not describe memory regions themselves; they
	 are rewritten in place by their handlers and fall out of the switch
	 with no region recorded.  */
    case BUILT_IN_STACK_RESTORE:
      handle_builtin_stack_restore (call, iter);
      break;

    CASE_BUILT_IN_ALLOCA:
      handle_builtin_alloca (call, iter);
      break;
    /* And now the __atomic* and __sync builtins.
       These are handled differently from the classical memory
       access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_1:
      access_size = 1;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_2:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
      access_size = 2;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_4:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
      access_size = 4;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_8:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
      access_size = 8;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
    case BUILT_IN_ATOMIC_STORE_16:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      access_size = 16;
      /* FALLTHRU */
    do_atomic:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so lets
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	tree type = build_nonstandard_integer_type (access_size
						    * BITS_PER_UNIT, 1);
	dest = build2 (MEM_REF, type, dest,
		       build_int_cst (build_pointer_type (char_type_node), 0));
	break;
      }

    default:
      /* The other builtins memory access are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  /* A length means the region(s) were collected by one of the memop cases;
     publish each non-null region to the caller.  */
  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  /* The atomic cases build DEST as a MEM_REF with no separate length; the
     access is a plain dereference of ACCESS_SIZE bytes.  */
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
1386 :
1387 : /* Return true iff a given gimple statement has been instrumented.
1388 : Note that the statement is "defined" by the memory references it
1389 : contains. */
1390 :
static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  /* Case 1: a single load or store assignment.  */
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	{
	  if (!has_mem_ref_been_instrumented (&r))
	    return false;
	  /* A store whose rhs is itself a load references two locations;
	     both must have been instrumented.  */
	  if (r_is_store && gimple_assign_load_p (stmt))
	    {
	      asan_mem_ref src;
	      asan_mem_ref_init (&src, NULL, 1);
	      src.start = gimple_assign_rhs1 (stmt);
	      src.access_size = int_size_in_bytes (TREE_TYPE (src.start));
	      if (!has_mem_ref_been_instrumented (&src))
		return false;
	    }
	  return true;
	}
    }
  /* Case 2: a normal builtin call describing up to three memory regions
     (e.g. memcpy's source, destination and length).  */
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  /* Every region the builtin touches must already be covered.  */
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  /* Case 3: any other call whose lhs is a store into memory (aggregate
     return written directly to a memory location).  */
  else if (is_gimple_call (stmt)
	   && gimple_store_p (stmt)
	   && (gimple_call_builtin_p (stmt)
	       || gimple_call_internal_p (stmt)
	       || !aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
				      gimple_call_fntype (stmt))))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}
1465 :
1466 : /* Insert a memory reference into the hash table. */
1467 :
1468 : static void
1469 27666 : update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
1470 : {
1471 27666 : hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
1472 :
1473 27666 : asan_mem_ref r;
1474 27666 : asan_mem_ref_init (&r, ref, access_size);
1475 :
1476 27666 : asan_mem_ref **slot = ht->find_slot (&r, INSERT);
1477 27666 : if (*slot == NULL || (*slot)->access_size < access_size)
1478 27654 : *slot = asan_mem_ref_new (ref, access_size);
1479 27666 : }
1480 :
1481 : /* Initialize shadow_ptr_types array. */
1482 :
1483 : static void
1484 2438 : asan_init_shadow_ptr_types (void)
1485 : {
1486 2438 : asan_shadow_set = new_alias_set ();
1487 2438 : tree types[3] = { signed_char_type_node, short_integer_type_node,
1488 2438 : integer_type_node };
1489 :
1490 9752 : for (unsigned i = 0; i < 3; i++)
1491 : {
1492 7314 : shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1493 7314 : TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1494 7314 : shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1495 : }
1496 :
1497 2438 : initialize_sanitizer_builtins ();
1498 2438 : }
1499 :
1500 : /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
1501 :
1502 : static tree
1503 11540 : asan_pp_string (pretty_printer *pp)
1504 : {
1505 11540 : const char *buf = pp_formatted_text (pp);
1506 11540 : size_t len = strlen (buf);
1507 11540 : tree ret = build_string (len + 1, buf);
1508 23080 : TREE_TYPE (ret)
1509 11540 : = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1510 11540 : build_index_type (size_int (len)));
1511 11540 : TREE_READONLY (ret) = 1;
1512 11540 : TREE_STATIC (ret) = 1;
1513 11540 : return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1514 : }
1515 :
1516 : /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
1517 : though. */
1518 :
static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  /* LEN must be a multiple of 4 because the loop below clears SImode
     (4-byte) words.  */
  gcc_assert ((len & 3) == 0);
  /* First try letting clear_storage expand the clearing inline.  */
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = end_sequence ();
  /* If clear_storage produced a library call (e.g. memset), discard the
     sequence and emit an explicit word-clearing loop instead.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  /* Emit: do { *addr = 0; addr += 4; } while (addr < end);  */
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  /* Mark the back edge as very likely taken (80%) for the RTL optimizers.  */
  add_reg_br_prob_note (jump,
			profile_probability::guessed_always ()
			.apply_scale (80, 100));
}
1557 :
/* Emit the function-local "LASANPC" debug label, numbered by the current
   function's funcdef number, at the start of the function's code.  */

void
asan_function_start (void)
{
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC", current_function_funcdef_no);
}
1563 :
1564 : /* Return number of shadow bytes that are occupied by a local variable
1565 : of SIZE bytes. */
1566 :
static unsigned HOST_WIDE_INT
shadow_mem_size (unsigned HOST_WIDE_INT size)
{
  /* It must be possible to align stack variables to granularity
     of shadow memory.  */
  gcc_assert (BITS_PER_UNIT
	      * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);

  /* One shadow byte covers ASAN_SHADOW_GRANULARITY bytes of real memory;
     round up so a trailing partial granule still gets a shadow byte.  */
  return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
}
1577 :
1578 : /* Always emit 4 bytes at a time. */
1579 : #define RZ_BUFFER_SIZE 4
1580 :
1581 : /* ASAN redzone buffer container that handles emission of shadow bytes. */
class asan_redzone_buffer
{
public:
  /* Constructor.  SHADOW_MEM is the memory to emit shadow bytes into;
     PREV_OFFSET is the frame offset the buffer starts at.  */
  asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
    m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
    m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
  {}

  /* Emit VALUE shadow byte at a given OFFSET.  */
  void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);

  /* Emit RTX emission of the content of the buffer.  */
  void flush_redzone_payload (void);

private:
  /* Flush if the content of the buffer is full
     (equal to RZ_BUFFER_SIZE).  */
  void flush_if_full (void);

  /* Memory where we last emitted a redzone payload.  */
  rtx m_shadow_mem;

  /* Relative offset where we last emitted a redzone payload.  */
  HOST_WIDE_INT m_prev_offset;

  /* Relative original offset.  Used for checking only.  */
  HOST_WIDE_INT m_original_offset;

 public:
  /* Buffer with redzone payload.  Kept public so callers can inspect any
     pending (unflushed) bytes.  */
  auto_vec<unsigned char> m_shadow_bytes;
};
1615 :
1616 : /* Emit VALUE shadow byte at a given OFFSET. */
1617 :
void
asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
					unsigned char value)
{
  /* OFFSET is a real-memory frame offset; each buffered shadow byte covers
     ASAN_SHADOW_GRANULARITY bytes, so offsets must be granule-aligned and
     monotonically increasing.  */
  gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
  gcc_assert (offset >= m_prev_offset);

  /* Offset the next buffered byte would naturally land on.  */
  HOST_WIDE_INT off
    = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
  if (off == offset)
    /* Consecutive shadow memory byte.  */;
  else if (offset < m_prev_offset + (HOST_WIDE_INT) (ASAN_SHADOW_GRANULARITY
						     * RZ_BUFFER_SIZE)
	   && !m_shadow_bytes.is_empty ())
    {
      /* Shadow memory byte with a small gap.  */
      for (; off < offset; off += ASAN_SHADOW_GRANULARITY)
	m_shadow_bytes.safe_push (0);
    }
  else
    {
      /* The gap is too large for the current buffer: flush what we have
	 and restart at a new base offset.  */
      if (!m_shadow_bytes.is_empty ())
	flush_redzone_payload ();

      /* Maybe start earlier in order to use aligned store.  */
      HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
      if (align)
	{
	  offset -= align;
	  /* NOTE(review): ALIGN is a byte count being converted to a shadow
	     byte count; dividing by BITS_PER_UNIT only matches because
	     BITS_PER_UNIT == ASAN_SHADOW_GRANULARITY numerically (both 8 on
	     typical hosts) — confirm against ASAN_SHADOW_SHIFT.  */
	  for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
	    m_shadow_bytes.safe_push (0);
	}

      /* Adjust m_prev_offset and m_shadow_mem.  */
      HOST_WIDE_INT diff = offset - m_prev_offset;
      m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
				     diff >> ASAN_SHADOW_SHIFT);
      m_prev_offset = offset;
    }
  m_shadow_bytes.safe_push (value);
  flush_if_full ();
}
1660 :
1661 : /* Emit RTX emission of the content of the buffer. */
1662 :
void
asan_redzone_buffer::flush_redzone_payload (void)
{
  /* The SImode store below assumes byte order of the buffered bytes and of
     the assembled word agree.  */
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);

  if (m_shadow_bytes.is_empty ())
    return;

  /* Be sure we always emit to an aligned address.  */
  gcc_assert (((m_prev_offset - m_original_offset)
	       & (ASAN_RED_ZONE_SIZE - 1)) == 0);

  /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed.  Note the `<='
     bound may push the buffer one byte past RZ_BUFFER_SIZE; that is
     harmless since only the first RZ_BUFFER_SIZE bytes are read below and
     the buffer is truncated at the end.  */
  unsigned l = m_shadow_bytes.length ();
  for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
    m_shadow_bytes.safe_push (0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
	     "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);

  /* Assemble the four shadow bytes into one host-endian SImode constant.  */
  unsigned HOST_WIDE_INT val = 0;
  for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
    {
      unsigned char v
	= m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
      val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "%02x ", v);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");

  /* Emit one 4-byte store and reset the buffer.  */
  rtx c = gen_int_mode (val, SImode);
  m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
  emit_move_insn (m_shadow_mem, c);
  m_shadow_bytes.truncate (0);
}
1702 :
1703 : /* Flush if the content of the buffer is full
1704 : (equal to RZ_BUFFER_SIZE). */
1705 :
void
asan_redzone_buffer::flush_if_full (void)
{
  /* A full buffer holds exactly one SImode word's worth of shadow bytes.  */
  if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
    flush_redzone_payload ();
}
1712 :
1713 :
1714 : /* HWAddressSanitizer (hwasan) is a probabilistic method for detecting
1715 : out-of-bounds and use-after-free bugs.
   Read more:
   https://github.com/google/sanitizers/wiki/AddressSanitizer
1718 :
1719 : Similar to AddressSanitizer (asan) it consists of two parts: the
1720 : instrumentation module in this file, and a run-time library.
1721 :
1722 : The instrumentation module adds a run-time check before every memory insn in
1723 : the same manner as asan (see the block comment for AddressSanitizer above).
1724 : Currently, hwasan only adds out-of-line instrumentation, where each check is
1725 : implemented as a function call to the run-time library. Hence a check for a
1726 : load of N bytes from address X would be implemented with a function call to
1727 : __hwasan_loadN(X), and checking a store of N bytes from address X would be
1728 : implemented with a function call to __hwasan_storeN(X).
1729 :
1730 : The main difference between hwasan and asan is in the information stored to
1731 : help this checking. Both sanitizers use a shadow memory area which stores
1732 : data recording the state of main memory at a corresponding address.
1733 :
1734 : For hwasan, each 16 byte granule in main memory has a corresponding 1 byte
1735 : in shadow memory. This shadow address can be calculated with equation:
1736 : (addr >> log_2(HWASAN_TAG_GRANULE_SIZE))
1737 : + __hwasan_shadow_memory_dynamic_address;
1738 : The conversion between real and shadow memory for asan is given in the block
1739 : comment at the top of this file.
1740 : The description of how this shadow memory is laid out for asan is in the
1741 : block comment at the top of this file, here we describe how this shadow
1742 : memory is used for hwasan.
1743 :
1744 : For hwasan, each variable is assigned a byte-sized 'tag'. The extent of
1745 : the shadow memory for that variable is filled with the assigned tag, and
1746 : every pointer referencing that variable has its top byte set to the same
1747 : tag. The run-time library redefines malloc so that every allocation returns
1748 : a tagged pointer and tags the corresponding shadow memory with the same tag.
1749 :
1750 : On each pointer dereference the tag found in the pointer is compared to the
1751 : tag found in the shadow memory corresponding to the accessed memory address.
1752 : If these tags are found to differ then this memory access is judged to be
1753 : invalid and a report is generated.
1754 :
1755 : This method of bug detection is not perfect -- it can not catch every bad
1756 : access -- but catches them probabilistically instead. There is always the
1757 : possibility that an invalid memory access will happen to access memory
1758 : tagged with the same tag as the pointer that this access used.
1759 : The chances of this are approx. 0.4% for any two uncorrelated objects.
1760 :
1761 : Random tag generation can mitigate this problem by decreasing the
1762 : probability that an invalid access will be missed in the same manner over
1763 : multiple runs. i.e. if two objects are tagged the same in one run of the
1764 : binary they are unlikely to be tagged the same in the next run.
1765 : Both heap and stack allocated objects have random tags by default.
1766 :
1767 : [16 byte granule implications]
1768 : Since the shadow memory only has a resolution on real memory of 16 bytes,
1769 : invalid accesses that are within the same 16 byte granule as a valid
1770 : address will not be caught.
1771 :
1772 : There is a "short-granule" feature in the runtime library which does catch
1773 : such accesses, but this feature is not implemented for stack objects (since
1774 : stack objects are allocated and tagged by compiler instrumentation, and
1775 : this feature has not yet been implemented in GCC instrumentation).
1776 :
1777 : Another outcome of this 16 byte resolution is that each tagged object must
1778 : be 16 byte aligned. If two objects were to share any 16 byte granule in
1779 : memory, then they both would have to be given the same tag, and invalid
1780 : accesses to one using a pointer to the other would be undetectable.
1781 :
1782 : [Compiler instrumentation]
1783 : Compiler instrumentation ensures that two adjacent buffers on the stack are
1784 : given different tags, this means an access to one buffer using a pointer
1785 : generated from the other (e.g. through buffer overrun) will have mismatched
1786 : tags and be caught by hwasan.
1787 :
1788 : We don't randomly tag every object on the stack, since that would require
1789 : keeping many registers to record each tag. Instead we randomly generate a
1790 : tag for each function frame, and each new stack object uses a tag offset
1791 : from that frame tag.
1792 : i.e. each object is tagged as RFT + offset, where RFT is the "random frame
1793 : tag" generated for this frame.
1794 : This means that randomisation does not perturb the difference between tags
1795 : on tagged stack objects within a frame, but this is mitigated by the fact
1796 : that objects with the same tag within a frame are very far apart
1797 : (approx. 2^HWASAN_TAG_SIZE objects apart).
1798 :
1799 : As a demonstration, using the same example program as in the asan block
1800 : comment above:
1801 :
1802 : int
1803 : foo ()
1804 : {
1805 : char a[24] = {0};
1806 : int b[2] = {0};
1807 :
1808 : a[5] = 1;
1809 : b[1] = 2;
1810 :
1811 : return a[5] + b[1];
1812 : }
1813 :
1814 : On AArch64 the stack will be ordered as follows for the above function:
1815 :
1816 : Slot 1/ [24 bytes for variable 'a']
1817 : Slot 2/ [8 bytes padding for alignment]
1818 : Slot 3/ [8 bytes for variable 'b']
1819 : Slot 4/ [8 bytes padding for alignment]
1820 :
1821 : (The padding is there to ensure 16 byte alignment as described in the 16
1822 : byte granule implications).
1823 :
1824 : While the shadow memory will be ordered as follows:
1825 :
1826 : - 2 bytes (representing 32 bytes in real memory) tagged with RFT + 1.
1827 : - 1 byte (representing 16 bytes in real memory) tagged with RFT + 2.
1828 :
1829 : And any pointer to "a" will have the tag RFT + 1, and any pointer to "b"
1830 : will have the tag RFT + 2.
1831 :
1832 : [Top Byte Ignore requirements]
1833 : Hwasan requires the ability to store an 8 bit tag in every pointer. There
1834 : is no instrumentation done to remove this tag from pointers before
1835 : dereferencing, which means the hardware must ignore this tag during memory
1836 : accesses.
1837 :
1838 : Architectures where this feature is available should indicate this using
1839 : the TARGET_MEMTAG_CAN_TAG_ADDRESSES hook.
1840 :
1841 : [Stack requires cleanup on unwinding]
1842 : During normal operation of a hwasan sanitized program more space in the
1843 : shadow memory becomes tagged as the stack grows. As the stack shrinks this
1844 : shadow memory space must become untagged. If it is not untagged then when
1845 : the stack grows again (during other function calls later on in the program)
1846 : objects on the stack that are usually not tagged (e.g. parameters passed on
1847 : the stack) can be placed in memory whose shadow space is tagged with
1848 : something else, and accesses can cause false positive reports.
1849 :
1850 : Hence we place untagging code on every epilogue of functions which tag some
1851 : stack objects.
1852 :
1853 : Moreover, the run-time library intercepts longjmp & setjmp to untag when
1854 : the stack is unwound this way.
1855 :
1856 : C++ exceptions are not yet handled, which means this sanitizer can not
1857 : handle C++ code that throws exceptions -- it will give false positives
1858 : after an exception has been thrown. The implementation that the hwasan
1859 : library has for handling these relies on the frame pointer being after any
1860 : local variables. This is not generally the case for GCC. */
1861 :
1862 :
1863 : /* Returns whether we are tagging pointers and checking those tags on memory
1864 : access. */
bool
hwasan_sanitize_p ()
{
  /* A thin query of the global sanitizer flags; this is on very hot
     paths (called for every candidate memory access), so it must stay
     a single cheap test.  */
  return sanitize_flags_p (SANITIZE_HWADDRESS);
}
1870 :
1871 : /* Are we tagging the stack? */
1872 : bool
1873 27227313 : hwasan_sanitize_stack_p ()
1874 : {
1875 27227313 : return (hwasan_sanitize_p () && param_hwasan_instrument_stack);
1876 : }
1877 :
1878 : /* Are we tagging alloca objects? */
1879 : bool
1880 1472375 : hwasan_sanitize_allocas_p (void)
1881 : {
1882 1472375 : return (hwasan_sanitize_stack_p () && param_hwasan_instrument_allocas);
1883 : }
1884 :
1885 : /* Should we instrument reads? */
1886 : bool
1887 373 : hwasan_instrument_reads (void)
1888 : {
1889 373 : return (hwasan_sanitize_p () && param_hwasan_instrument_reads);
1890 : }
1891 :
1892 : /* Should we instrument writes? */
1893 : bool
1894 214 : hwasan_instrument_writes (void)
1895 : {
1896 214 : return (hwasan_sanitize_p () && param_hwasan_instrument_writes);
1897 : }
1898 :
1899 : /* Should we instrument builtin calls? */
1900 : bool
1901 95 : hwasan_memintrin (void)
1902 : {
1903 95 : return (hwasan_sanitize_p () && param_hwasan_instrument_mem_intrinsics);
1904 : }
1905 :
1906 : /* MEMoryTAGging sanitizer (MEMTAG) uses a hardware based capability known as
1907 : memory tagging to detect memory safety vulnerabilities. Similar to HWASAN,
1908 : it is also a probabilistic method.
1909 :
1910 : MEMTAG relies on the optional extension in armv8.5a known as MTE (Memory
1911 : Tagging Extension). The extension is available in AArch64 only and
1912 : introduces two types of tags:
1913 : - Logical Address Tag - bits 56-59 (TARGET_MEMTAG_TAG_BITSIZE) of the
1914 : virtual address.
1915 : - Allocation Tag - 4 bits for each tag granule (TARGET_MEMTAG_GRANULE_SIZE
1916 : set to 16 bytes), stored separately.
1917 : Load / store instructions raise an exception if tags differ, thereby
1918 : providing a faster way (than HWASAN) to detect memory safety issues.
1919 : Further, new instructions are available in MTE to manipulate (generate,
1920 : update address with) tags. Load / store instructions with SP base register
1921 : and immediate offset do not check tags.
1922 :
1923 : PS: Currently, MEMTAG sanitizer is capable of stack (variable / memory)
1924 : tagging only.
1925 :
1926 : In general, detecting stack-related memory bugs requires the compiler to:
1927 : - ensure that each tag granule is only used by one variable at a time.
1928 : This includes alloca.
1929 : - Tag/Color: put tags into each stack variable pointer.
1930 : - Untag: the function epilogue will retag the memory.
1931 :
1932 : MEMTAG sanitizer is based off the HWASAN sanitizer implementation
1933 : internally. Similar to HWASAN:
1934 : - Assigning an independently random tag to each variable is carried out by
1935 : keeping a tagged base pointer. A tagged base pointer allows addressing
1936 : variables with (addr offset, tag offset).
1937 : */
1938 :
1939 : /* Returns whether we are tagging pointers and checking those tags on memory
1940 : access. */
bool
memtag_sanitize_p ()
{
  /* Thin wrapper around the global sanitizer flag check, mirroring
     hwasan_sanitize_p above for the MTE-based sanitizer.  */
  return sanitize_flags_p (SANITIZE_MEMTAG);
}
1946 :
1947 : /* Are we tagging the stack? */
1948 : bool
1949 25737496 : memtag_sanitize_stack_p ()
1950 : {
1951 25737496 : return (sanitize_flags_p (SANITIZE_MEMTAG_STACK));
1952 : }
1953 :
1954 : /* Are we tagging alloca objects? */
1955 : bool
1956 416 : memtag_sanitize_allocas_p (void)
1957 : {
1958 416 : return (memtag_sanitize_stack_p () && param_memtag_instrument_allocas);
1959 : }
1960 :
1961 : /* Are we instrumenting mem intrinsics (memset/memcpy etc.) for memtag? */
1962 : bool
1963 24 : memtag_memintrin (void)
1964 : {
1965 24 : return (memtag_sanitize_p () && param_memtag_instrument_mem_intrinsics);
1966 : }
1967 :
1968 : /* Returns whether we are tagging pointers and checking those tags on memory
1969 : access. */
1970 : bool
1971 70841 : hwassist_sanitize_p ()
1972 : {
1973 70841 : return (hwasan_sanitize_p () || memtag_sanitize_p ());
1974 : }
1975 :
1976 : /* Are we tagging stack objects for hwasan or memtag? */
1977 : bool
1978 25741511 : hwassist_sanitize_stack_p ()
1979 : {
1980 25741511 : return (hwasan_sanitize_stack_p () || memtag_sanitize_stack_p ());
1981 : }
1982 :
1983 : /* Insert code to protect stack vars. The prologue sequence should be emitted
1984 : directly, epilogue sequence returned. BASE is the register holding the
1985 : stack base, against which OFFSETS array offsets are relative to, OFFSETS
1986 : array contains pairs of offsets in reverse order, always the end offset
1987 : of some gap that needs protection followed by starting offset,
1988 : and DECLS is an array of representative decls for each var partition.
1989 : LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1990 : elements long (OFFSETS include gap before the first variable as well
1991 : as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1992 : register which stack vars DECL_RTLs are based on. Either BASE should be
1993 : assigned to PBASE, when not doing use after return protection, or
1994 : corresponding address based on __asan_stack_malloc* return value. */
1995 :
rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[32];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size, last_size_aligned;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  /* Don't emit anything when doing error recovery, the assertions
     might fail e.g. if a function had a frame offset overflow.  */
  if (seen_error ())
    return NULL;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  expanded_location cfun_xloc
    = expand_location (DECL_SOURCE_LOCATION (current_function_decl));

  /* First of all, prepare the description string: for each variable
     partition we print "<offset> <size> <name-len> <name>[:<line>]",
     preceded by the number of variables.  The runtime parses this to
     report which variable a bad access hit.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);

      expanded_location xloc
	= expand_location (DECL_SOURCE_LOCATION (decl));
      char location[32];

      /* Only print the line number when the decl is in the same file
	 as the function itself.  */
      if (xloc.file == cfun_xloc.file)
	sprintf (location, ":%d", xloc.line);
      else
	location[0] = '\0';

      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  unsigned idlen
	    = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
	  pp_decimal_int (&asan_pp, idlen);
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	  pp_string (&asan_pp, location);
	}
      else
	pp_string (&asan_pp, "9 <unknown>");

      if (l > 2)
	pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  gcc_checking_assert (offsets[0] == (crtl->stack_protect_guard
				      ? -ASAN_RED_ZONE_SIZE : 0));
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && param_asan_use_after_return)
    {
      HOST_WIDE_INT adjusted_frame_size = asan_frame_size;
      /* The stack protector guard is allocated at the top of the frame
	 and cfgexpand.cc then uses align_frame_offset (ASAN_RED_ZONE_SIZE);
	 while in that case we can still use asan_frame_size, we need to take
	 that into account when computing base_align_bias.  */
      if (alignb > ASAN_RED_ZONE_SIZE && crtl->stack_protect_guard)
	adjusted_frame_size += ASAN_RED_ZONE_SIZE;
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE
	       && (adjusted_frame_size & (alignb - 1)))
	{
	  /* Bias the frame so the stricter alignment is preserved on the
	     fake stack; recompute the class for the biased size.  */
	  base_align_bias
	    = ((adjusted_frame_size + alignb - 1)
	       & ~(alignb - HOST_WIDE_INT_1)) - adjusted_frame_size;
	  use_after_return_class
	    = floor_log2 (asan_frame_size + base_align_bias - 1) - 5;
	  if (use_after_return_class > 10)
	    {
	      base_align_bias = 0;
	      use_after_return_class = -1;
	    }
	}
    }

  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    {
      const HOST_WIDE_INT align
	= (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
      base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
			   NULL_RTX, 1, OPTAB_DIRECT);
    }

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      /* Lazily create the external flag variable the runtime uses to
	 toggle use-after-return detection.  */
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab,
			       profile_probability::very_likely ());
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node));
      /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
	 and NULL otherwise.  Check RET value is NULL here and jump over the
	 BASE reassignment in this case.  Otherwise, reassign BASE to RET.  */
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab,
			       profile_probability:: very_unlikely ());
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  /* Write the frame header: magic word, description string pointer and
     the per-function LASANPC label address.  */
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  DECL_ALIGN_RAW (decl) = DECL_ALIGN_RAW (current_function_decl);
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  if (asan_dynamic_shadow_offset_p ())
    {
      ret = expand_normal (get_asan_shadow_memory_dynamic_address_decl ());
      shadow_base
	= expand_simple_binop (Pmode, PLUS, shadow_base, ret, NULL_RTX,
			       /* unsignedp = */ 1, OPTAB_WIDEN);
      shadow_base = plus_constant (Pmode, shadow_base,
				   (base_align_bias >> ASAN_SHADOW_SHIFT));
    }
  else
    {
      shadow_base = plus_constant (Pmode, shadow_base,
				   asan_shadow_offset ()
				   + (base_align_bias >> ASAN_SHADOW_SHIFT));
    }
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;

  /* Poison the red zones: walk the OFFSETS pairs and emit shadow bytes
     for each gap, buffered through RZ_BUFFER_SIZE-byte stores.  */
  asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];

      bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
      /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
	 the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
	 In that case we have to emit one extra byte that will describe
	 how many bytes (out of ASAN_SHADOW_GRANULARITY) can be accessed.  */
      if (extra_byte)
	{
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
	  rz_buffer.emit_redzone_byte (aoff, offset - aoff);
	  offset = aoff + ASAN_SHADOW_GRANULARITY;
	}

      /* Calculate size of red zone payload.  */
      while (offset < offsets[l - 2])
	{
	  rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
	  offset += ASAN_SHADOW_GRANULARITY;
	}

      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }

  /* As the automatic variables are aligned to
     ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
     flushed here.  */
  gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());

  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      /* If BASE was reassigned to the fake stack (orig_base != base),
	 retire the frame there; otherwise skip to lab2.  */
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2,
			       profile_probability::very_likely ());
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      bool asan_stack_free_emitted_p = false;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	/* Emit memset (ShadowBase, kAsanStackAfterReturnMagic, ShadowSize).  */
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, RETURN_BEGIN);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	  asan_stack_free_emitted_p = true;
	}
      if (!asan_stack_free_emitted_p)
	{
	  /* Emit **SavedFlagPtr (FakeStack, class_id) = 0.  */
	  unsigned HOST_WIDE_INT offset = (1 << (use_after_return_class + 6));
	  offset -= GET_MODE_SIZE (ptr_mode);
	  mem = gen_rtx_MEM (ptr_mode, base);
	  mem = adjust_address (mem, ptr_mode, offset);
	  rtx addr = gen_reg_rtx (ptr_mode);
	  emit_move_insn (addr, mem);
	  addr = convert_memory_address (Pmode, addr);
	  mem = gen_rtx_MEM (QImode, addr);
	  emit_move_insn (mem, const0_rtx);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  /* Unpoison the frame's shadow memory, coalescing adjacent ranges so
     asan_clear_shadow is called as few times as possible.  */
  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  last_size_aligned = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size_aligned < offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      else
	last_size = offset - last_offset;
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;

      /* Unpoison shadow memory that corresponds to a variable that is
	 subject of use-after-return sanitization.  */
      if (l > 2)
	{
	  decl = decls[l / 2 - 2];
	  if (asan_handled_variables != NULL
	      && asan_handled_variables->contains (decl))
	    {
	      HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  const char *n = (DECL_NAME (decl)
				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
				   : "<unknown>");
		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
			   "%s (%" PRId64 " B)\n", n, size);
		}

	      last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
	    }
	}
      last_size_aligned
	= ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
	   & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
    }
  if (last_size_aligned)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
    }

  /* Clean-up set with instrumented stack variables.  */
  delete asan_handled_variables;
  asan_handled_variables = NULL;
  delete asan_used_labels;
  asan_used_labels = NULL;

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = end_sequence ();
  return insns;
}
2382 :
2383 : /* Emit __asan_allocas_unpoison (top, bot) call.  BOT is the base of the
2384 : dynamic allocas area; for TOP virtual_stack_dynamic_rtx is used.  If BEFORE
2385 : is non-NULL the call is appended to that existing insn sequence, otherwise
2386 : a new sequence is started.  */
2386 :
2387 : rtx_insn *
2388 182 : asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
2389 : {
2390 182 : if (before)
2391 38 : push_to_sequence (before);
2392 : else
2393 144 : start_sequence ();
2394 182 : rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
2395 182 : top = convert_memory_address (ptr_mode, top);
2396 182 : bot = convert_memory_address (ptr_mode, bot);
2397 182 : emit_library_call (ret, LCT_NORMAL, ptr_mode,
2398 : top, ptr_mode, bot, ptr_mode);
2399 :
2400 182 : do_pending_stack_adjust ();
2401 182 : return end_sequence ();
2402 : }
2403 :
2404 : /* Return true if DECL, a global var, might be overridden and needs
2405 : therefore a local alias. */
2406 :
2407 : static bool
2408 3908 : asan_needs_local_alias (tree decl)
2409 : {
2410 3908 : return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
2411 : }
2412 :
2413 : /* Return true if DECL, a global var, is an artificial ODR indicator symbol
2414 : therefore doesn't need protection. */
2415 :
2416 : static bool
2417 8098 : is_odr_indicator (tree decl)
2418 : {
2419 8098 : return (DECL_ARTIFICIAL (decl)
2420 8098 : && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
2421 : }
2422 :
2423 : /* Return true if DECL is a VAR_DECL that should be protected
2424 : by Address Sanitizer, by appending a red zone with protected
2425 : shadow memory after it and aligning it to at least
2426 : ASAN_RED_ZONE_SIZE bytes. */
2427 :
bool
asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
{
  if (!param_asan_globals)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  /* One big disqualification chain; the order roughly goes from cheap
     flag tests to more expensive lookups.  */
  if (!VAR_P (decl)
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      /* PR sanitizer/81697: For architectures that use section anchors first
	 call to asan_protect_global may occur before DECL_RTL (decl) is set.
	 We should ignore DECL_RTL_SET_P then, because otherwise the first call
	 to asan_protect_global will return FALSE and the following calls on the
	 same decl after setting DECL_RTL (decl) will return TRUE and we'll end
	 up with inconsistency at runtime.  */
      || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      /* Don't protect variables in non-generic address-space.  */
      || !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ()
      || is_odr_indicator (decl))
    return false;

  if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
    {
      /* When DECL_RTL is available, require ordinary static storage:
	 a MEM of a SYMBOL_REF that is not in the constant pool.  */
      rtl = DECL_RTL (decl);
      if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
	return false;
      symbol = XEXP (rtl, 0);

      if (CONSTANT_POOL_ADDRESS_P (symbol)
	  || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	return false;
    }

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

  /* Without alias support we cannot emit the local alias an
     interposable global would need; refuse protection then.  */
  if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
    return false;

  return true;
}
2505 :
2506 : /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
2507 : IS_STORE is either 1 (for a store) or 0 (for a load). */
2508 :
2509 : static tree
2510 13022 : report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2511 : int *nargs)
2512 : {
2513 13022 : gcc_assert (!hwassist_sanitize_p ());
2514 :
2515 13022 : static enum built_in_function report[2][2][6]
2516 : = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
2517 : BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
2518 : BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
2519 : { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
2520 : BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
2521 : BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
2522 : { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
2523 : BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
2524 : BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
2525 : BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
2526 : BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
2527 : BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
2528 : { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
2529 : BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
2530 : BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
2531 : BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
2532 : BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
2533 : BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
2534 13022 : if (size_in_bytes == -1)
2535 : {
2536 735 : *nargs = 2;
2537 735 : return builtin_decl_implicit (report[recover_p][is_store][5]);
2538 : }
2539 12287 : *nargs = 1;
2540 12287 : int size_log2 = exact_log2 (size_in_bytes);
2541 12287 : return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
2542 : }
2543 :
2544 : /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
2545 : IS_STORE is either 1 (for a store) or 0 (for a load). */
2546 :
2547 : static tree
2548 103 : check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2549 : int *nargs)
2550 : {
2551 103 : static enum built_in_function check[2][2][6]
2552 : = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
2553 : BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
2554 : BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
2555 : { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
2556 : BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
2557 : BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
2558 : { { BUILT_IN_ASAN_LOAD1_NOABORT,
2559 : BUILT_IN_ASAN_LOAD2_NOABORT,
2560 : BUILT_IN_ASAN_LOAD4_NOABORT,
2561 : BUILT_IN_ASAN_LOAD8_NOABORT,
2562 : BUILT_IN_ASAN_LOAD16_NOABORT,
2563 : BUILT_IN_ASAN_LOADN_NOABORT },
2564 : { BUILT_IN_ASAN_STORE1_NOABORT,
2565 : BUILT_IN_ASAN_STORE2_NOABORT,
2566 : BUILT_IN_ASAN_STORE4_NOABORT,
2567 : BUILT_IN_ASAN_STORE8_NOABORT,
2568 : BUILT_IN_ASAN_STORE16_NOABORT,
2569 : BUILT_IN_ASAN_STOREN_NOABORT } } };
2570 103 : if (size_in_bytes == -1)
2571 : {
2572 28 : *nargs = 2;
2573 28 : return builtin_decl_implicit (check[recover_p][is_store][5]);
2574 : }
2575 75 : *nargs = 1;
2576 75 : int size_log2 = exact_log2 (size_in_bytes);
2577 75 : return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
2578 : }
2579 :
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  /* When inserting before *ITER, split after the previous statement so
     that the statement *ITER points at lands in the fallthrough
     block.  */
  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      /* Keep the loop structure consistent; the new block inherits
	 cond_bb's loop and a later fixup recomputes the rest.  */
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  profile_probability fallthrough_probability
    = then_more_likely_p
    ? profile_probability::very_unlikely ()
    : profile_probability::very_likely ();
  e->probability = fallthrough_probability.invert ();
  then_bb->count = e->count ();
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
2661 :
2662 : /* Insert an if condition followed by a 'then block' right before the
2663 : statement pointed to by ITER. The fallthrough block -- which is the
2664 : else block of the condition as well as the destination of the
2665 : outcoming edge of the 'then block' -- starts with the statement
2666 : pointed to by ITER.
2667 :
2668 : COND is the condition of the if.
2669 :
2670 : If THEN_MORE_LIKELY_P is true, the probability of the edge to the
2671 : 'then block' is higher than the probability of the edge to the
2672 : fallthrough block.
2673 :
2674 : Upon completion of the function, *THEN_BB is set to the newly
2675 : inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
2676 : fallthrough block.
2677 :
2678 : *ITER is adjusted to still point to the same statement it was
2679 : pointing to initially. */
2680 :
2681 : static void
2682 0 : insert_if_then_before_iter (gcond *cond,
2683 : gimple_stmt_iterator *iter,
2684 : bool then_more_likely_p,
2685 : basic_block *then_bb,
2686 : basic_block *fallthrough_bb)
2687 : {
2688 0 : gimple_stmt_iterator cond_insert_point =
2689 0 : create_cond_insert_point (iter,
2690 : /*before_p=*/true,
2691 : then_more_likely_p,
2692 : /*create_then_fallthru_edge=*/true,
2693 : then_bb,
2694 : fallthrough_bb);
2695 0 : gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2696 0 : }
2697 :
/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
   If RETURN_ADDRESS is set to true, return the shadow memory location
   instead of the value loaded from it.  New statements are emitted
   after *GSI, which is left pointing at the last one.  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type,
			 bool return_address = false)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple *g;

  /* tmp = base_addr >> ASAN_SHADOW_SHIFT;  */
  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* The shadow offset is either a compile-time constant or, with a
     dynamic shadow, a value computed at function entry.  */
  if (asan_dynamic_shadow_offset_p ())
    t = asan_local_shadow_memory_dynamic_address;
  else
    t = build_int_cst (uintptr_type, asan_shadow_offset ());
  /* tmp2 = tmp + shadow_offset;  */
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* Convert the integer address to the shadow pointer type.  */
  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* Load the shadow value unless only the address was requested.  */
  if (!return_address)
    {
      t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
		  build_int_cst (shadow_ptr_type, 0));
      g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
      gimple_set_location (g, location);
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }

  return gimple_assign_lhs (g);
}
2742 :
2743 : /* BASE can already be an SSA_NAME; in that case, do not create a
2744 : new SSA_NAME for it. */
2745 :
2746 : static tree
2747 13653 : maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2748 : bool before_p)
2749 : {
2750 13653 : STRIP_USELESS_TYPE_CONVERSION (base);
2751 13653 : if (TREE_CODE (base) == SSA_NAME)
2752 : return base;
2753 11901 : gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)), base);
2754 11901 : gimple_set_location (g, loc);
2755 11901 : if (before_p)
2756 11901 : gsi_safe_insert_before (iter, g);
2757 : else
2758 0 : gsi_insert_after (iter, g, GSI_NEW_STMT);
2759 11901 : return gimple_assign_lhs (g);
2760 : }
2761 :
2762 : /* LEN can already have necessary size and precision;
2763 : in that case, do not create a new variable. */
2764 :
2765 : tree
2766 0 : maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2767 : bool before_p)
2768 : {
2769 0 : if (ptrofftype_p (len))
2770 : return len;
2771 0 : gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2772 : NOP_EXPR, len);
2773 0 : gimple_set_location (g, loc);
2774 0 : if (before_p)
2775 0 : gsi_safe_insert_before (iter, g);
2776 : else
2777 0 : gsi_insert_after (iter, g, GSI_NEW_STMT);
2778 0 : return gimple_assign_lhs (g);
2779 : }
2780 :
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   non-zero length.  ALIGN tells the alignment of the accessed memory
   object (0 when unknown).

   SIZE_IN_BYTES is the statically known access size, or -1 when the
   size is only known at run time, in which case LEN supplies it.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

static void
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)
{
  gimple *g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
  gcc_assert (size_in_bytes == -1 || size_in_bytes >= 1);

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, iter, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      /* Without a runtime length, the size must be a compile-time
	 constant; materialize it as the LEN argument.  */
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  /* A scalar check can only be expanded as a single shadow read when
     the size is a power of two no larger than 16 and the alignment is
     sufficient; otherwise downgrade to a region check.  */
  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if
	     16-byte access is just 8-byte aligned,
	     this will result in misaligned shadow
	     memory 2 byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  /* Encode the access properties into the FLAGS argument of the check
     internal function.  */
  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  /* Use the HWASAN check ifn when a hardware-assisted sanitizer is
     active, the plain ASAN one otherwise.  */
  enum internal_fn fn = hwassist_sanitize_p ()
    ? IFN_HWASAN_CHECK
    : IFN_ASAN_CHECK;

  g = gimple_build_call_internal (fn, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_safe_insert_before (iter, g);
  else
    {
      gsi_insert_after (iter, g, GSI_NEW_STMT);
      gsi_next (iter);
    }
}
2870 :
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  /* Respect the flags that disable instrumenting writes or reads for
     the active sanitizer.  */
  if (is_store && !(asan_instrument_writes () || hwasan_instrument_writes ()))
    return;
  if (!is_store && !(asan_instrument_reads () || hwasan_instrument_reads ()))
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;
  if (location == UNKNOWN_LOCATION)
    location = EXPR_LOCATION (t);

  type = TREE_TYPE (t);
  /* Only these tree codes denote a memory access we know how to
     instrument; anything else is left alone.  */
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
				    &unsignedp, &reversep, &volatilep);

  /* Bit-field accesses are instrumented via the representative field,
     which covers whole bytes.  */
  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       TREE_OPERAND (t, 2)),
			 location, is_store);
      return;
    }

  /* Give up on accesses that are not byte-aligned or do not cover the
     whole object.  */
  if (!multiple_p (bitpos, BITS_PER_UNIT)
      || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
    return;

  /* Hard-register variables live outside addressable memory.  */
  if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
    return;

  /* Accesses to non-generic address-spaces should not be instrumented.  */
  if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (inner))))
    return;

  /* When the access is statically known to lie entirely within a decl,
     we can often prove it is always valid and skip instrumentation.  */
  poly_int64 decl_size;
  if ((VAR_P (inner)
       || (TREE_CODE (inner) == RESULT_DECL
	   && !aggregate_value_p (inner, current_function_decl)))
      && offset == NULL_TREE
      && DECL_SIZE (inner)
      && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
      && known_subrange_p (bitpos, bitsize, 0, decl_size))
    {
      if (VAR_P (inner) && DECL_THREAD_LOCAL_P (inner))
	return;
      /* If we're not sanitizing globals and we can tell statically that this
	 access is inside a global variable, then there's no point adding
	 instrumentation to check the access.  N.b. hwasan currently never
	 sanitizes globals.  */
      if ((hwassist_sanitize_p () || !param_asan_globals)
	  && is_global_var (inner))
        return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl
	      && (!asan_sanitize_use_after_scope ()
		  || !TREE_ADDRESSABLE (inner)))
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  /* Taking the object's address below requires it to be marked
     addressable.  */
  if (DECL_P (inner)
      && decl_function_context (inner) == current_function_decl
      && !TREE_ADDRESSABLE (inner))
    mark_addressable (inner);

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      /* Record both the address form and the reference itself so later
	 identical accesses in this extended basic block are skipped.  */
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }

}
2993 :
2994 : /* Insert a memory reference into the hash table if access length
2995 : can be determined in compile time. */
2996 :
2997 : static void
2998 1287 : maybe_update_mem_ref_hash_table (tree base, tree len)
2999 : {
3000 1347 : if (!POINTER_TYPE_P (TREE_TYPE (base))
3001 1347 : || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
3002 : return;
3003 :
3004 1287 : HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3005 :
3006 360 : if (size_in_bytes != -1)
3007 360 : update_mem_ref_hash_table (base, size_in_bytes);
3008 : }
3009 :
3010 : /* Instrument an access to a contiguous memory region that starts at
3011 : the address pointed to by BASE, over a length of LEN (expressed in
3012 : the sizeof (*BASE) bytes). ITER points to the instruction before
3013 : which the instrumentation instructions must be inserted. LOCATION
3014 : is the source location that the instrumentation instructions must
3015 : have. If IS_STORE is true, then the memory access is a store;
3016 : otherwise, it's a load. */
3017 :
3018 : static void
3019 0 : instrument_mem_region_access (tree base, tree len,
3020 : gimple_stmt_iterator *iter,
3021 : location_t location, bool is_store)
3022 : {
3023 0 : if (!POINTER_TYPE_P (TREE_TYPE (base))
3024 0 : || !INTEGRAL_TYPE_P (TREE_TYPE (len))
3025 0 : || integer_zerop (len))
3026 0 : return;
3027 :
3028 0 : HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3029 :
3030 0 : if ((size_in_bytes == -1)
3031 0 : || !has_mem_ref_been_instrumented (base, size_in_bytes))
3032 : {
3033 0 : build_check_stmt (location, base, len, size_in_bytes, iter,
3034 : /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
3035 : is_store, /*is_scalar_access*/false, /*align*/0);
3036 : }
3037 :
3038 0 : maybe_update_mem_ref_hash_table (base, len);
3039 0 : *iter = gsi_for_stmt (gsi_stmt (*iter));
3040 : }
3041 :
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  /* Nothing to do when memory intrinsics are not instrumented for the
     active sanitizer.  */
  if (!(asan_memintrin () || hwasan_memintrin ()
	|| memtag_memintrin ()))
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  /* Memory references the builtin may touch: up to two sources and one
     destination.  */
  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p, iter))
    {
      if (dest_is_deref)
	{
	  /* The builtin dereferences a single scalar location.  */
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  /* Not intercepted by the runtime library: emit explicit
	     region checks for every referenced range.  */
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  /* Checked by the library interceptor; just record the
	     references so later identical accesses in this extended
	     basic block are not re-instrumented.  */
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}
3112 :
3113 : /* Instrument the assignment statement ITER if it is subject to
3114 : instrumentation. Return TRUE iff instrumentation actually
3115 : happened. In that case, the iterator ITER is advanced to the next
3116 : logical expression following the one initially pointed to by ITER,
3117 : and the relevant memory reference that which access has been
3118 : instrumented is added to the memory references hash table. */
3119 :
3120 : static bool
3121 22485 : maybe_instrument_assignment (gimple_stmt_iterator *iter)
3122 : {
3123 22485 : gimple *s = gsi_stmt (*iter);
3124 :
3125 22485 : gcc_assert (gimple_assign_single_p (s));
3126 :
3127 22485 : tree ref_expr = NULL_TREE;
3128 22485 : bool is_store, is_instrumented = false;
3129 :
3130 22485 : if (gimple_store_p (s))
3131 : {
3132 9589 : ref_expr = gimple_assign_lhs (s);
3133 9589 : is_store = true;
3134 9589 : instrument_derefs (iter, ref_expr,
3135 : gimple_location (s),
3136 : is_store);
3137 9589 : is_instrumented = true;
3138 : }
3139 :
3140 22485 : if (gimple_assign_load_p (s))
3141 : {
3142 11064 : ref_expr = gimple_assign_rhs1 (s);
3143 11064 : is_store = false;
3144 11064 : instrument_derefs (iter, ref_expr,
3145 : gimple_location (s),
3146 : is_store);
3147 11064 : is_instrumented = true;
3148 : }
3149 :
3150 22485 : if (is_instrumented)
3151 20187 : gsi_next (iter);
3152 :
3153 22485 : return is_instrumented;
3154 : }
3155 :
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple *stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_UNREACHABLE_TRAP:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      if (gimple_call_internal_p (stmt, IFN_ABNORMAL_DISPATCHER))
	/* Don't instrument this.  */
	return false;
      /* If a function does not return, then we must handle clearing up the
	 shadow stack accordingly.  For ASAN we can simply set the entire stack
	 to "valid" for accesses by setting the shadow space to 0 and all
	 accesses will pass checks.  That means that some bad accesses may be
	 missed, but we will not report any false positives.

	 This is not possible for HWASAN.  Since there is no "always valid" tag
	 we can not set any space to "always valid".  If we were to clear the
	 entire shadow stack then code resuming from `longjmp` or a caught
	 exception would trigger false positives when correctly accessing
	 variables on the stack.  Hence we need to handle things like
	 `longjmp`, thread exit, and exceptions in a different way.  These
	 problems must be handled externally to the compiler, e.g. in the
	 language runtime.  */
      if (! hwassist_sanitize_p ())
	{
	  tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
	  gimple *g = gimple_build_call (decl, 0);
	  gimple_set_location (g, gimple_location (stmt));
	  gsi_safe_insert_before (iter, g);
	}
    }

  bool instrumented = false;
  /* A call whose aggregate result is stored directly through the LHS
     performs a memory store that needs checking.  */
  if (gimple_store_p (stmt)
      && (gimple_call_builtin_p (stmt)
	  || gimple_call_internal_p (stmt)
	  || !aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
				 gimple_call_fntype (stmt))))
    {
      tree ref_expr = gimple_call_lhs (stmt);
      instrument_derefs (iter, ref_expr,
			 gimple_location (stmt),
			 /*is_store=*/true);

      instrumented = true;
    }

  /* Walk through gimple_call arguments and check them if needed.  */
  unsigned args_num = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < args_num; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* If ARG is not a non-aggregate register variable, compiler in general
	 creates temporary for it and pass it as argument to gimple call.
	 But in some cases, e.g. when we pass by value a small structure that
	 fits to register, compiler can avoid extra overhead by pulling out
	 these temporaries.  In this case, we should check the argument.  */
      if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
	{
	  instrument_derefs (iter, arg,
			     gimple_location (stmt),
			     /*is_store=*/false);
	  instrumented = true;
	}
    }
  if (instrumented)
    gsi_next (iter);
  return instrumented;
}
3252 :
/* Walk each instruction of all basic block and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  /* Blocks created by the instrumentation itself get indices at or
     above this value; they are skipped during the walk.  */
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple *s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /*  Nothing to do as maybe_instrument_assignment advanced
		the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /*  Nothing to do as maybe_instrument_call
		advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  Do the same
		 for a ASAN_MARK poisoning internal function.  */
	      if (is_gimple_call (s)
		  && (!nonfreeing_call_p (s)
		      || asan_mark_p (s, ASAN_MARK_POISON)))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
3320 :
3321 : /* Build
3322 : __asan_before_dynamic_init (module_name)
3323 : or
3324 : __asan_after_dynamic_init ()
3325 : call. */
3326 :
3327 : tree
3328 42 : asan_dynamic_init_call (bool after_p)
3329 : {
3330 42 : if (shadow_ptr_types[0] == NULL_TREE)
3331 21 : asan_init_shadow_ptr_types ();
3332 :
3333 63 : tree fn = builtin_decl_implicit (after_p
3334 : ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
3335 : : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
3336 42 : tree module_name_cst = NULL_TREE;
3337 42 : if (!after_p)
3338 : {
3339 21 : pretty_printer module_name_pp;
3340 21 : pp_string (&module_name_pp, main_input_filename);
3341 :
3342 21 : module_name_cst = asan_pp_string (&module_name_pp);
3343 21 : module_name_cst = fold_convert (const_ptr_type_node,
3344 : module_name_cst);
3345 21 : }
3346 :
3347 42 : return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
3348 : }
3349 :
3350 : /* Build
3351 : struct __asan_global
3352 : {
3353 : const void *__beg;
3354 : uptr __size;
3355 : uptr __size_with_redzone;
3356 : const void *__name;
3357 : const void *__module_name;
3358 : uptr __has_dynamic_init;
3359 : __asan_global_source_location *__location;
3360 : char *__odr_indicator;
3361 : } type. */
3362 :
3363 : static tree
3364 1054 : asan_global_struct (void)
3365 : {
3366 1054 : static const char *field_names[]
3367 : = { "__beg", "__size", "__size_with_redzone",
3368 : "__name", "__module_name", "__has_dynamic_init", "__location",
3369 : "__odr_indicator" };
3370 1054 : tree fields[ARRAY_SIZE (field_names)], ret;
3371 1054 : unsigned i;
3372 :
3373 1054 : ret = make_node (RECORD_TYPE);
3374 10540 : for (i = 0; i < ARRAY_SIZE (field_names); i++)
3375 : {
3376 8432 : fields[i]
3377 8432 : = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
3378 : get_identifier (field_names[i]),
3379 8432 : (i == 0 || i == 3) ? const_ptr_type_node
3380 : : pointer_sized_int_node);
3381 8432 : DECL_CONTEXT (fields[i]) = ret;
3382 8432 : if (i)
3383 7378 : DECL_CHAIN (fields[i - 1]) = fields[i];
3384 : }
3385 1054 : tree type_decl = build_decl (input_location, TYPE_DECL,
3386 : get_identifier ("__asan_global"), ret);
3387 1054 : DECL_IGNORED_P (type_decl) = 1;
3388 1054 : DECL_ARTIFICIAL (type_decl) = 1;
3389 1054 : TYPE_FIELDS (ret) = fields[0];
3390 1054 : TYPE_NAME (ret) = type_decl;
3391 1054 : TYPE_STUB_DECL (ret) = type_decl;
3392 1054 : TYPE_ARTIFICIAL (ret) = 1;
3393 1054 : layout_type (ret);
3394 1054 : return ret;
3395 : }
3396 :
3397 : /* Create and return odr indicator symbol for DECL.
3398 : TYPE is __asan_global struct type as returned by asan_global_struct. */
3399 :
static tree
create_odr_indicator (tree decl, tree type)
{
  char *name;
  /* "uptr" is the type of the second field (__size) of TYPE.  */
  tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  tree decl_name
    = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
					: DECL_NAME (decl));
  /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
  if (decl_name == NULL_TREE)
    return build_int_cst (uptr, 0);
  const char *dname = IDENTIFIER_POINTER (decl_name);
  if (HAS_DECL_ASSEMBLER_NAME_P (decl))
    dname = targetm.strip_name_encoding (dname);
  size_t len = strlen (dname) + sizeof ("__odr_asan_");
  name = XALLOCAVEC (char, len);
  snprintf (name, len, "__odr_asan_%s", dname);
  /* Where the assembler allows it, replace the trailing '_' of the prefix
     with '.' or '$' so the indicator name cannot collide with a valid
     user-level identifier.  */
#ifndef NO_DOT_IN_LABEL
  name[sizeof ("__odr_asan") - 1] = '.';
#elif !defined(NO_DOLLAR_IN_LABEL)
  name[sizeof ("__odr_asan") - 1] = '$';
#endif
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
			 char_type_node);
  TREE_ADDRESSABLE (var) = 1;
  TREE_READONLY (var) = 0;
  /* Volatile so accesses to the indicator can't be optimized away.  */
  TREE_THIS_VOLATILE (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  TREE_STATIC (var) = 1;
  /* Public with the same visibility as DECL, so all TUs defining the same
     symbol resolve to one indicator.  */
  TREE_PUBLIC (var) = 1;
  DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
  DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);

  TREE_USED (var) = 1;
  /* Statically initialize the indicator byte to zero.  */
  tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
				    build_int_cst (unsigned_type_node, 0));
  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
				     NULL, DECL_ATTRIBUTES (var));
  make_decl_rtl (var);
  varpool_node::finalize_decl (var);
  /* The __asan_global record stores the indicator's address as a uptr.  */
  return fold_convert (uptr, build_fold_addr_expr (var));
}
3446 :
3447 : /* Return true if DECL, a global var, might be overridden and needs
3448 : an additional odr indicator symbol. */
3449 :
3450 : static bool
3451 3908 : asan_needs_odr_indicator_p (tree decl)
3452 : {
3453 : /* Don't emit ODR indicators for kernel because:
3454 : a) Kernel is written in C thus doesn't need ODR indicators.
3455 : b) Some kernel code may have assumptions about symbols containing specific
3456 : patterns in their names. Since ODR indicators contain original names
3457 : of symbols they are emitted for, these assumptions would be broken for
3458 : ODR indicator symbols. */
3459 3908 : return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
3460 3908 : && !DECL_ARTIFICIAL (decl)
3461 1644 : && !DECL_WEAK (decl)
3462 5552 : && TREE_PUBLIC (decl));
3463 : }
3464 :
3465 : /* Append description of a single global DECL into vector V.
3466 : TYPE is __asan_global struct type as returned by asan_global_struct. */
3467 :
static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  /* "uptr" is the type of the second field (__size) of TYPE.  */
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  /* VINNER accumulates the fields of one __asan_global record, in
     declaration order.  */
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  /* __name: the user-visible name of the global.  */
  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  /* __module_name: the translation unit the global comes from.  */
  if (!in_lto_p)
    pp_string (&module_name_pp, main_input_filename);
  else
    {
      const_tree tu = get_ultimate_context ((const_tree)decl);
      if (tu != NULL_TREE)
	pp_string (&module_name_pp, IDENTIFIER_POINTER (DECL_NAME (tu)));
      else
	pp_string (&module_name_pp, aux_base_name);
    }

  module_name_cst = asan_pp_string (&module_name_pp);

  /* When required, describe the global through a private .LASAN* alias
     (emitted via assemble_alias) rather than the decl itself.  */
  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_NOT_GIMPLE_REG_P (refdecl) = DECL_NOT_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  tree odr_indicator_ptr
    = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
					 : build_int_cst (uptr, 0));
  /* __beg.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  /* __size, then __size_with_redzone.  */
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  /* __name and __module_name.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = 0;
  /* FIXME: Enable initialization order fiasco detection in LTO mode once
     proper fix for PR 79061 will be applied.  */
  if (!in_lto_p)
    has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  /* __has_dynamic_init.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  /* __location: when the decl has a known source location, emit a static
     .LASANLOC* __asan_global_source_location record and point at it.  */
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      /* { filename, line, column }.  */
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  /* __odr_indicator.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
  /* Append the completed record to the outer array V.  */
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
3573 :
3574 : /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  /* If BUILT_IN_ASAN_INIT already resolves, the front end has registered
     the sanitizer builtins and there is nothing to do.  */
  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  /* Function types referenced by the DEF_SANITIZER_BUILTIN entries of
     sanitizer.def, included below.  */
  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);

  tree BT_FN_VOID_UINT8_UINT8
    = build_function_type_list (void_type_node, unsigned_char_type_node,
				unsigned_char_type_node, NULL_TREE);
  tree BT_FN_VOID_UINT16_UINT16
    = build_function_type_list (void_type_node, uint16_type_node,
				uint16_type_node, NULL_TREE);
  tree BT_FN_VOID_UINT32_UINT32
    = build_function_type_list (void_type_node, uint32_type_node,
				uint32_type_node, NULL_TREE);
  tree BT_FN_VOID_UINT64_UINT64
    = build_function_type_list (void_type_node, uint64_type_node,
				uint64_type_node, NULL_TREE);
  tree BT_FN_VOID_FLOAT_FLOAT
    = build_function_type_list (void_type_node, float_type_node,
				float_type_node, NULL_TREE);
  tree BT_FN_VOID_DOUBLE_DOUBLE
    = build_function_type_list (void_type_node, double_type_node,
				double_type_node, NULL_TREE);
  tree BT_FN_VOID_UINT64_PTR
    = build_function_type_list (void_type_node, uint64_type_node,
				ptr_type_node, NULL_TREE);

  tree BT_FN_PTR_CONST_PTR_UINT8
    = build_function_type_list (ptr_type_node, const_ptr_type_node,
				unsigned_char_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_UINT8_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				unsigned_char_type_node,
				pointer_sized_int_node, NULL_TREE);

  /* Families of types indexed by access width: 1, 2, 4, 8, 16 bytes.  */
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						|TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      /* IX is the unsigned integer type of 2^i bytes.  */
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
  /* Aliases naming each width, as sanitizer.def expects them.  */
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
  /* ECF flag combinations matching the ATTR_* names used by
     sanitizer.def.  */
#undef ATTR_NOTHROW_LIST
#define ATTR_NOTHROW_LIST ECF_NOTHROW
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
#undef DEF_SANITIZER_BUILTIN_1
#define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS)		\
  do {									\
    decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
				 BUILT_IN_NORMAL, NAME, NULL_TREE);	\
    set_call_expr_flags (decl, ATTRS);					\
    set_builtin_decl (ENUM, decl, true);				\
  } while (0)
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS)	\
  DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);

  /* Expanding this include registers every builtin declared in
     sanitizer.def via the macros defined above.  */
#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_dynamic_object_size and
     __builtin_object_size, but they might not be available for e.g. Fortran at
     this point.  We use DEF_SANITIZER_BUILTIN here only as a convenience
     macro.  */
  if (flag_sanitize & SANITIZE_OBJECT_SIZE)
    {
      if (!builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
	DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
				 BT_FN_SIZE_CONST_PTR_INT,
				 ATTR_PURE_NOTHROW_LEAF_LIST);
      if (!builtin_decl_implicit_p (BUILT_IN_DYNAMIC_OBJECT_SIZE))
	DEF_SANITIZER_BUILTIN_1 (BUILT_IN_DYNAMIC_OBJECT_SIZE,
				 "dynamic_object_size",
				 BT_FN_SIZE_CONST_PTR_INT,
				 ATTR_PURE_NOTHROW_LEAF_LIST);
    }

#undef DEF_SANITIZER_BUILTIN_1
#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}
3745 :
3746 : /* Called via htab_traverse. Count number of emitted
3747 : STRING_CSTs in the constant hash table. */
3748 :
3749 : int
3750 3221 : count_string_csts (constant_descriptor_tree **slot,
3751 : unsigned HOST_WIDE_INT *data)
3752 : {
3753 3221 : struct constant_descriptor_tree *desc = *slot;
3754 3221 : if (TREE_CODE (desc->value) == STRING_CST
3755 3199 : && TREE_ASM_WRITTEN (desc->value)
3756 6420 : && asan_protect_global (desc->value))
3757 1624 : ++*data;
3758 3221 : return 1;
3759 : }
3760 :
3761 : /* Helper structure to pass two parameters to
3762 : add_string_csts. */
3763 :
struct asan_add_string_csts_data
{
  /* The __asan_global struct type, as returned by asan_global_struct.  */
  tree type;
  /* Vector of constructor elements that asan_add_global appends to.  */
  vec<constructor_elt, va_gc> *v;
};
3769 :
3770 : /* Called via hash_table::traverse. Call asan_add_global
3771 : on emitted STRING_CSTs from the constant hash table. */
3772 :
3773 : int
3774 3292 : add_string_csts (constant_descriptor_tree **slot,
3775 : asan_add_string_csts_data *aascd)
3776 : {
3777 3292 : struct constant_descriptor_tree *desc = *slot;
3778 3292 : if (TREE_CODE (desc->value) == STRING_CST
3779 3277 : && TREE_ASM_WRITTEN (desc->value)
3780 6569 : && asan_protect_global (desc->value))
3781 : {
3782 1624 : asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3783 : aascd->type, aascd->v);
3784 : }
3785 3292 : return 1;
3786 : }
3787 :
/* Statement list accumulated for the asan module constructor emitted by
   asan_finish_file.  Needs to be GTY(()), because
   cgraph_build_static_cdtor may invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
3791 :
3792 : /* Module-level instrumentation.
3793 : - Insert __asan_init_vN() into the list of CTORs.
3794 : - TODO: insert redzones around globals.
3795 : */
3796 :
void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage.  So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      /* The module ctor first calls __asan_init, then the version
	 mismatch check.  */
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
      fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  /* Count protected globals: emitted defined variables plus emitted
     string constants from the constant pool.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      /* Build the static .LASAN0 array holding GCOUNT __asan_global
	 descriptors.  */
      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
				ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));

      varpool_node::finalize_decl (var);

      /* Register the descriptor array in the module ctor and unregister
	 it in a matching dtor.  */
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  /* Re-enable address sanitization for subsequently compiled code.  */
  flag_sanitize |= SANITIZE_ADDRESS;
}
3883 :
3884 : /* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
3885 : on SHADOW address. Newly added statements will be added to ITER with
3886 : given location LOC. We mark SIZE bytes in shadow memory, where
3887 : LAST_CHUNK_SIZE is greater than zero in situation where we are at the
3888 : end of a variable. */
3889 :
static void
asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
			 tree shadow,
			 unsigned HOST_WIDE_INT base_addr_offset,
			 bool is_clobber, unsigned size,
			 unsigned last_chunk_size)
{
  tree shadow_ptr_type;

  /* Select the shadow store width: 1, 2 or 4 shadow bytes at once.  */
  switch (size)
    {
    case 1:
      shadow_ptr_type = shadow_ptr_types[0];
      break;
    case 2:
      shadow_ptr_type = shadow_ptr_types[1];
      break;
    case 4:
      shadow_ptr_type = shadow_ptr_types[2];
      break;
    default:
      gcc_unreachable ();
    }

  /* Shadow byte value: the use-after-scope magic when poisoning, 0
     (fully addressable) when unpoisoning.  Note the cast binds to
     IS_CLOBBER, i.e. this reads ((char) is_clobber) ? magic : 0.  */
  unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
  unsigned HOST_WIDE_INT val = 0;
  /* When unpoisoning and the final granule is only partially covered,
     the last shadow byte holds the count of valid bytes instead; its
     position within the store depends on endianness.  */
  unsigned last_pos = size;
  if (last_chunk_size && !is_clobber)
    last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
  /* Pack SIZE shadow bytes into a single integer to store at once.  */
  for (unsigned i = 0; i < size; ++i)
    {
      unsigned char shadow_c = c;
      if (i == last_pos)
	shadow_c = last_chunk_size;
      val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
    }

  tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);

  /* Store the packed value at *(SHADOW + BASE_ADDR_OFFSET).  */
  tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
		      build_int_cst (shadow_ptr_type, base_addr_offset));

  gimple *g = gimple_build_assign (dest, magic);
  gimple_set_location (g, loc);
  gsi_insert_after (iter, g, GSI_NEW_STMT);
}
3937 :
3938 : /* Expand the ASAN_MARK builtins. */
3939 :
bool
asan_expand_mark_ifn (gimple_stmt_iterator *iter)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  /* Argument 0 carries the asan_mark_flags value.  */
  HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
  bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;

  /* Argument 1 is the address of the variable being (un)poisoned.  */
  tree base = gimple_call_arg (g, 1);
  gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
  tree decl = TREE_OPERAND (base, 0);

  /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
  if (TREE_CODE (decl) == COMPONENT_REF
      && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
    decl = TREE_OPERAND (decl, 0);

  gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);

  if (hwassist_sanitize_p ())
    {
      gcc_assert (param_hwasan_instrument_stack);
      gimple_seq stmts = NULL;
      /* Here we swap ASAN_MARK calls for HWASAN_MARK.
	 This is because we are using the approach of using ASAN_MARK as a
	 synonym until here.
	 That approach means we don't yet have to duplicate all the special
	 cases for ASAN_MARK and ASAN_POISON with the exact same handling but
	 called HWASAN_MARK etc.

	 N.b. __asan_poison_stack_memory (which implements ASAN_MARK for ASAN)
	 rounds the size up to its shadow memory granularity, while
	 __hwasan_tag_memory (which implements the same for HWASAN) does not.
	 Hence we emit HWASAN_MARK with an aligned size unlike ASAN_MARK.  */
      tree len = gimple_call_arg (g, 2);
      tree new_len = gimple_build_round_up (&stmts, loc, size_type_node, len,
					    HWASAN_TAG_GRANULE_SIZE);
      gimple_build (&stmts, loc, CFN_HWASAN_MARK,
		    void_type_node, gimple_call_arg (g, 0),
		    base, new_len);
      gsi_replace_with_seq (iter, stmts, true);
      return false;
    }

  if (is_poison)
    {
      /* Remember that this variable's poisoning is handled here.  */
      if (asan_handled_variables == NULL)
	asan_handled_variables = new hash_set<tree> (16);
      asan_handled_variables->add (decl);
    }
  /* Argument 2 is the byte length of the region.  */
  tree len = gimple_call_arg (g, 2);

  gcc_assert (poly_int_tree_p (len));

  /* Replace the ASAN_MARK call itself with BASE converted to a
     pointer-sized integer; ITER now points at that assignment.  */
  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_replace (iter, g, false);
  tree base_addr = gimple_assign_lhs (g);

  /* Generate direct emission if size_in_bytes is small.  */
  unsigned threshold = param_use_after_scope_direct_emission_threshold;
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) <= threshold)
    {
      unsigned HOST_WIDE_INT size_in_bytes = tree_to_uhwi (len);
      const unsigned HOST_WIDE_INT shadow_size
	= shadow_mem_size (size_in_bytes);
      const unsigned int shadow_align
	= (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;

      tree shadow = build_shadow_mem_access (iter, loc, base_addr,
					     shadow_ptr_types[0], true);

      /* Emit shadow stores using the widest chunk the alignment allows.  */
      for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
	{
	  unsigned size = 1;
	  if (shadow_size - offset >= 4
	      && (!STRICT_ALIGNMENT || shadow_align >= 4))
	    size = 4;
	  else if (shadow_size - offset >= 2
		   && (!STRICT_ALIGNMENT || shadow_align >= 2))
	    size = 2;

	  /* If this chunk extends past SIZE_IN_BYTES, the final granule
	     is only partially valid; pass the count of valid bytes.  */
	  unsigned HOST_WIDE_INT last_chunk_size = 0;
	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
	  if (s > size_in_bytes)
	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);

	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
				   size, last_chunk_size);
	  offset += size;
	}
    }
  else
    {
      /* Region too large (or length not a known constant small enough):
	 call the runtime (un)poisoning helper instead.  */
      g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			       NOP_EXPR, len);
      gimple_set_location (g, loc);
      gsi_safe_insert_before (iter, g);
      tree sz_arg = gimple_assign_lhs (g);

      tree fun
	= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
					   : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
      g = gimple_build_call (fun, 2, base_addr, sz_arg);
      gimple_set_location (g, loc);
      gsi_insert_after (iter, g, GSI_NEW_STMT);
    }

  return false;
}
4051 :
/* Expand the ASAN_{LOAD,STORE} internal-function checks.  ITER points at
   an IFN_ASAN_CHECK call.  With USE_CALLS the check is lowered to a single
   __asan_{load,store}N run-time callback; otherwise inline shadow-memory
   tests are emitted, guarding a conditional call to the matching
   __asan_report_* function.  Returns true iff the CFG was changed, in
   which case *ITER is repositioned at the start of the fallthrough
   basic block.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  /* Hardware-assisted sanitizers lower IFN_ASAN_CHECK elsewhere.  */
  gcc_assert (!hwassist_sanitize_p ());
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  /* Decode the flags operand (arg 0) of the IFN_ASAN_CHECK call.  */
  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  /* -1 denotes a non-constant (or non-scalar) access size.  */
  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  /* The _n variants additionally take the access size.  */
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  /* 16-byte accesses use the second shadow pointer type; see
     asan_init_shadow_ptr_types.  */
  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  /* T will hold the boolean "poisoned" predicate fed to the final cond.  */
  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      /* Accesses of >= 8 bytes cover whole shadow granules; a non-zero
	 shadow byte is already a failure.  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check first and last byte.  */
      if (size_in_bytes == -1)
	{
	  /* base_end_addr = base_addr + (len - 1).  */
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  /* Same shadow test as above, applied to the last byte, then
	     OR-ed into T.  */
	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* The original IFN_ASAN_CHECK is no longer needed.  */
  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
4258 :
4259 : /* Create ASAN shadow variable for a VAR_DECL which has been rewritten
4260 : into SSA. Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */
4261 :
4262 : static tree
4263 31 : create_asan_shadow_var (tree var_decl,
4264 : hash_map<tree, tree> &shadow_vars_mapping)
4265 : {
4266 31 : tree *slot = shadow_vars_mapping.get (var_decl);
4267 31 : if (slot == NULL)
4268 : {
4269 31 : tree shadow_var = copy_node (var_decl);
4270 :
4271 31 : copy_body_data id;
4272 31 : memset (&id, 0, sizeof (copy_body_data));
4273 31 : id.src_fn = id.dst_fn = current_function_decl;
4274 31 : copy_decl_for_dup_finish (&id, var_decl, shadow_var);
4275 :
4276 31 : DECL_ARTIFICIAL (shadow_var) = 1;
4277 31 : DECL_IGNORED_P (shadow_var) = 1;
4278 31 : DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
4279 31 : gimple_add_tmp_var (shadow_var);
4280 :
4281 31 : shadow_vars_mapping.put (var_decl, shadow_var);
4282 31 : return shadow_var;
4283 : }
4284 : else
4285 0 : return *slot;
4286 : }
4287 :
/* Expand ASAN_POISON ifn.  ITER points at the IFN_ASAN_POISON call whose
   LHS is the poisoned SSA name.  A shadow variable is created for the
   underlying decl, marked poisoned via IFN_ASAN_MARK, and every real use
   of the poisoned name is preceded (or, for IFN_ASAN_POISON_USE, replaced)
   by a run-time error-reporting call.  *NEED_COMMIT_EDGE_INSERT is set
   when calls were queued on CFG edges and the caller must commit them.
   SHADOW_VARS_MAPPING memoizes decl -> shadow-var.  Returns true (the
   statement at *ITER is always replaced).  */

bool
asan_expand_poison_ifn (gimple_stmt_iterator *iter,
			bool *need_commit_edge_insert,
			hash_map<tree, tree> &shadow_vars_mapping)
{
  gimple *g = gsi_stmt (*iter);
  tree poisoned_var = gimple_call_lhs (g);
  /* Nothing uses the poisoned value: just drop the statement.  */
  if (!poisoned_var || has_zero_uses (poisoned_var))
    {
      gsi_remove (iter, true);
      return true;
    }

  /* Ensure the SSA name has an underlying variable to shadow.  */
  if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
    SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
				    create_tmp_var (TREE_TYPE (poisoned_var)));

  tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
					    shadow_vars_mapping);

  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
  tree size = DECL_SIZE_UNIT (shadow_var);
  /* This IFN_ASAN_MARK replaces the original statement below.  */
  gimple *poison_call
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node,
						 ASAN_MARK_POISON),
				  build_fold_addr_expr (shadow_var), size);

  gimple *use;
  imm_use_iterator imm_iter;
  FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
    {
      /* Debug uses must not grow run-time calls.  */
      if (is_gimple_debug (use))
	continue;

      int nargs;
      bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
      gcall *call;
      if (hwassist_sanitize_p ())
	{
	  tree fun = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MISMATCH4);
	  /* NOTE: hwasan has no __hwasan_report_* functions like asan does.
	     We use __hwasan_tag_mismatch4 with arguments that tell it the
	     size of access and load to report all tag mismatches.

	     The arguments to this function are:
	       Address of invalid access.
	       Bitfield containing information about the access
		 (access_info)
	       Pointer to a frame of registers
		 (for use in printing the contents of registers in a dump)
		 Not used yet -- to be used by inline instrumentation.
	       Size of access.

	     The access_info bitfield encodes the following pieces of
	     information:
	       - Is this a store or load?
		 access_info & 0x10  =>  store
	       - Should the program continue after reporting the error?
		 access_info & 0x20  =>  recover
	       - What size access is this (not used here since we can always
		 pass the size in the last argument)

		 if (access_info & 0xf == 0xf)
		   size is taken from last argument.
		 else
		   size == 1 << (access_info & 0xf)

	     The last argument contains the size of the access iff the
	     access_info size indicator is 0xf (we always use this argument
	     rather than storing the size in the access_info bitfield).

	     See the function definition `__hwasan_tag_mismatch4` in
	     libsanitizer/hwasan for the full definition.
	     */
	  unsigned access_info = (0x20 * recover_p)
				 + (0x10 * store_p)
				 + (0xf);
	  call = gimple_build_call (fun, 4,
				    build_fold_addr_expr (shadow_var),
				    build_int_cst (pointer_sized_int_node,
						   access_info),
				    build_int_cst (pointer_sized_int_node, 0),
				    size);
	}
      else
	{
	  tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
					&nargs);
	  call = gimple_build_call (fun, 1,
				    build_fold_addr_expr (shadow_var));
	}
      gimple_set_location (call, gimple_location (use));
      gimple *call_to_insert = call;

      /* The USE can be a gimple PHI node.  If so, insert the call on
	 all edges leading to the PHI node.  */
      if (is_a <gphi *> (use))
	{
	  gphi *phi = dyn_cast<gphi *> (use);
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    if (gimple_phi_arg_def (phi, i) == poisoned_var)
	      {
		edge e = gimple_phi_arg_edge (phi, i);

		/* Do not insert on an edge we can't split.  */
		if (e->flags & EDGE_ABNORMAL)
		  continue;

		/* Each edge needs its own statement; copy after the first
		   insertion consumes CALL.  */
		if (call_to_insert == NULL)
		  call_to_insert = gimple_copy (call);

		gsi_insert_seq_on_edge (e, call_to_insert);
		*need_commit_edge_insert = true;
		call_to_insert = NULL;
	      }
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use);
	  if (store_p)
	    gsi_replace (&gsi, call, true);
	  else
	    gsi_insert_before (&gsi, call, GSI_NEW_STMT);
	}
    }

  /* Detach the poisoned name from its defining statement and put the
     IFN_ASAN_MARK poison in place of the original call.  */
  SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
  SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
  gsi_replace (iter, poison_call, false);

  return true;
}
4427 :
4428 : /* Instrument the current function. */
4429 :
4430 : static unsigned int
4431 6352 : asan_instrument (void)
4432 : {
4433 6352 : if (hwassist_sanitize_p ())
4434 : {
4435 457 : initialize_sanitizer_builtins ();
4436 457 : transform_statements ();
4437 457 : return 0;
4438 : }
4439 :
4440 5895 : if (shadow_ptr_types[0] == NULL_TREE)
4441 2307 : asan_init_shadow_ptr_types ();
4442 5895 : transform_statements ();
4443 5895 : last_alloca_addr = NULL_TREE;
4444 5895 : return 0;
4445 : }
4446 :
4447 : static bool
4448 1472267 : gate_asan (void)
4449 : {
4450 428128 : return sanitize_flags_p (SANITIZE_ADDRESS);
4451 : }
4452 :
/* Pass descriptor and implementation for the "asan" GIMPLE pass, which
   rewrites memory accesses into sanitizer checks via asan_instrument.  */

namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_asan (m_ctxt); }
  /* Run when any of the address, hardware-address or memtag sanitizers
     is enabled.  */
  bool gate (function *) final override
    {
      return gate_asan () || gate_hwasan () || gate_memtag ();
    }
  unsigned int execute (function *) final override
    {
      return asan_instrument ();
    }

}; // class pass_asan

} // anon namespace
4489 :
4490 : gimple_opt_pass *
4491 285722 : make_pass_asan (gcc::context *ctxt)
4492 : {
4493 285722 : return new pass_asan (ctxt);
4494 : }
4495 :
/* Pass descriptor and implementation for the "asan0" GIMPLE pass — the
   variant of the asan pass that runs only when compiling without
   optimization (its gate additionally requires !optimize).  */

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      return !optimize && (gate_asan () || gate_hwasan () || gate_memtag ());
    }
  unsigned int execute (function *) final override
    {
      return asan_instrument ();
    }

}; // class pass_asan_O0

} // anon namespace
4531 :
4532 : gimple_opt_pass *
4533 285722 : make_pass_asan_O0 (gcc::context *ctxt)
4534 : {
4535 285722 : return new pass_asan_O0 (ctxt);
4536 : }
4537 :
4538 : /* HWASAN */
4539 :
4540 : /* For stack tagging:
4541 :
4542 : Return the offset from the frame base tag that the "next" expanded object
4543 : should have. */
4544 : uint8_t
4545 180 : hwasan_current_frame_tag ()
4546 : {
4547 180 : return hwasan_frame_tag_offset;
4548 : }
4549 :
4550 : /* For stack tagging:
4551 :
4552 : Return the 'base pointer' for this function. If that base pointer has not
4553 : yet been created then we create a register to hold it and record the insns
4554 : to initialize the register in `hwasan_frame_base_init_seq` for later
4555 : emission. */
4556 : rtx
4557 90 : hwasan_frame_base ()
4558 : {
4559 90 : if (! hwasan_frame_base_ptr)
4560 : {
4561 64 : start_sequence ();
4562 64 : hwasan_frame_base_ptr
4563 64 : = force_reg (Pmode,
4564 64 : targetm.memtag.insert_random_tag (virtual_stack_vars_rtx,
4565 : NULL_RTX));
4566 64 : hwasan_frame_base_init_seq = end_sequence ();
4567 : }
4568 :
4569 90 : return hwasan_frame_base_ptr;
4570 : }
4571 :
4572 : /* For stack tagging:
4573 :
4574 : Check whether this RTX is a standard pointer addressing the base of the
4575 : stack variables for this frame. Returns true if the RTX is either
4576 : virtual_stack_vars_rtx or hwasan_frame_base_ptr. */
4577 : bool
4578 1951457 : stack_vars_base_reg_p (rtx base)
4579 : {
4580 1951457 : return base == virtual_stack_vars_rtx || base == hwasan_frame_base_ptr;
4581 : }
4582 :
4583 : /* For stack tagging:
4584 :
4585 : Emit frame base initialisation.
4586 : If hwasan_frame_base has been used before here then
4587 : hwasan_frame_base_init_seq contains the sequence of instructions to
4588 : initialize it. This must be put just before the hwasan prologue, so we emit
4589 : the insns before parm_birth_insn (which will point to the first instruction
4590 : of the hwasan prologue if it exists).
4591 :
4592 : We update `parm_birth_insn` to point to the start of this initialisation
4593 : since that represents the end of the initialisation done by
4594 : expand_function_{start,end} functions and we want to maintain that. */
4595 : void
4596 373 : hwasan_maybe_emit_frame_base_init ()
4597 : {
4598 373 : if (! hwasan_frame_base_init_seq)
4599 : return;
4600 16 : emit_insn_before (hwasan_frame_base_init_seq, parm_birth_insn);
4601 16 : parm_birth_insn = hwasan_frame_base_init_seq;
4602 : }
4603 :
4604 : /* Record a compile-time constant size stack variable that HWASAN will need to
4605 : tag. This record of the range of a stack variable will be used by
4606 : `hwasan_emit_prologue` to emit the RTL at the start of each frame which will
4607 : set tags in the shadow memory according to the assigned tag for each object.
4608 :
4609 : The range that the object spans in stack space should be described by the
4610 : bounds `untagged_base + nearest_offset` and
4611 : `untagged_base + farthest_offset`.
4612 : `tagged_base` is the base address which contains the "base frame tag" for
4613 : this frame, and from which the value to address this object with will be
4614 : calculated.
4615 :
4616 : We record the `untagged_base` since the functions in the hwasan library we
4617 : use to tag memory take pointers without a tag. */
4618 : void
4619 90 : hwasan_record_stack_var (rtx untagged_base, rtx tagged_base,
4620 : poly_int64 nearest_offset, poly_int64 farthest_offset)
4621 : {
4622 90 : hwasan_stack_var cur_var;
4623 90 : cur_var.untagged_base = untagged_base;
4624 90 : cur_var.tagged_base = tagged_base;
4625 90 : cur_var.nearest_offset = nearest_offset;
4626 90 : cur_var.farthest_offset = farthest_offset;
4627 90 : cur_var.tag_offset = hwasan_current_frame_tag ();
4628 :
4629 90 : hwasan_tagged_stack_vars.safe_push (cur_var);
4630 90 : }
4631 :
4632 : /* Return the RTX representing the farthest extent of the statically allocated
4633 : stack objects for this frame. If hwasan_frame_base_ptr has not been
4634 : initialized then we are not storing any static variables on the stack in
4635 : this frame. In this case we return NULL_RTX to represent that.
4636 :
4637 : Otherwise simply return virtual_stack_vars_rtx + frame_offset. */
4638 : rtx
4639 373 : hwasan_get_frame_extent ()
4640 : {
4641 373 : return (hwasan_frame_base_ptr
4642 373 : ? plus_constant (Pmode, virtual_stack_vars_rtx, frame_offset)
4643 373 : : NULL_RTX);
4644 : }
4645 :
4646 : /* For stack tagging:
4647 :
4648 : Increment the frame tag offset modulo the size a tag can represent. */
4649 : void
4650 90 : hwasan_increment_frame_tag ()
4651 : {
4652 90 : uint8_t tag_bits = HWASAN_TAG_SIZE;
4653 90 : gcc_assert (HWASAN_TAG_SIZE
4654 : <= sizeof (hwasan_frame_tag_offset) * CHAR_BIT);
4655 90 : hwasan_frame_tag_offset = (hwasan_frame_tag_offset + 1) % (1 << tag_bits);
4656 : /* The "background tag" of the stack is zero by definition.
4657 : This is the tag that objects like parameters passed on the stack and
4658 : spilled registers are given. It is handy to avoid this tag for objects
4659 : whose tags we decide ourselves, partly to ensure that buffer overruns
4660 : can't affect these important variables (e.g. saved link register, saved
4661 : stack pointer etc) and partly to make debugging easier (everything with a
4662 : tag of zero is space allocated automatically by the compiler).
4663 :
4664 : This is not feasible when using random frame tags (the default
4665 : configuration for hwasan) since the tag for the given frame is randomly
4666 : chosen at runtime. In order to avoid any tags matching the stack
4667 : background we would need to decide tag offsets at runtime instead of
4668 : compile time (and pay the resulting performance cost).
4669 :
4670 : When not using random base tags for each frame (i.e. when compiled with
4671 : `--param hwasan-random-frame-tag=0`) the base tag for each frame is zero.
4672 : This means the tag that each object gets is equal to the
4673 : hwasan_frame_tag_offset used in determining it.
4674 : When this is the case we *can* ensure no object gets the tag of zero by
4675 : simply ensuring no object has the hwasan_frame_tag_offset of zero.
4676 :
4677 : There is the extra complication that we only record the
4678 : hwasan_frame_tag_offset here (which is the offset from the tag stored in
4679 : the stack pointer). In the kernel, the tag in the stack pointer is 0xff
4680 : rather than zero. This does not cause problems since tags of 0xff are
4681 : never checked in the kernel. As mentioned at the beginning of this
4682 : comment the background tag of the stack is zero by definition, which means
4683 : that for the kernel we should skip offsets of both 0 and 1 from the stack
4684 : pointer. Avoiding the offset of 0 ensures we use a tag which will be
4685 : checked, avoiding the offset of 1 ensures we use a tag that is not the
4686 : same as the background. */
4687 90 : if (hwasan_frame_tag_offset == 0 && ! param_hwasan_random_frame_tag)
4688 0 : hwasan_frame_tag_offset += 1;
4689 16 : if (hwasan_frame_tag_offset == 1 && ! param_hwasan_random_frame_tag
4690 90 : && sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS))
4691 0 : hwasan_frame_tag_offset += 1;
4692 90 : }
4693 :
4694 : /* Clear internal state for the next function.
4695 : This function is called before variables on the stack get expanded, in
4696 : `init_vars_expansion`. */
4697 : void
4698 1117 : hwasan_record_frame_init ()
4699 : {
4700 1117 : delete asan_used_labels;
4701 1117 : asan_used_labels = NULL;
4702 :
4703 : /* If this isn't the case then some stack variable was recorded *before*
4704 : hwasan_record_frame_init is called, yet *after* the hwasan prologue for
4705 : the previous frame was emitted. Such stack variables would not have
4706 : their shadow stack filled in. */
4707 1117 : gcc_assert (hwasan_tagged_stack_vars.is_empty ());
4708 1117 : hwasan_frame_base_ptr = NULL_RTX;
4709 1117 : hwasan_frame_base_init_seq = NULL;
4710 :
4711 : /* When not using a random frame tag we can avoid the background stack
4712 : color which gives the user a little better debug output upon a crash.
4713 : Meanwhile, when using a random frame tag it will be nice to avoid adding
4714 : tags for the first object since that is unnecessary extra work.
4715 : Hence set the initial hwasan_frame_tag_offset to be 0 if using a random
4716 : frame tag and 1 otherwise.
4717 :
4718 : As described in hwasan_increment_frame_tag, in the kernel the stack
4719 : pointer has the tag 0xff. That means that to avoid 0xff and 0 (the tag
4720 : which the kernel does not check and the background tag respectively) we
4721 : start with a tag offset of 2. */
4722 2084 : hwasan_frame_tag_offset = param_hwasan_random_frame_tag
4723 : ? 0
4724 967 : : sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS) ? 2 : 1;
4725 1117 : }
4726 :
/* For stack tagging:
   (Emits HWASAN equivalent of what is emitted by
   `asan_emit_stack_protection`).

   Emits the extra prologue code to set the shadow stack as required for
   HWASAN stack instrumentation.

   Uses the vector of recorded stack variables hwasan_tagged_stack_vars.
   When this function has completed hwasan_tagged_stack_vars is empty and
   all objects it had pointed to are deallocated.  */
void
hwasan_emit_prologue ()
{
  /* We need untagged base pointers since libhwasan only accepts untagged
     pointers in __hwasan_tag_memory.  We need the tagged base pointer to
     obtain the base tag for an offset.  */

  if (hwasan_tagged_stack_vars.is_empty ())
    return;

  poly_int64 bot = 0, top = 0;
  for (hwasan_stack_var &cur : hwasan_tagged_stack_vars)
    {
      poly_int64 nearest = cur.nearest_offset;
      poly_int64 farthest = cur.farthest_offset;

      /* Normalise the two recorded offsets so TOP >= BOT regardless of
	 stack growth direction.  */
      if (known_ge (nearest, farthest))
	{
	  top = nearest;
	  bot = farthest;
	}
      else
	{
	  /* Given how these values are calculated, one must be known greater
	     than the other.  */
	  gcc_assert (known_le (nearest, farthest));
	  top = farthest;
	  bot = nearest;
	}
      poly_int64 size = (top - bot);

      /* Assert the edge of each variable is aligned to the HWASAN tag granule
	 size.  */
      gcc_assert (multiple_p (top, HWASAN_TAG_GRANULE_SIZE));
      gcc_assert (multiple_p (bot, HWASAN_TAG_GRANULE_SIZE));
      gcc_assert (multiple_p (size, HWASAN_TAG_GRANULE_SIZE));

      rtx base_tag = targetm.memtag.extract_tag (cur.tagged_base, NULL_RTX);

      /* Lowest (untagged) address of the object's range.  */
      rtx bottom = convert_memory_address (ptr_mode,
					   plus_constant (Pmode,
							  cur.untagged_base,
							  bot));
      if (memtag_sanitize_p ())
	{
	  expand_operand ops[3];
	  rtx tagged_addr = gen_reg_rtx (ptr_mode);

	  /* Check if the required target instructions are present.  */
	  gcc_assert (targetm.have_compose_tag ());
	  gcc_assert (targetm.have_tag_memory ());

	  /* The AArch64 has addg/subg instructions which are working directly
	     on a tagged pointer.  */
	  create_output_operand (&ops[0], tagged_addr, ptr_mode);
	  create_input_operand (&ops[1], base_tag, ptr_mode);
	  create_integer_operand (&ops[2], cur.tag_offset);
	  expand_insn (targetm.code_for_compose_tag, 3, ops);

	  emit_insn (targetm.gen_tag_memory (bottom, tagged_addr,
					     gen_int_mode (size, ptr_mode)));
	}
      else
	{
	  /* Tag the range via the library: the tag is the frame's base tag
	     plus this object's offset, truncated to HWASAN_TAG_SIZE bits.  */
	  rtx fn = init_one_libfunc ("__hwasan_tag_memory");
	  rtx tag = plus_constant (QImode, base_tag, cur.tag_offset);
	  tag = hwasan_truncate_to_tag_size (tag, NULL_RTX);
	  emit_library_call (fn, LCT_NORMAL, VOIDmode,
			     bottom, ptr_mode,
			     tag, QImode,
			     gen_int_mode (size, ptr_mode), ptr_mode);
	}
    }
  /* Clear the stack vars, we've emitted the prologue for them all now.  */
  hwasan_tagged_stack_vars.truncate (0);
}
4813 :
4814 : /* For stack tagging:
4815 :
4816 : Return RTL insns to clear the tags between DYNAMIC and VARS pointers
4817 : into the stack. These instructions should be emitted at the end of
4818 : every function.
4819 :
4820 : If `dynamic` is NULL_RTX then no insns are returned. */
4821 : rtx_insn *
4822 373 : hwasan_emit_untag_frame (rtx dynamic, rtx vars)
4823 : {
4824 373 : if (! dynamic)
4825 : return NULL;
4826 :
4827 64 : start_sequence ();
4828 :
4829 64 : dynamic = convert_memory_address (ptr_mode, dynamic);
4830 64 : vars = convert_memory_address (ptr_mode, vars);
4831 :
4832 64 : rtx top_rtx;
4833 64 : rtx bot_rtx;
4834 64 : if (FRAME_GROWS_DOWNWARD)
4835 : {
4836 64 : top_rtx = vars;
4837 64 : bot_rtx = dynamic;
4838 : }
4839 : else
4840 : {
4841 : top_rtx = dynamic;
4842 : bot_rtx = vars;
4843 : }
4844 :
4845 64 : rtx size_rtx = simplify_gen_binary (MINUS, ptr_mode, top_rtx, bot_rtx);
4846 64 : if (!CONST_INT_P (size_rtx))
4847 0 : size_rtx = force_reg (ptr_mode, size_rtx);
4848 :
4849 64 : if (memtag_sanitize_p ())
4850 0 : emit_insn (targetm.gen_tag_memory (bot_rtx, HWASAN_STACK_BACKGROUND,
4851 : size_rtx));
4852 : else
4853 : {
4854 64 : rtx fn = init_one_libfunc ("__hwasan_tag_memory");
4855 64 : emit_library_call (fn, LCT_NORMAL, VOIDmode,
4856 : bot_rtx, ptr_mode,
4857 : HWASAN_STACK_BACKGROUND, QImode,
4858 : size_rtx, ptr_mode);
4859 : }
4860 :
4861 64 : do_pending_stack_adjust ();
4862 64 : return end_sequence ();
4863 : }
4864 :
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree hwasan_ctor_statements;

/* Insert module initialization into this TU.  This initialization calls the
   initialization code for libhwasan.  */
void
hwasan_finish_file (void)
{
  /* Do not emit constructor initialization for the kernel.
     (the kernel has its own initialization already).  */
  if (flag_sanitize & SANITIZE_KERNEL_HWADDRESS)
    return;

  initialize_sanitizer_builtins ();

  /* Avoid instrumenting code in the hwasan constructors/destructors.  */
  flag_sanitize &= ~SANITIZE_HWADDRESS;
  /* Run just below the highest reserved priority so the sanitizer run-time
     is initialized before other constructors execute.  */
  int priority = MAX_RESERVED_INIT_PRIORITY - 1;
  tree fn = builtin_decl_implicit (BUILT_IN_HWASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &hwasan_ctor_statements);
  cgraph_build_static_cdtor ('I', hwasan_ctor_statements, priority);
  /* Restore the flag cleared above.  */
  flag_sanitize |= SANITIZE_HWADDRESS;
}
4889 :
4890 : /* For stack tagging:
4891 :
4892 : Truncate `tag` to the number of bits that a tag uses (i.e. to
4893 : HWASAN_TAG_SIZE). Store the result in `target` if it's convenient. */
4894 : rtx
4895 90 : hwasan_truncate_to_tag_size (rtx tag, rtx target)
4896 : {
4897 90 : gcc_assert (GET_MODE (tag) == QImode);
4898 90 : if (HWASAN_TAG_SIZE != GET_MODE_PRECISION (QImode))
4899 : {
4900 90 : gcc_assert (GET_MODE_PRECISION (QImode) > HWASAN_TAG_SIZE);
4901 90 : rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_TAG_SIZE) - 1,
4902 : QImode);
4903 90 : tag = expand_simple_binop (QImode, AND, tag, mask, target,
4904 : /* unsignedp = */1, OPTAB_WIDEN);
4905 90 : gcc_assert (tag);
4906 : }
4907 90 : return tag;
4908 : }
4909 :
/* Construct a function tree for __hwasan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  RECOVER_P selects
   the _NOABORT variant.  SIZE_IN_BYTES is the constant access size, or -1
   for a non-constant size, which selects the _n variant.  *NARGS is set to
   the number of arguments the chosen run-time function takes (2 for the _n
   variants, which take an explicit size, otherwise 1).  */
static combined_fn
hwasan_check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  /* Table indexed as [recover_p][is_store][log2 (size)]; index 5 holds the
     variable-size _n entry points.  */
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_HWASAN_LOAD1, BUILT_IN_HWASAN_LOAD2,
	    BUILT_IN_HWASAN_LOAD4, BUILT_IN_HWASAN_LOAD8,
	    BUILT_IN_HWASAN_LOAD16, BUILT_IN_HWASAN_LOADN },
	  { BUILT_IN_HWASAN_STORE1, BUILT_IN_HWASAN_STORE2,
	    BUILT_IN_HWASAN_STORE4, BUILT_IN_HWASAN_STORE8,
	    BUILT_IN_HWASAN_STORE16, BUILT_IN_HWASAN_STOREN } },
	{ { BUILT_IN_HWASAN_LOAD1_NOABORT,
	    BUILT_IN_HWASAN_LOAD2_NOABORT,
	    BUILT_IN_HWASAN_LOAD4_NOABORT,
	    BUILT_IN_HWASAN_LOAD8_NOABORT,
	    BUILT_IN_HWASAN_LOAD16_NOABORT,
	    BUILT_IN_HWASAN_LOADN_NOABORT },
	  { BUILT_IN_HWASAN_STORE1_NOABORT,
	    BUILT_IN_HWASAN_STORE2_NOABORT,
	    BUILT_IN_HWASAN_STORE4_NOABORT,
	    BUILT_IN_HWASAN_STORE8_NOABORT,
	    BUILT_IN_HWASAN_STORE16_NOABORT,
	    BUILT_IN_HWASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return as_combined_fn (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  /* Constant sizes must be a power of two no larger than 32.  */
  int size_log2 = exact_log2 (size_in_bytes);
  gcc_assert (size_log2 >= 0 && size_log2 <= 5);
  return as_combined_fn (check[recover_p][is_store][size_log2]);
}
4945 :
/* Expand the HWASAN_{LOAD,STORE} builtins.
   Replace the internal-function call at *ITER with direct calls to the
   matching __hwasan_{load,store}* library routine, then advance *ITER past
   the inserted statements.  Always returns false.  */
bool
hwasan_expand_check_ifn (gimple_stmt_iterator *iter, bool)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  /* RECOVER_P selects the _noabort library variants (see the table in
     hwasan_check_func) based on whichever of user/kernel hwasan is
     active and its -fsanitize-recover setting.  */
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_HWADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_HWADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_HWADDRESS) != 0;

  /* Argument 0 of the IFN call is a bitmask of ASAN_CHECK_* flags.  */
  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);

  /* `align` is unused for HWASAN_CHECK, but we pass the argument anyway
     since that way the arguments match ASAN_CHECK.  */
  /* HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3)); */

  /* -1 signals "size not a compile-time constant" to hwasan_check_func,
     which then picks the _n variant.  */
  unsigned HOST_WIDE_INT size_in_bytes
    = is_scalar_access ? tree_to_shwi (len) : -1;

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to hwasan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // hwasan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the hwasan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Build the replacement sequence: cast the base pointer to a
     pointer-sized integer, then call the checking routine.  */
  gimple_seq stmts = NULL;
  tree base_addr = gimple_build (&stmts, loc, NOP_EXPR,
				 pointer_sized_int_node, base);

  int nargs = 0;
  combined_fn fn
    = hwasan_check_func (is_store, recover_p, size_in_bytes, &nargs);
  if (nargs == 1)
    gimple_build (&stmts, loc, fn, void_type_node, base_addr);
  else
    {
      /* The _n variants additionally take the access length, also cast to
	 a pointer-sized integer.  */
      gcc_assert (nargs == 2);
      tree sz_arg = gimple_build (&stmts, loc, NOP_EXPR,
				  pointer_sized_int_node, len);
      gimple_build (&stmts, loc, fn, void_type_node, base_addr, sz_arg);
    }

  /* Insert the new calls, drop the original HWASAN_CHECK call, and leave
     *ITER on the last inserted statement.  */
  gsi_insert_seq_after (&gsi, stmts, GSI_NEW_STMT);
  gsi_remove (iter, true);
  *iter = gsi;
  return false;
}
5028 :
/* For stack tagging:

   Dummy: the HWASAN_MARK internal function should only ever be in the code
   after the sanopt pass, so reaching this expansion hook indicates a
   compiler bug.  The iterator argument is unused.  */
bool
hwasan_expand_mark_ifn (gimple_stmt_iterator *)
{
  gcc_unreachable ();
}
5038 :
/* Pass gate: true iff hwasan instrumentation is enabled for this
   compilation (delegates to hwasan_sanitize_p).  */
bool
gate_hwasan ()
{
  return hwasan_sanitize_p ();
}
5044 :
/* Pass gate: true iff memtag sanitization is enabled for this
   compilation (delegates to memtag_sanitize_p).  */
bool
gate_memtag ()
{
  return memtag_sanitize_p ();
}
5050 :
5051 : #include "gt-asan.h"
|