Branch data Line data Source code
1 : : /* AddressSanitizer, a fast memory error detector.
2 : : Copyright (C) 2012-2025 Free Software Foundation, Inc.
3 : : Contributed by Kostya Serebryany <kcc@google.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it under
8 : : the terms of the GNU General Public License as published by the Free
9 : : Software Foundation; either version 3, or (at your option) any later
10 : : version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : :
22 : : #include "config.h"
23 : : #include "system.h"
24 : : #include "coretypes.h"
25 : : #include "backend.h"
26 : : #include "target.h"
27 : : #include "rtl.h"
28 : : #include "tree.h"
29 : : #include "gimple.h"
30 : : #include "cfghooks.h"
31 : : #include "alloc-pool.h"
32 : : #include "tree-pass.h"
33 : : #include "memmodel.h"
34 : : #include "tm_p.h"
35 : : #include "ssa.h"
36 : : #include "stringpool.h"
37 : : #include "tree-ssanames.h"
38 : : #include "optabs.h"
39 : : #include "emit-rtl.h"
40 : : #include "cgraph.h"
41 : : #include "gimple-pretty-print.h"
42 : : #include "alias.h"
43 : : #include "fold-const.h"
44 : : #include "cfganal.h"
45 : : #include "gimplify.h"
46 : : #include "gimple-iterator.h"
47 : : #include "varasm.h"
48 : : #include "stor-layout.h"
49 : : #include "tree-iterator.h"
50 : : #include "stringpool.h"
51 : : #include "attribs.h"
52 : : #include "asan.h"
53 : : #include "dojump.h"
54 : : #include "explow.h"
55 : : #include "expr.h"
56 : : #include "output.h"
57 : : #include "langhooks.h"
58 : : #include "cfgloop.h"
59 : : #include "gimple-builder.h"
60 : : #include "gimple-fold.h"
61 : : #include "ubsan.h"
62 : : #include "builtins.h"
63 : : #include "fnmatch.h"
64 : : #include "tree-inline.h"
65 : : #include "tree-ssa.h"
66 : : #include "tree-eh.h"
67 : : #include "diagnostic-core.h"
68 : :
69 : : /* AddressSanitizer finds out-of-bounds and use-after-free bugs
70 : : with <2x slowdown on average.
71 : :
72 : : The tool consists of two parts:
73 : : instrumentation module (this file) and a run-time library.
74 : : The instrumentation module adds a run-time check before every memory insn.
75 : : For an 8- or 16-byte load accessing address X:
76 : : ShadowAddr = (X >> 3) + Offset
77 : : ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
78 : : if (ShadowValue)
79 : : __asan_report_load8(X);
80 : : For a load of N bytes (N=1, 2 or 4) from address X:
81 : : ShadowAddr = (X >> 3) + Offset
82 : : ShadowValue = *(char*)ShadowAddr;
83 : : if (ShadowValue)
84 : : if ((X & 7) + N - 1 >= ShadowValue)
85 : : __asan_report_loadN(X);
86 : : Stores are instrumented similarly, but using __asan_report_storeN functions.
87 : : A call to __asan_init_vN() is inserted into the list of module CTORs.
88 : : N is the version number of the AddressSanitizer API. The changes between the
89 : : API versions are listed in libsanitizer/asan/asan_interface_internal.h.
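: :
: :    As a concrete C sketch of the N-byte slow path (illustrative only:
: :    SHADOW_OFFSET stands for the target-specific shadow offset and
: :    __asan_report_load_n for the size-specific report routine):
: :
: :      static void
: :      check_load (uintptr_t x, size_t n)  // n = 1, 2 or 4
: :      {
: :        signed char shadow = *(signed char *) ((x >> 3) + SHADOW_OFFSET);
: :        // Non-zero shadow is either 1..7, meaning that many leading
: :        // bytes of the 8-byte granule are addressable, or negative,
: :        // meaning the granule is poisoned; either way, report accesses
: :        // that run past the addressable prefix.
: :        if (shadow != 0 && (int) ((x & 7) + n - 1) >= (int) shadow)
: :          __asan_report_load_n (x);
: :      }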
90 : :
91 : : The run-time library redefines malloc (so that redzones are inserted around
92 : : the allocated memory) and free (so that reuse of freed memory is delayed),
93 : : and provides the __asan_report* and __asan_init_vN functions.
94 : :
95 : : Read more:
96 : : http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
97 : :
98 : : The current implementation supports detection of out-of-bounds and
99 : : use-after-free in the heap, on the stack and for global variables.
100 : :
101 : : [Protection of stack variables]
102 : :
103 : : To understand how detection of out-of-bounds and use-after-free works
104 : : for stack variables, let's look at this example on x86_64 where the
105 : : stack grows downward:
106 : :
107 : : int
108 : : foo ()
109 : : {
110 : : char a[24] = {0};
111 : : int b[2] = {0};
112 : :
113 : : a[5] = 1;
114 : : b[1] = 2;
115 : :
116 : : return a[5] + b[1];
117 : : }
118 : :
119 : : For this function, the stack protected by asan will be organized as
120 : : follows, from the top of the stack to the bottom:
121 : :
122 : : Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
123 : :
124 : : Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
125 : : the next slot 32-byte aligned; this one is called the Partial
126 : : Redzone; this 32-byte alignment is an asan constraint]
127 : :
128 : : Slot 3/ [24 bytes for variable 'a']
129 : :
130 : : Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
131 : :
132 : : Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
133 : :
134 : : Slot 6/ [8 bytes for variable 'b']
135 : :
136 : : Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
137 : : 'LEFT RedZone']
138 : :
139 : : The 32 bytes of LEFT red zone at the bottom of the stack can be
140 : : decomposed as such:
141 : :
142 : : 1/ The first 8 bytes contain a magical asan number that is always
143 : : 0x41B58AB3.
144 : :
145 : : 2/ The following 8 bytes contain a pointer to a string (to be
146 : : parsed at runtime by the asan run-time library), whose format is
147 : : the following (see the illustration after point 4/ below):
148 : :
149 : : "<function-name> <space> <num-of-variables-on-the-stack>
150 : : (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151 : : <length-of-var-in-bytes> ){n} "
152 : :
153 : : where '(...){n}' means the content inside the parentheses occurs 'n'
154 : : times, with 'n' being the number of variables on the stack.
155 : :
156 : : 3/ The following 8 bytes contain the PC of the current function which
157 : : will be used by the run-time library to print an error message.
158 : :
159 : : 4/ The following 8 bytes are reserved for internal use by the run-time.
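: :
: :    As an illustration for the function foo above, reading the grammar
: :    in point 2/ literally, the string would look like
: :
: :      "foo 2 32 8 96 24 "
: :
: :    i.e. two variables: 'b' at offset 32 from the bottom of the frame
: :    with length 8, and 'a' at offset 96 with length 24. (Illustrative
: :    only; newer API versions may carry additional fields.)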
160 : :
161 : : The shadow memory for that stack layout is going to look like this:
162 : :
163 : : - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
164 : : The F1 byte pattern is a magic number called
165 : : ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
166 : : the memory for that shadow byte is part of the LEFT red zone
167 : : intended to sit at the bottom of the variables on the stack.
168 : :
169 : : - content of shadow memory 8 bytes for slots 6 and 5:
170 : : 0xF4F4F400. The F4 byte pattern is a magic number
171 : : called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
172 : : memory region for this shadow byte is a PARTIAL red zone
173 : : intended to pad a variable A, so that the slot following
174 : : {A,padding} is 32-byte aligned.
175 : :
176 : : Note that the fact that the least significant byte of this
177 : : shadow memory content is 00 means that 8 bytes of its
178 : : corresponding memory (which is the memory of
179 : : variable 'b') are addressable.
180 : :
181 : : - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
182 : : The F2 byte pattern is a magic number called
183 : : ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
184 : : region for this shadow byte is a MIDDLE red zone intended to
185 : : sit between two 32-byte aligned slots of {variable,padding}.
186 : :
187 : : - content of shadow memory 8 bytes for slot 3 and 2:
188 : : 0xF4000000. This represents the concatenation of
189 : : variable 'a' and the partial red zone following it, like what we
190 : : had for variable 'b'. The least significant 3 bytes being 00
191 : : means that the 24 bytes of variable 'a' are addressable.
192 : :
193 : : - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
194 : : The F3 byte pattern is a magic number called
195 : : ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
196 : : region for this shadow byte is a RIGHT red zone intended to sit
197 : : at the top of the variables of the stack.
198 : :
199 : : Note that the real variable layout is done in expand_used_vars in
200 : : cfgexpand.cc. When Address Sanitizer is enabled, that function lays
201 : : out the stack variables as well as the different red zones, emits
202 : : prologue code that populates the shadow memory so as to poison (mark
203 : : as non-accessible) the regions of the red zones and mark the regions
204 : : of stack variables as accessible, and emits epilogue code that
205 : : un-poisons (marks as accessible) the regions of red zones right
206 : : before the function exits.
207 : :
208 : : [Protection of global variables]
209 : :
210 : : The basic idea is to insert a red zone between two global variables
211 : : and install a constructor function that calls the asan runtime to
212 : : populate the relevant shadow memory regions at load time.
213 : :
214 : : So the global variables are laid out so as to insert a red zone between
215 : : them. The size of the red zones is chosen so that each variable starts
216 : : on a 32-byte boundary.
217 : :
218 : : Then a constructor function is installed so that, for each global
219 : : variable, it calls the runtime asan library function
220 : : __asan_register_globals with an instance of this type:
221 : :
222 : : struct __asan_global
223 : : {
224 : : // Address of the beginning of the global variable.
225 : : const void *__beg;
226 : :
227 : : // Initial size of the global variable.
228 : : uptr __size;
229 : :
230 : : // Size of the global variable + size of the red zone. This
231 : : // size is 32-byte aligned.
232 : : uptr __size_with_redzone;
233 : :
234 : : // Name of the global variable.
235 : : const void *__name;
236 : :
237 : : // Name of the module where the global variable is declared.
238 : : const void *__module_name;
239 : :
240 : : // 1 if it has dynamic initialization, 0 otherwise.
241 : : uptr __has_dynamic_init;
242 : :
243 : : // A pointer to a struct that contains the source location; may be NULL.
244 : : __asan_global_source_location *__location;
245 : : }
246 : :
247 : : A destructor function that calls the runtime asan library function
248 : : __asan_unregister_globals is also installed. */
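: :
: : /* As a minimal sketch of the constructor/destructor pair described above
: :    (the array contents are filled in per global at compile time; the
: :    symbol names and N are illustrative, not what the compiler emits):
: :
: :      static struct __asan_global __asan_globals_array[N];
: :
: :      static void __attribute__ ((constructor))
: :      module_asan_ctor (void)
: :      {
: :        __asan_register_globals (__asan_globals_array, N);
: :      }
: :
: :      static void __attribute__ ((destructor))
: :      module_asan_dtor (void)
: :      {
: :        __asan_unregister_globals (__asan_globals_array, N);
: :      }  */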
249 : :
250 : : static unsigned HOST_WIDE_INT asan_shadow_offset_value;
251 : : static bool asan_shadow_offset_computed;
252 : : static vec<char *> sanitized_sections;
253 : : static tree last_alloca_addr;
254 : :
255 : : /* Set of variable declarations that are going to be guarded by
256 : : use-after-scope sanitizer. */
257 : :
258 : : hash_set<tree> *asan_handled_variables = NULL;
259 : :
260 : : hash_set <tree> *asan_used_labels = NULL;
261 : :
262 : : /* Global variables for HWASAN stack tagging. */
263 : : /* hwasan_frame_tag_offset records the offset from the frame base tag that the
264 : : next object should have. */
265 : : static uint8_t hwasan_frame_tag_offset = 0;
266 : : /* hwasan_frame_base_ptr is a pointer with the same address as
267 : : `virtual_stack_vars_rtx` for the current frame, and with the frame base tag
268 : : stored in it. N.b. this global RTX does not strictly need to be
269 : : marked GTY, since all uses are in just one pass (cfgexpand) and
270 : : there are no calls to ggc_collect between the uses. We mark it
271 : : GTY(()) anyway to allow the variable to be used later on if needed
272 : : by future features. */
273 : : static GTY(()) rtx hwasan_frame_base_ptr = NULL_RTX;
274 : : /* hwasan_frame_base_init_seq is the sequence of RTL insns that will initialize
275 : : the hwasan_frame_base_ptr. When the hwasan_frame_base_ptr is requested, we
276 : : generate this sequence but do not emit it. If the sequence was created it
277 : : is emitted once the function body has been expanded.
278 : :
279 : : This delay is because the frame base pointer may be needed anywhere in the
280 : : function body, or needed by the expand_used_vars function. Emitting once in
281 : : a known place is simpler than requiring the emission of the instructions to
282 : : be know where it should go depending on the first place the hwasan frame
283 : : base is needed. */
284 : : static GTY(()) rtx_insn *hwasan_frame_base_init_seq = NULL;
285 : :
286 : : /* Structure defining the extent of one object on the stack that HWASAN needs
287 : : to tag in the corresponding shadow stack space.
288 : :
289 : : The range this object spans on the stack is between `untagged_base +
290 : : nearest_offset` and `untagged_base + farthest_offset`.
291 : : `tagged_base` is an rtx containing the same value as `untagged_base` but
292 : : with a random tag stored in the top byte. We record both `untagged_base`
293 : : and `tagged_base` so that `hwasan_emit_prologue` can use both without having
294 : : to emit RTL into the instruction stream to re-calculate one from the other.
295 : : (`hwasan_emit_prologue` needs to use both bases since the
296 : : __hwasan_tag_memory call it emits uses an untagged value, and it calculates
297 : : the tag to store in shadow memory based on the tag_offset plus the tag in
298 : : tagged_base). */
299 : : struct hwasan_stack_var
300 : : {
301 : : rtx untagged_base;
302 : : rtx tagged_base;
303 : : poly_int64 nearest_offset;
304 : : poly_int64 farthest_offset;
305 : : uint8_t tag_offset;
306 : : };
307 : :
308 : : /* Variable recording all stack variables that HWASAN needs to tag.
309 : : Does not need to be marked as GTY(()) since every use is in the cfgexpand
310 : : pass and ggc_collect is not called in the middle of that pass. */
311 : : static vec<hwasan_stack_var> hwasan_tagged_stack_vars;
312 : :
313 : :
314 : : /* Sets shadow offset to value in string VAL. */
315 : :
316 : : bool
317 : 10 : set_asan_shadow_offset (const char *val)
318 : : {
319 : 10 : char *endp;
320 : :
321 : 10 : errno = 0;
322 : : #ifdef HAVE_LONG_LONG
323 : 10 : asan_shadow_offset_value = strtoull (val, &endp, 0);
324 : : #else
325 : : asan_shadow_offset_value = strtoul (val, &endp, 0);
326 : : #endif
327 : 10 : if (!(*val != '\0' && *endp == '\0' && errno == 0))
328 : : return false;
329 : :
330 : 10 : asan_shadow_offset_computed = true;
331 : :
332 : 10 : return true;
333 : : }
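: :
: : /* For example, -fasan-shadow-offset=0x7fff8000 reaches this function with
: :    VAL == "0x7fff8000"; since strtoull is called with base 0, decimal,
: :    octal and hexadecimal spellings are all accepted. */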
334 : :
335 : : /* Set list of user-defined sections that need to be sanitized. */
336 : :
337 : : void
338 : 40 : set_sanitized_sections (const char *sections)
339 : : {
340 : 40 : char *pat;
341 : 40 : unsigned i;
342 : 50 : FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
343 : 10 : free (pat);
344 : 40 : sanitized_sections.truncate (0);
345 : :
346 : 90 : for (const char *s = sections; *s; )
347 : : {
348 : : const char *end;
349 : 220 : for (end = s; *end && *end != ','; ++end);
350 : 50 : size_t len = end - s;
351 : 50 : sanitized_sections.safe_push (xstrndup (s, len));
352 : 50 : s = *end ? end + 1 : end;
353 : : }
354 : 40 : }
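: :
: : /* For example, -fsanitize-sections=.mysec*,.foo clears any previously
: :    recorded patterns and records the two fnmatch patterns ".mysec*" and
: :    ".foo", which section_sanitized_p below matches section names
: :    against. */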
355 : :
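: : /* Return true if STMT is a call to the internal function ASAN_MARK with
: :    first argument equal to FLAG. */
: :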
356 : : bool
357 : 77338 : asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
358 : : {
359 : 77338 : return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
360 : 77338 : && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
361 : : }
362 : :
363 : : bool
364 : 20758358 : asan_sanitize_stack_p (void)
365 : : {
366 : 20758358 : return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
367 : : }
368 : :
369 : : bool
370 : 1451033 : asan_sanitize_allocas_p (void)
371 : : {
372 : 1451033 : return (asan_sanitize_stack_p () && param_asan_protect_allocas);
373 : : }
374 : :
375 : : bool
376 : 9988 : asan_instrument_reads (void)
377 : : {
378 : 9988 : return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_reads);
379 : : }
380 : :
381 : : bool
382 : 8412 : asan_instrument_writes (void)
383 : : {
384 : 8412 : return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_writes);
385 : : }
386 : :
387 : : bool
388 : 3194 : asan_memintrin (void)
389 : : {
390 : 3194 : return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_memintrin);
391 : : }
392 : :
393 : :
394 : : /* Support for --param asan-kernel-mem-intrinsic-prefix=1. */
395 : : static GTY(()) rtx asan_memfn_rtls[3];
396 : :
397 : : rtx
398 : 42 : asan_memfn_rtl (tree fndecl)
399 : : {
400 : 42 : int i;
401 : 42 : const char *f, *p;
402 : 42 : char buf[sizeof ("__hwasan_memmove")];
403 : :
404 : 42 : switch (DECL_FUNCTION_CODE (fndecl))
405 : : {
406 : : case BUILT_IN_MEMCPY: i = 0; f = "memcpy"; break;
407 : 14 : case BUILT_IN_MEMSET: i = 1; f = "memset"; break;
408 : 14 : case BUILT_IN_MEMMOVE: i = 2; f = "memmove"; break;
409 : 0 : default: gcc_unreachable ();
410 : : }
411 : 42 : if (asan_memfn_rtls[i] == NULL_RTX)
412 : : {
413 : 42 : tree save_name = DECL_NAME (fndecl);
414 : 42 : tree save_assembler_name = DECL_ASSEMBLER_NAME (fndecl);
415 : 42 : rtx save_rtl = DECL_RTL (fndecl);
416 : 42 : if (flag_sanitize & SANITIZE_KERNEL_HWADDRESS)
417 : : p = "__hwasan_";
418 : : else
419 : 42 : p = "__asan_";
420 : 42 : strcpy (buf, p);
421 : 42 : strcat (buf, f);
422 : 42 : DECL_NAME (fndecl) = get_identifier (buf);
423 : 42 : DECL_ASSEMBLER_NAME_RAW (fndecl) = NULL_TREE;
424 : 42 : SET_DECL_RTL (fndecl, NULL_RTX);
425 : 42 : asan_memfn_rtls[i] = DECL_RTL (fndecl);
426 : 42 : DECL_NAME (fndecl) = save_name;
427 : 42 : DECL_ASSEMBLER_NAME_RAW (fndecl) = save_assembler_name;
428 : 42 : SET_DECL_RTL (fndecl, save_rtl);
429 : : }
430 : 42 : return asan_memfn_rtls[i];
431 : : }
432 : :
433 : :
434 : : /* Checks whether section SEC should be sanitized. */
435 : :
436 : : static bool
437 : 270 : section_sanitized_p (const char *sec)
438 : : {
439 : 270 : char *pat;
440 : 270 : unsigned i;
441 : 420 : FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
442 : 330 : if (fnmatch (pat, sec, FNM_PERIOD) == 0)
443 : : return true;
444 : : return false;
445 : : }
446 : :
447 : : /* Returns Asan shadow offset. */
448 : :
449 : : static unsigned HOST_WIDE_INT
450 : 14141 : asan_shadow_offset ()
451 : : {
452 : 14141 : if (!asan_shadow_offset_computed)
453 : : {
454 : 1573 : asan_shadow_offset_computed = true;
455 : 1573 : asan_shadow_offset_value = targetm.asan_shadow_offset ();
456 : : }
457 : 14141 : return asan_shadow_offset_value;
458 : : }
459 : :
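: :
: : /* On x86_64 GNU/Linux, for instance, the target hook returns 0x7fff8000,
: :    so shadow addresses are computed as (addr >> 3) + 0x7fff8000. */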
460 : : static bool
461 : 20048 : asan_dynamic_shadow_offset_p ()
462 : : {
463 : 20048 : return (asan_shadow_offset_value == 0)
464 : 20048 : && targetm.asan_dynamic_shadow_offset_p ();
465 : : }
466 : :
467 : : /* Returns Asan shadow offset has been set. */
468 : : bool
469 : 0 : asan_shadow_offset_set_p ()
470 : : {
471 : 0 : return asan_shadow_offset_computed;
472 : : }
473 : :
474 : : alias_set_type asan_shadow_set = -1;
475 : :
476 : : /* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
477 : : alias set is used for all shadow memory accesses. */
478 : : static GTY(()) tree shadow_ptr_types[3];
479 : :
480 : : /* Decl for __asan_option_detect_stack_use_after_return. */
481 : : static GTY(()) tree asan_detect_stack_use_after_return;
482 : :
483 : : static GTY (()) tree asan_shadow_memory_dynamic_address;
484 : :
485 : : /* Local copy for the asan_shadow_memory_dynamic_address within the
486 : : function. */
487 : : static GTY (()) tree asan_local_shadow_memory_dynamic_address;
488 : :
489 : : static tree
490 : 0 : get_asan_shadow_memory_dynamic_address_decl ()
491 : : {
492 : 0 : if (asan_shadow_memory_dynamic_address == NULL_TREE)
493 : : {
494 : 0 : tree id, decl;
495 : 0 : id = get_identifier ("__asan_shadow_memory_dynamic_address");
496 : 0 : decl
497 : 0 : = build_decl (BUILTINS_LOCATION, VAR_DECL, id, pointer_sized_int_node);
498 : 0 : SET_DECL_ASSEMBLER_NAME (decl, id);
499 : 0 : TREE_ADDRESSABLE (decl) = 1;
500 : 0 : DECL_ARTIFICIAL (decl) = 1;
501 : 0 : DECL_IGNORED_P (decl) = 1;
502 : 0 : DECL_EXTERNAL (decl) = 1;
503 : 0 : TREE_STATIC (decl) = 1;
504 : 0 : TREE_PUBLIC (decl) = 1;
505 : 0 : TREE_USED (decl) = 1;
506 : 0 : asan_shadow_memory_dynamic_address = decl;
507 : : }
508 : :
509 : 0 : return asan_shadow_memory_dynamic_address;
510 : : }
511 : :
512 : : void
513 : 5907 : asan_maybe_insert_dynamic_shadow_at_function_entry (function *fun)
514 : : {
515 : 5907 : asan_local_shadow_memory_dynamic_address = NULL_TREE;
516 : 5907 : if (!asan_dynamic_shadow_offset_p ())
517 : : return;
518 : :
519 : 0 : gimple *g;
520 : :
521 : 0 : tree lhs = create_tmp_var (pointer_sized_int_node,
522 : : "__local_asan_shadow_memory_dynamic_address");
523 : :
524 : 0 : g = gimple_build_assign (lhs, get_asan_shadow_memory_dynamic_address_decl ());
525 : 0 : gimple_set_location (g, fun->function_start_locus);
526 : 0 : edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
527 : 0 : gsi_insert_on_edge_immediate (e, g);
528 : :
529 : 0 : asan_local_shadow_memory_dynamic_address = lhs;
530 : : }
531 : :
532 : : /* Hashtable support for memory references used by gimple
533 : : statements. */
534 : :
535 : : /* This type represents a reference to a memory region. */
536 : : struct asan_mem_ref
537 : : {
538 : : /* The expression of the beginning of the memory region. */
539 : : tree start;
540 : :
541 : : /* The size of the access. */
542 : : HOST_WIDE_INT access_size;
543 : : };
544 : :
545 : : object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
546 : :
547 : : /* Initializes an instance of asan_mem_ref. */
548 : :
549 : : static void
550 : 104882 : asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
551 : : {
552 : 104901 : ref->start = start;
553 : 104901 : ref->access_size = access_size;
554 : 0 : }
555 : :
556 : : /* Allocates memory for an instance of asan_mem_ref from the pool
557 : : asan_mem_ref_pool and initializes it.
558 : : START is the address of (or the expression pointing to) the
559 : : beginning of memory reference. ACCESS_SIZE is the size of the
560 : : access to the referenced memory. */
561 : :
562 : : static asan_mem_ref*
563 : 23286 : asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
564 : : {
565 : 0 : asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
566 : :
567 : 23286 : asan_mem_ref_init (ref, start, access_size);
568 : 23286 : return ref;
569 : : }
570 : :
571 : : /* This builds and returns a pointer to the end of the memory region
572 : : that starts at START and has length LEN. */
573 : :
574 : : tree
575 : 0 : asan_mem_ref_get_end (tree start, tree len)
576 : : {
577 : 0 : if (len == NULL_TREE || integer_zerop (len))
578 : 0 : return start;
579 : :
580 : 0 : if (!ptrofftype_p (len))
581 : 0 : len = convert_to_ptrofftype (len);
582 : :
583 : 0 : return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
584 : : }
585 : :
586 : : /* Return a tree expression that represents the end of the referenced
587 : : memory region. Beware that this function can actually build a new
588 : : tree expression. */
589 : :
590 : : tree
591 : 0 : asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
592 : : {
593 : 0 : return asan_mem_ref_get_end (ref->start, len);
594 : : }
595 : :
596 : : struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
597 : : {
598 : : static inline hashval_t hash (const asan_mem_ref *);
599 : : static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
600 : : };
601 : :
602 : : /* Hash a memory reference. */
603 : :
604 : : inline hashval_t
605 : 141673 : asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
606 : : {
607 : 141673 : return iterative_hash_expr (mem_ref->start, 0);
608 : : }
609 : :
610 : : /* Compare two memory references. We accept the length of either
611 : : memory reference to be NULL_TREE. */
612 : :
613 : : inline bool
614 : 99965 : asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
615 : : const asan_mem_ref *m2)
616 : : {
617 : 99965 : return operand_equal_p (m1->start, m2->start, 0);
618 : : }
619 : :
620 : : static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
621 : :
622 : : /* Returns a reference to the hash table containing memory references.
623 : : This function ensures that the hash table is created. Note that
624 : : this hash table is updated by the function
625 : : update_mem_ref_hash_table. */
626 : :
627 : : static hash_table<asan_mem_ref_hasher> *
628 : 53628 : get_mem_ref_hash_table ()
629 : : {
630 : 53628 : if (!asan_mem_ref_ht)
631 : 3090 : asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
632 : :
633 : 53628 : return asan_mem_ref_ht;
634 : : }
635 : :
636 : : /* Clear all entries from the memory references hash table. */
637 : :
638 : : static void
639 : 28732 : empty_mem_ref_hash_table ()
640 : : {
641 : 28732 : if (asan_mem_ref_ht)
642 : 15730 : asan_mem_ref_ht->empty ();
643 : 28732 : }
644 : :
645 : : /* Free the memory references hash table. */
646 : :
647 : : static void
648 : 6088 : free_mem_ref_resources ()
649 : : {
650 : 6088 : delete asan_mem_ref_ht;
651 : 6088 : asan_mem_ref_ht = NULL;
652 : :
653 : 6088 : asan_mem_ref_pool.release ();
654 : 6088 : }
655 : :
656 : : /* Return true iff the memory reference REF has been instrumented. */
657 : :
658 : : static bool
659 : 30330 : has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
660 : : {
661 : 30330 : asan_mem_ref r;
662 : 30330 : asan_mem_ref_init (&r, ref, access_size);
663 : :
664 : 30330 : asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
665 : 30330 : return saved_ref && saved_ref->access_size >= access_size;
666 : : }
667 : :
668 : : /* Return true iff the memory reference REF has been instrumented. */
669 : :
670 : : static bool
671 : 18577 : has_mem_ref_been_instrumented (const asan_mem_ref *ref)
672 : : {
673 : 0 : return has_mem_ref_been_instrumented (ref->start, ref->access_size);
674 : : }
675 : :
676 : : /* Return true iff access to memory region starting at REF and of
677 : : length LEN has been instrumented. */
678 : :
679 : : static bool
680 : 730 : has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
681 : : {
682 : 730 : HOST_WIDE_INT size_in_bytes
683 : 730 : = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
684 : :
685 : 232 : return size_in_bytes != -1
686 : 232 : && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
687 : : }
688 : :
689 : : /* Set REF to the memory reference present in a gimple assignment
690 : : ASSIGNMENT. Return true upon successful completion, false
691 : : otherwise. */
692 : :
693 : : static bool
694 : 21592 : get_mem_ref_of_assignment (const gassign *assignment,
695 : : asan_mem_ref *ref,
696 : : bool *ref_is_store)
697 : : {
698 : 21592 : gcc_assert (gimple_assign_single_p (assignment));
699 : :
700 : 21592 : if (gimple_store_p (assignment)
701 : 21592 : && !gimple_clobber_p (assignment))
702 : : {
703 : 8665 : ref->start = gimple_assign_lhs (assignment);
704 : 8665 : *ref_is_store = true;
705 : : }
706 : 12927 : else if (gimple_assign_load_p (assignment))
707 : : {
708 : 9893 : ref->start = gimple_assign_rhs1 (assignment);
709 : 9893 : *ref_is_store = false;
710 : : }
711 : : else
712 : : return false;
713 : :
714 : 18558 : ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
715 : 18558 : return true;
716 : : }
717 : :
718 : : /* Return address of last allocated dynamic alloca. */
719 : :
720 : : static tree
721 : 400 : get_last_alloca_addr ()
722 : : {
723 : 400 : if (last_alloca_addr)
724 : : return last_alloca_addr;
725 : :
726 : 187 : last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
727 : 187 : gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
728 : 187 : edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
729 : 187 : gsi_insert_on_edge_immediate (e, g);
730 : 187 : return last_alloca_addr;
731 : : }
732 : :
733 : : /* Insert __asan_allocas_unpoison (top, bottom) call before
734 : : __builtin_stack_restore (new_sp) call.
735 : : The pseudocode of this routine should look like this:
736 : : top = last_alloca_addr;
737 : : bot = new_sp;
738 : : __asan_allocas_unpoison (top, bot);
739 : : last_alloca_addr = new_sp;
740 : : __builtin_stack_restore (new_sp);
741 : : In general, we can't use new_sp as the bot parameter because on some
742 : : architectures SP has a non-zero offset from the dynamic stack area. Moreover, on
743 : : some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each
744 : : particular function only after all callees were expanded to rtl.
745 : : The most noticeable example is PowerPC{,64}, see
746 : : http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
747 : : To overcome the issue we use the following trick: pass new_sp as a second
748 : : parameter to __asan_allocas_unpoison and rewrite it during expansion with
749 : : new_sp + (virtual_dynamic_stack_rtx - sp) later in
750 : : expand_asan_emit_allocas_unpoison function.
751 : :
752 : : HWASAN needs to do something very similar; the eventual pseudocode should be:
753 : : __hwasan_tag_memory (virtual_stack_dynamic_rtx,
754 : : 0,
755 : : new_sp - sp);
756 : : __builtin_stack_restore (new_sp)
757 : :
758 : : Need to use the same trick to handle STACK_DYNAMIC_OFFSET as described
759 : : above. */
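: :
: : /* As an illustration (a sketch of the source-level effect, not compiler
: :    output; 'use' is a placeholder):
: :
: :      void
: :      f (int n)
: :      {
: :        for (int i = 0; i < n; ++i)
: :          {
: :            char vla[i + 1];  // lowered to __builtin_alloca_with_align,
: :            use (vla);        // bracketed by stack save/restore calls
: :          }
: :      }
: :
: :    Each iteration ends with a __builtin_stack_restore of the stack
: :    pointer saved before the VLA was allocated, and the routine below
: :    prepends the __asan_allocas_unpoison call to it so the redzones of
: :    the dead allocas do not stay poisoned into later iterations. */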
760 : :
761 : : static void
762 : 410 : handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
763 : : {
764 : 410 : if (!iter
765 : 410 : || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
766 : 207 : return;
767 : :
768 : 203 : tree restored_stack = gimple_call_arg (call, 0);
769 : :
770 : 203 : gimple *g;
771 : :
772 : 203 : if (hwasan_sanitize_allocas_p ())
773 : : {
774 : 0 : enum internal_fn fn = IFN_HWASAN_ALLOCA_UNPOISON;
775 : : /* There is only one piece of information `expand_HWASAN_ALLOCA_UNPOISON`
776 : : needs in order to work: the length of the area that we're
777 : : deallocating. Since the stack pointer is known at expand time, the
778 : : position of the new stack pointer after deallocation is enough
779 : : information to calculate this length. */
780 : 0 : g = gimple_build_call_internal (fn, 1, restored_stack);
781 : : }
782 : : else
783 : : {
784 : 203 : tree last_alloca = get_last_alloca_addr ();
785 : 203 : tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
786 : 203 : g = gimple_build_call (fn, 2, last_alloca, restored_stack);
787 : 203 : gsi_insert_before (iter, g, GSI_SAME_STMT);
788 : 203 : g = gimple_build_assign (last_alloca, restored_stack);
789 : : }
790 : :
791 : 203 : gsi_insert_before (iter, g, GSI_SAME_STMT);
792 : : }
793 : :
794 : : /* Deploy and poison redzones around a __builtin_alloca call. To do this, we
795 : : replace the call with another one with changed parameters and replace
796 : : all its uses with the new address, so
797 : : addr = __builtin_alloca (old_size, align);
798 : : is replaced by
799 : : left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
800 : : The following two statements are optimized out if we know that
801 : : old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. alloca doesn't need partial
802 : : redzone.
803 : : misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
804 : : partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
805 : : right_redzone_size = ASAN_RED_ZONE_SIZE;
806 : : additional_size = left_redzone_size + partial_redzone_size +
807 : : right_redzone_size;
808 : : new_size = old_size + additional_size;
809 : : new_alloca = __builtin_alloca (new_size, max (align, 32))
810 : : __asan_alloca_poison (new_alloca, old_size)
811 : : addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
812 : : last_alloca_addr = new_alloca;
813 : : ADDITIONAL_SIZE is added to make the new memory allocation contain not
814 : : only the requested memory, but also the left, partial and right redzones
815 : : as well as some additional space required by alignment. */
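: :
: : /* A worked example, assuming ASAN_RED_ZONE_SIZE == 32 and no explicit
: :    alignment: for addr = __builtin_alloca (17) we get
: :    left_redzone_size = 32, misalign = 17 & 31 = 17,
: :    partial_redzone_size = 32 - 17 = 15 and right_redzone_size = 32, so
: :    additional_size = 32 + 15 + 32 = 79 and new_size = 17 + 79 = 96.
: :    The rewritten code allocates 96 bytes, calls
: :    __asan_alloca_poison (new_alloca, 17) and returns
: :    new_alloca = new_alloca_with_rz + 32 to the user. */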
816 : :
817 : : static void
818 : 398 : handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
819 : : {
820 : 398 : if (!iter
821 : 398 : || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
822 : 201 : return;
823 : :
824 : 197 : gassign *g;
825 : 197 : gcall *gg;
826 : 197 : tree callee = gimple_call_fndecl (call);
827 : 197 : tree lhs = gimple_call_lhs (call);
828 : 197 : tree old_size = gimple_call_arg (call, 0);
829 : 197 : tree ptr_type = lhs ? TREE_TYPE (lhs) : ptr_type_node;
830 : 197 : tree partial_size = NULL_TREE;
831 : 197 : unsigned int align
832 : 197 : = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
833 : 390 : ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
834 : :
835 : 197 : bool throws = false;
836 : 197 : edge e = NULL;
837 : 197 : if (stmt_can_throw_internal (cfun, call))
838 : : {
839 : 9 : if (!lhs)
840 : : return;
841 : 9 : throws = true;
842 : 9 : e = find_fallthru_edge (gsi_bb (*iter)->succs);
843 : : }
844 : :
845 : 197 : if (hwasan_sanitize_allocas_p ())
846 : : {
847 : 0 : gimple_seq stmts = NULL;
848 : 0 : location_t loc = gimple_location (gsi_stmt (*iter));
849 : : /*
850 : : HWASAN needs a different expansion.
851 : :
852 : : addr = __builtin_alloca (size, align);
853 : :
854 : : should be replaced by
855 : :
856 : : new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
857 : : untagged_addr = __builtin_alloca (new_size, align);
858 : : tag = __hwasan_choose_alloca_tag ();
859 : : addr = ifn_HWASAN_SET_TAG (untagged_addr, tag);
860 : : __hwasan_tag_memory (untagged_addr, tag, new_size);
861 : : */
862 : : /* Ensure alignment at least HWASAN_TAG_GRANULE_SIZE bytes so we start on
863 : : a tag granule. */
864 : 0 : align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE;
865 : :
866 : 0 : tree old_size = gimple_call_arg (call, 0);
867 : 0 : tree new_size = gimple_build_round_up (&stmts, loc, size_type_node,
868 : : old_size,
869 : 0 : HWASAN_TAG_GRANULE_SIZE);
870 : :
871 : : /* Make the alloca call */
872 : 0 : tree untagged_addr
873 : 0 : = gimple_build (&stmts, loc,
874 : : as_combined_fn (BUILT_IN_ALLOCA_WITH_ALIGN), ptr_type,
875 : 0 : new_size, build_int_cst (size_type_node, align));
876 : :
877 : : /* Choose the tag.
878 : : Here we use an internal function so we can choose the tag at expand
879 : : time. We need the decision to be made after stack variables have been
880 : : assigned their tag (i.e. once the hwasan_frame_tag_offset variable has
881 : : been set to one after the last stack variables tag). */
882 : 0 : tree tag = gimple_build (&stmts, loc, CFN_HWASAN_CHOOSE_TAG,
883 : : unsigned_char_type_node);
884 : :
885 : : /* Add tag to pointer. */
886 : 0 : tree addr
887 : 0 : = gimple_build (&stmts, loc, CFN_HWASAN_SET_TAG, ptr_type,
888 : : untagged_addr, tag);
889 : :
890 : : /* Tag shadow memory.
891 : : NOTE: require using `untagged_addr` here for libhwasan API. */
892 : 0 : gimple_build (&stmts, loc, as_combined_fn (BUILT_IN_HWASAN_TAG_MEM),
893 : : void_type_node, untagged_addr, tag, new_size);
894 : :
895 : : /* Insert the built up code sequence into the original instruction stream
896 : : the iterator points to. */
897 : 0 : gsi_insert_seq_before (iter, stmts, GSI_SAME_STMT);
898 : :
899 : : /* Finally, replace old alloca ptr with NEW_ALLOCA. */
900 : 0 : replace_call_with_value (iter, addr);
901 : 0 : return;
902 : : }
903 : :
904 : 197 : tree last_alloca = get_last_alloca_addr ();
905 : 197 : const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
906 : :
907 : : /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
908 : : bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE
909 : : manually. */
910 : 197 : align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
911 : :
912 : 197 : tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
913 : 197 : tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);
914 : :
915 : : /* Extract lower bits from old_size. */
916 : 197 : wide_int size_nonzero_bits = get_nonzero_bits (old_size);
917 : 197 : wide_int rz_mask
918 : 197 : = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
919 : 197 : wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);
920 : :
921 : : /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
922 : : redzone. Otherwise, compute its size here. */
923 : 197 : if (wi::ne_p (old_size_lower_bits, 0))
924 : : {
925 : : /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
926 : : partial_size = ASAN_RED_ZONE_SIZE - misalign. */
927 : 194 : g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
928 : : BIT_AND_EXPR, old_size, alloca_rz_mask);
929 : 194 : gsi_insert_before (iter, g, GSI_SAME_STMT);
930 : 194 : tree misalign = gimple_assign_lhs (g);
931 : 194 : g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
932 : : redzone_size, misalign);
933 : 194 : gsi_insert_before (iter, g, GSI_SAME_STMT);
934 : 194 : partial_size = gimple_assign_lhs (g);
935 : : }
936 : :
937 : : /* additional_size = align + ASAN_RED_ZONE_SIZE. */
938 : 394 : tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
939 : 197 : + ASAN_RED_ZONE_SIZE);
940 : : /* If alloca has partial redzone, include it to additional_size too. */
941 : 197 : if (partial_size)
942 : : {
943 : : /* additional_size += partial_size. */
944 : 194 : g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
945 : : partial_size, additional_size);
946 : 194 : gsi_insert_before (iter, g, GSI_SAME_STMT);
947 : 194 : additional_size = gimple_assign_lhs (g);
948 : : }
949 : :
950 : : /* new_size = old_size + additional_size. */
951 : 197 : g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
952 : : additional_size);
953 : 197 : gsi_insert_before (iter, g, GSI_SAME_STMT);
954 : 197 : tree new_size = gimple_assign_lhs (g);
955 : :
956 : : /* Build new __builtin_alloca call:
957 : : new_alloca_with_rz = __builtin_alloca (new_size, align). */
958 : 197 : tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
959 : 197 : gg = gimple_build_call (fn, 2, new_size,
960 : 197 : build_int_cst (size_type_node, align));
961 : 197 : tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
962 : 197 : gimple_call_set_lhs (gg, new_alloca_with_rz);
963 : 197 : if (throws)
964 : : {
965 : 9 : gimple_call_set_lhs (call, NULL);
966 : 9 : gsi_replace (iter, gg, true);
967 : : }
968 : : else
969 : 188 : gsi_insert_before (iter, gg, GSI_SAME_STMT);
970 : :
971 : : /* new_alloca = new_alloca_with_rz + align. */
972 : 197 : g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
973 : : new_alloca_with_rz,
974 : : build_int_cst (size_type_node,
975 : 197 : align / BITS_PER_UNIT));
976 : 197 : gimple_stmt_iterator gsi = gsi_none ();
977 : 197 : if (throws)
978 : : {
979 : 9 : gsi_insert_on_edge_immediate (e, g);
980 : 9 : gsi = gsi_for_stmt (g);
981 : : }
982 : : else
983 : 188 : gsi_insert_before (iter, g, GSI_SAME_STMT);
984 : 197 : tree new_alloca = gimple_assign_lhs (g);
985 : :
986 : : /* Poison newly created alloca redzones:
987 : : __asan_alloca_poison (new_alloca, old_size). */
988 : 197 : fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
989 : 197 : gg = gimple_build_call (fn, 2, new_alloca, old_size);
990 : 197 : if (throws)
991 : 9 : gsi_insert_after (&gsi, gg, GSI_NEW_STMT);
992 : : else
993 : 188 : gsi_insert_before (iter, gg, GSI_SAME_STMT);
994 : :
995 : : /* Save new_alloca_with_rz value into last_alloca to use it during
996 : : allocas unpoisoning. */
997 : 197 : g = gimple_build_assign (last_alloca, new_alloca_with_rz);
998 : 197 : if (throws)
999 : 9 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1000 : : else
1001 : 188 : gsi_insert_before (iter, g, GSI_SAME_STMT);
1002 : :
1003 : : /* Finally, replace old alloca ptr with NEW_ALLOCA. */
1004 : 197 : if (throws)
1005 : : {
1006 : 9 : g = gimple_build_assign (lhs, new_alloca);
1007 : 9 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1008 : : }
1009 : : else
1010 : 188 : replace_call_with_value (iter, new_alloca);
1011 : 197 : }
1012 : :
1013 : : /* Return the memory references contained in a gimple statement
1014 : : representing a builtin call that has to do with memory access. */
1015 : :
1016 : : static bool
1017 : 6376 : get_mem_refs_of_builtin_call (gcall *call,
1018 : : asan_mem_ref *src0,
1019 : : tree *src0_len,
1020 : : bool *src0_is_store,
1021 : : asan_mem_ref *src1,
1022 : : tree *src1_len,
1023 : : bool *src1_is_store,
1024 : : asan_mem_ref *dst,
1025 : : tree *dst_len,
1026 : : bool *dst_is_store,
1027 : : bool *dest_is_deref,
1028 : : bool *intercepted_p,
1029 : : gimple_stmt_iterator *iter = NULL)
1030 : : {
1031 : 6376 : gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1032 : :
1033 : 6376 : tree callee = gimple_call_fndecl (call);
1034 : 6376 : tree source0 = NULL_TREE, source1 = NULL_TREE,
1035 : 6376 : dest = NULL_TREE, len = NULL_TREE;
1036 : 6376 : bool is_store = true, got_reference_p = false;
1037 : 6376 : HOST_WIDE_INT access_size = 1;
1038 : :
1039 : 6376 : *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
1040 : :
1041 : 6376 : switch (DECL_FUNCTION_CODE (callee))
1042 : : {
1043 : : /* (s, s, n) style memops. */
1044 : 112 : case BUILT_IN_BCMP:
1045 : 112 : case BUILT_IN_MEMCMP:
1046 : 112 : source0 = gimple_call_arg (call, 0);
1047 : 112 : source1 = gimple_call_arg (call, 1);
1048 : 112 : len = gimple_call_arg (call, 2);
1049 : 112 : break;
1050 : :
1051 : : /* (src, dest, n) style memops. */
1052 : 0 : case BUILT_IN_BCOPY:
1053 : 0 : source0 = gimple_call_arg (call, 0);
1054 : 0 : dest = gimple_call_arg (call, 1);
1055 : 0 : len = gimple_call_arg (call, 2);
1056 : 0 : break;
1057 : :
1058 : : /* (dest, src, n) style memops. */
1059 : 792 : case BUILT_IN_MEMCPY:
1060 : 792 : case BUILT_IN_MEMCPY_CHK:
1061 : 792 : case BUILT_IN_MEMMOVE:
1062 : 792 : case BUILT_IN_MEMMOVE_CHK:
1063 : 792 : case BUILT_IN_MEMPCPY:
1064 : 792 : case BUILT_IN_MEMPCPY_CHK:
1065 : 792 : dest = gimple_call_arg (call, 0);
1066 : 792 : source0 = gimple_call_arg (call, 1);
1067 : 792 : len = gimple_call_arg (call, 2);
1068 : 792 : break;
1069 : :
1070 : : /* (dest, n) style memops. */
1071 : 0 : case BUILT_IN_BZERO:
1072 : 0 : dest = gimple_call_arg (call, 0);
1073 : 0 : len = gimple_call_arg (call, 1);
1074 : 0 : break;
1075 : :
1076 : : /* (dest, x, n) style memops. */
1077 : 388 : case BUILT_IN_MEMSET:
1078 : 388 : case BUILT_IN_MEMSET_CHK:
1079 : 388 : dest = gimple_call_arg (call, 0);
1080 : 388 : len = gimple_call_arg (call, 2);
1081 : 388 : break;
1082 : :
1083 : 96 : case BUILT_IN_STRLEN:
1084 : : /* Special case strlen here since its length is taken from its return
1085 : : value.
1086 : :
1087 : : The approach taken by the sanitizers is to check a memory access
1088 : : before it's taken. For ASAN strlen is intercepted by libasan, so no
1089 : : check is inserted by the compiler.
1090 : :
1091 : : This function still returns `true` and provides a length to the rest
1092 : : of the ASAN pass in order to record what areas have been checked,
1093 : : avoiding superfluous checks later on.
1094 : :
1095 : : HWASAN does not intercept any of these internal functions.
1096 : : This means that checks for memory accesses must be inserted by the
1097 : : compiler.
1098 : : strlen is a special case, because we can tell the length from the
1099 : : return of the function, but that is not known until after the function
1100 : : has returned.
1101 : :
1102 : : Hence we can't check the memory access before it happens.
1103 : : We could check the memory access after it has already happened, but
1104 : : for now we choose to just ignore `strlen` calls.
1105 : : This decision was simply made because that means the special case is
1106 : : limited to this one case of this one function. */
1107 : 96 : if (hwasan_sanitize_p ())
1108 : : return false;
1109 : 64 : source0 = gimple_call_arg (call, 0);
1110 : 64 : len = gimple_call_lhs (call);
1111 : 64 : break;
1112 : :
1113 : 410 : case BUILT_IN_STACK_RESTORE:
1114 : 410 : handle_builtin_stack_restore (call, iter);
1115 : 410 : break;
1116 : :
1117 : 398 : CASE_BUILT_IN_ALLOCA:
1118 : 398 : handle_builtin_alloca (call, iter);
1119 : 398 : break;
1120 : : /* And now the __atomic* and __sync builtins.
1121 : : These are handled differently from the classical memory
1122 : : access builtins above. */
1123 : :
1124 : 0 : case BUILT_IN_ATOMIC_LOAD_1:
1125 : 0 : is_store = false;
1126 : : /* FALLTHRU */
1127 : 0 : case BUILT_IN_SYNC_FETCH_AND_ADD_1:
1128 : 0 : case BUILT_IN_SYNC_FETCH_AND_SUB_1:
1129 : 0 : case BUILT_IN_SYNC_FETCH_AND_OR_1:
1130 : 0 : case BUILT_IN_SYNC_FETCH_AND_AND_1:
1131 : 0 : case BUILT_IN_SYNC_FETCH_AND_XOR_1:
1132 : 0 : case BUILT_IN_SYNC_FETCH_AND_NAND_1:
1133 : 0 : case BUILT_IN_SYNC_ADD_AND_FETCH_1:
1134 : 0 : case BUILT_IN_SYNC_SUB_AND_FETCH_1:
1135 : 0 : case BUILT_IN_SYNC_OR_AND_FETCH_1:
1136 : 0 : case BUILT_IN_SYNC_AND_AND_FETCH_1:
1137 : 0 : case BUILT_IN_SYNC_XOR_AND_FETCH_1:
1138 : 0 : case BUILT_IN_SYNC_NAND_AND_FETCH_1:
1139 : 0 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1140 : 0 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
1141 : 0 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
1142 : 0 : case BUILT_IN_SYNC_LOCK_RELEASE_1:
1143 : 0 : case BUILT_IN_ATOMIC_EXCHANGE_1:
1144 : 0 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1145 : 0 : case BUILT_IN_ATOMIC_STORE_1:
1146 : 0 : case BUILT_IN_ATOMIC_ADD_FETCH_1:
1147 : 0 : case BUILT_IN_ATOMIC_SUB_FETCH_1:
1148 : 0 : case BUILT_IN_ATOMIC_AND_FETCH_1:
1149 : 0 : case BUILT_IN_ATOMIC_NAND_FETCH_1:
1150 : 0 : case BUILT_IN_ATOMIC_XOR_FETCH_1:
1151 : 0 : case BUILT_IN_ATOMIC_OR_FETCH_1:
1152 : 0 : case BUILT_IN_ATOMIC_FETCH_ADD_1:
1153 : 0 : case BUILT_IN_ATOMIC_FETCH_SUB_1:
1154 : 0 : case BUILT_IN_ATOMIC_FETCH_AND_1:
1155 : 0 : case BUILT_IN_ATOMIC_FETCH_NAND_1:
1156 : 0 : case BUILT_IN_ATOMIC_FETCH_XOR_1:
1157 : 0 : case BUILT_IN_ATOMIC_FETCH_OR_1:
1158 : 0 : access_size = 1;
1159 : 0 : goto do_atomic;
1160 : :
1161 : 0 : case BUILT_IN_ATOMIC_LOAD_2:
1162 : 0 : is_store = false;
1163 : : /* FALLTHRU */
1164 : 0 : case BUILT_IN_SYNC_FETCH_AND_ADD_2:
1165 : 0 : case BUILT_IN_SYNC_FETCH_AND_SUB_2:
1166 : 0 : case BUILT_IN_SYNC_FETCH_AND_OR_2:
1167 : 0 : case BUILT_IN_SYNC_FETCH_AND_AND_2:
1168 : 0 : case BUILT_IN_SYNC_FETCH_AND_XOR_2:
1169 : 0 : case BUILT_IN_SYNC_FETCH_AND_NAND_2:
1170 : 0 : case BUILT_IN_SYNC_ADD_AND_FETCH_2:
1171 : 0 : case BUILT_IN_SYNC_SUB_AND_FETCH_2:
1172 : 0 : case BUILT_IN_SYNC_OR_AND_FETCH_2:
1173 : 0 : case BUILT_IN_SYNC_AND_AND_FETCH_2:
1174 : 0 : case BUILT_IN_SYNC_XOR_AND_FETCH_2:
1175 : 0 : case BUILT_IN_SYNC_NAND_AND_FETCH_2:
1176 : 0 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1177 : 0 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
1178 : 0 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
1179 : 0 : case BUILT_IN_SYNC_LOCK_RELEASE_2:
1180 : 0 : case BUILT_IN_ATOMIC_EXCHANGE_2:
1181 : 0 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1182 : 0 : case BUILT_IN_ATOMIC_STORE_2:
1183 : 0 : case BUILT_IN_ATOMIC_ADD_FETCH_2:
1184 : 0 : case BUILT_IN_ATOMIC_SUB_FETCH_2:
1185 : 0 : case BUILT_IN_ATOMIC_AND_FETCH_2:
1186 : 0 : case BUILT_IN_ATOMIC_NAND_FETCH_2:
1187 : 0 : case BUILT_IN_ATOMIC_XOR_FETCH_2:
1188 : 0 : case BUILT_IN_ATOMIC_OR_FETCH_2:
1189 : 0 : case BUILT_IN_ATOMIC_FETCH_ADD_2:
1190 : 0 : case BUILT_IN_ATOMIC_FETCH_SUB_2:
1191 : 0 : case BUILT_IN_ATOMIC_FETCH_AND_2:
1192 : 0 : case BUILT_IN_ATOMIC_FETCH_NAND_2:
1193 : 0 : case BUILT_IN_ATOMIC_FETCH_XOR_2:
1194 : 0 : case BUILT_IN_ATOMIC_FETCH_OR_2:
1195 : 0 : access_size = 2;
1196 : 0 : goto do_atomic;
1197 : :
1198 : 0 : case BUILT_IN_ATOMIC_LOAD_4:
1199 : 0 : is_store = false;
1200 : : /* FALLTHRU */
1201 : 56 : case BUILT_IN_SYNC_FETCH_AND_ADD_4:
1202 : 56 : case BUILT_IN_SYNC_FETCH_AND_SUB_4:
1203 : 56 : case BUILT_IN_SYNC_FETCH_AND_OR_4:
1204 : 56 : case BUILT_IN_SYNC_FETCH_AND_AND_4:
1205 : 56 : case BUILT_IN_SYNC_FETCH_AND_XOR_4:
1206 : 56 : case BUILT_IN_SYNC_FETCH_AND_NAND_4:
1207 : 56 : case BUILT_IN_SYNC_ADD_AND_FETCH_4:
1208 : 56 : case BUILT_IN_SYNC_SUB_AND_FETCH_4:
1209 : 56 : case BUILT_IN_SYNC_OR_AND_FETCH_4:
1210 : 56 : case BUILT_IN_SYNC_AND_AND_FETCH_4:
1211 : 56 : case BUILT_IN_SYNC_XOR_AND_FETCH_4:
1212 : 56 : case BUILT_IN_SYNC_NAND_AND_FETCH_4:
1213 : 56 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1214 : 56 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
1215 : 56 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
1216 : 56 : case BUILT_IN_SYNC_LOCK_RELEASE_4:
1217 : 56 : case BUILT_IN_ATOMIC_EXCHANGE_4:
1218 : 56 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1219 : 56 : case BUILT_IN_ATOMIC_STORE_4:
1220 : 56 : case BUILT_IN_ATOMIC_ADD_FETCH_4:
1221 : 56 : case BUILT_IN_ATOMIC_SUB_FETCH_4:
1222 : 56 : case BUILT_IN_ATOMIC_AND_FETCH_4:
1223 : 56 : case BUILT_IN_ATOMIC_NAND_FETCH_4:
1224 : 56 : case BUILT_IN_ATOMIC_XOR_FETCH_4:
1225 : 56 : case BUILT_IN_ATOMIC_OR_FETCH_4:
1226 : 56 : case BUILT_IN_ATOMIC_FETCH_ADD_4:
1227 : 56 : case BUILT_IN_ATOMIC_FETCH_SUB_4:
1228 : 56 : case BUILT_IN_ATOMIC_FETCH_AND_4:
1229 : 56 : case BUILT_IN_ATOMIC_FETCH_NAND_4:
1230 : 56 : case BUILT_IN_ATOMIC_FETCH_XOR_4:
1231 : 56 : case BUILT_IN_ATOMIC_FETCH_OR_4:
1232 : 56 : access_size = 4;
1233 : 56 : goto do_atomic;
1234 : :
1235 : 0 : case BUILT_IN_ATOMIC_LOAD_8:
1236 : 0 : is_store = false;
1237 : : /* FALLTHRU */
1238 : 0 : case BUILT_IN_SYNC_FETCH_AND_ADD_8:
1239 : 0 : case BUILT_IN_SYNC_FETCH_AND_SUB_8:
1240 : 0 : case BUILT_IN_SYNC_FETCH_AND_OR_8:
1241 : 0 : case BUILT_IN_SYNC_FETCH_AND_AND_8:
1242 : 0 : case BUILT_IN_SYNC_FETCH_AND_XOR_8:
1243 : 0 : case BUILT_IN_SYNC_FETCH_AND_NAND_8:
1244 : 0 : case BUILT_IN_SYNC_ADD_AND_FETCH_8:
1245 : 0 : case BUILT_IN_SYNC_SUB_AND_FETCH_8:
1246 : 0 : case BUILT_IN_SYNC_OR_AND_FETCH_8:
1247 : 0 : case BUILT_IN_SYNC_AND_AND_FETCH_8:
1248 : 0 : case BUILT_IN_SYNC_XOR_AND_FETCH_8:
1249 : 0 : case BUILT_IN_SYNC_NAND_AND_FETCH_8:
1250 : 0 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1251 : 0 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
1252 : 0 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
1253 : 0 : case BUILT_IN_SYNC_LOCK_RELEASE_8:
1254 : 0 : case BUILT_IN_ATOMIC_EXCHANGE_8:
1255 : 0 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1256 : 0 : case BUILT_IN_ATOMIC_STORE_8:
1257 : 0 : case BUILT_IN_ATOMIC_ADD_FETCH_8:
1258 : 0 : case BUILT_IN_ATOMIC_SUB_FETCH_8:
1259 : 0 : case BUILT_IN_ATOMIC_AND_FETCH_8:
1260 : 0 : case BUILT_IN_ATOMIC_NAND_FETCH_8:
1261 : 0 : case BUILT_IN_ATOMIC_XOR_FETCH_8:
1262 : 0 : case BUILT_IN_ATOMIC_OR_FETCH_8:
1263 : 0 : case BUILT_IN_ATOMIC_FETCH_ADD_8:
1264 : 0 : case BUILT_IN_ATOMIC_FETCH_SUB_8:
1265 : 0 : case BUILT_IN_ATOMIC_FETCH_AND_8:
1266 : 0 : case BUILT_IN_ATOMIC_FETCH_NAND_8:
1267 : 0 : case BUILT_IN_ATOMIC_FETCH_XOR_8:
1268 : 0 : case BUILT_IN_ATOMIC_FETCH_OR_8:
1269 : 0 : access_size = 8;
1270 : 0 : goto do_atomic;
1271 : :
1272 : 0 : case BUILT_IN_ATOMIC_LOAD_16:
1273 : 0 : is_store = false;
1274 : : /* FALLTHRU */
1275 : : case BUILT_IN_SYNC_FETCH_AND_ADD_16:
1276 : : case BUILT_IN_SYNC_FETCH_AND_SUB_16:
1277 : : case BUILT_IN_SYNC_FETCH_AND_OR_16:
1278 : : case BUILT_IN_SYNC_FETCH_AND_AND_16:
1279 : : case BUILT_IN_SYNC_FETCH_AND_XOR_16:
1280 : : case BUILT_IN_SYNC_FETCH_AND_NAND_16:
1281 : : case BUILT_IN_SYNC_ADD_AND_FETCH_16:
1282 : : case BUILT_IN_SYNC_SUB_AND_FETCH_16:
1283 : : case BUILT_IN_SYNC_OR_AND_FETCH_16:
1284 : : case BUILT_IN_SYNC_AND_AND_FETCH_16:
1285 : : case BUILT_IN_SYNC_XOR_AND_FETCH_16:
1286 : : case BUILT_IN_SYNC_NAND_AND_FETCH_16:
1287 : : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1288 : : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
1289 : : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
1290 : : case BUILT_IN_SYNC_LOCK_RELEASE_16:
1291 : : case BUILT_IN_ATOMIC_EXCHANGE_16:
1292 : : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1293 : : case BUILT_IN_ATOMIC_STORE_16:
1294 : : case BUILT_IN_ATOMIC_ADD_FETCH_16:
1295 : : case BUILT_IN_ATOMIC_SUB_FETCH_16:
1296 : : case BUILT_IN_ATOMIC_AND_FETCH_16:
1297 : : case BUILT_IN_ATOMIC_NAND_FETCH_16:
1298 : : case BUILT_IN_ATOMIC_XOR_FETCH_16:
1299 : : case BUILT_IN_ATOMIC_OR_FETCH_16:
1300 : : case BUILT_IN_ATOMIC_FETCH_ADD_16:
1301 : : case BUILT_IN_ATOMIC_FETCH_SUB_16:
1302 : : case BUILT_IN_ATOMIC_FETCH_AND_16:
1303 : : case BUILT_IN_ATOMIC_FETCH_NAND_16:
1304 : : case BUILT_IN_ATOMIC_FETCH_XOR_16:
1305 : : case BUILT_IN_ATOMIC_FETCH_OR_16:
1306 : : access_size = 16;
1307 : : /* FALLTHRU */
1308 : 56 : do_atomic:
1309 : 56 : {
1310 : 56 : dest = gimple_call_arg (call, 0);
1311 : : /* DEST represents the address of a memory location.
1312 : : instrument_derefs wants the memory location, so let's
1313 : : dereference the address DEST before handing it to
1314 : : instrument_derefs. */
1315 : 112 : tree type = build_nonstandard_integer_type (access_size
1316 : 56 : * BITS_PER_UNIT, 1);
1317 : 56 : dest = build2 (MEM_REF, type, dest,
1318 : : build_int_cst (build_pointer_type (char_type_node), 0));
1319 : 56 : break;
1320 : : }
1321 : :
1322 : : default:
1323 : : /* The other memory access builtins are not instrumented in this
1324 : : function because they either don't have any length parameter,
1325 : : or their length parameter is just a limit. */
1326 : : break;
1327 : : }
1328 : :
1329 : 2220 : if (len != NULL_TREE)
1330 : : {
1331 : 1356 : if (source0 != NULL_TREE)
1332 : : {
1333 : 968 : src0->start = source0;
1334 : 968 : src0->access_size = access_size;
1335 : 968 : *src0_len = len;
1336 : 968 : *src0_is_store = false;
1337 : : }
1338 : :
1339 : 1356 : if (source1 != NULL_TREE)
1340 : : {
1341 : 112 : src1->start = source1;
1342 : 112 : src1->access_size = access_size;
1343 : 112 : *src1_len = len;
1344 : 112 : *src1_is_store = false;
1345 : : }
1346 : :
1347 : 1356 : if (dest != NULL_TREE)
1348 : : {
1349 : 1180 : dst->start = dest;
1350 : 1180 : dst->access_size = access_size;
1351 : 1180 : *dst_len = len;
1352 : 1180 : *dst_is_store = true;
1353 : : }
1354 : :
1355 : : got_reference_p = true;
1356 : : }
1357 : 4988 : else if (dest)
1358 : : {
1359 : 56 : dst->start = dest;
1360 : 56 : dst->access_size = access_size;
1361 : 56 : *dst_len = NULL_TREE;
1362 : 56 : *dst_is_store = is_store;
1363 : 56 : *dest_is_deref = true;
1364 : 56 : got_reference_p = true;
1365 : : }
1366 : :
1367 : : return got_reference_p;
1368 : : }
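: :
: : /* For example, memcpy (d, s, n) comes back as DST = {d, 1} and
: :    SRC0 = {s, 1} with both lengths set to n, while __atomic_load_4 (p, m)
: :    wraps p in a 4-byte MEM_REF and records it as a dereferenced,
: :    non-store destination. */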
1369 : :
1370 : : /* Return true iff a given gimple statement has been instrumented.
1371 : : Note that the statement is "defined" by the memory references it
1372 : : contains. */
1373 : :
1374 : : static bool
1375 : 128220 : has_stmt_been_instrumented_p (gimple *stmt)
1376 : : {
1377 : 128220 : if (gimple_assign_single_p (stmt))
1378 : : {
1379 : 21592 : bool r_is_store;
1380 : 21592 : asan_mem_ref r;
1381 : 21592 : asan_mem_ref_init (&r, NULL, 1);
1382 : :
1383 : 21592 : if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
1384 : : &r_is_store))
1385 : : {
1386 : 18558 : if (!has_mem_ref_been_instrumented (&r))
1387 : 18558 : return false;
1388 : 724 : if (r_is_store && gimple_assign_load_p (stmt))
1389 : : {
1390 : 1 : asan_mem_ref src;
1391 : 1 : asan_mem_ref_init (&src, NULL, 1);
1392 : 1 : src.start = gimple_assign_rhs1 (stmt);
1393 : 1 : src.access_size = int_size_in_bytes (TREE_TYPE (src.start));
1394 : 1 : if (!has_mem_ref_been_instrumented (&src))
1395 : : return false;
1396 : : }
1397 : 723 : return true;
1398 : : }
1399 : : }
1400 : 106628 : else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1401 : : {
1402 : 3206 : asan_mem_ref src0, src1, dest;
1403 : 3206 : asan_mem_ref_init (&src0, NULL, 1);
1404 : 3206 : asan_mem_ref_init (&src1, NULL, 1);
1405 : 3206 : asan_mem_ref_init (&dest, NULL, 1);
1406 : :
1407 : 3206 : tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1408 : 3206 : bool src0_is_store = false, src1_is_store = false,
1409 : : dest_is_store = false, dest_is_deref = false, intercepted_p = true;
1410 : 3206 : if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
1411 : : &src0, &src0_len, &src0_is_store,
1412 : : &src1, &src1_len, &src1_is_store,
1413 : : &dest, &dest_len, &dest_is_store,
1414 : : &dest_is_deref, &intercepted_p))
1415 : : {
1416 : 718 : if (src0.start != NULL_TREE
1417 : 718 : && !has_mem_ref_been_instrumented (&src0, src0_len))
1418 : 718 : return false;
1419 : :
1420 : 240 : if (src1.start != NULL_TREE
1421 : 240 : && !has_mem_ref_been_instrumented (&src1, src1_len))
1422 : : return false;
1423 : :
1424 : 240 : if (dest.start != NULL_TREE
1425 : 240 : && !has_mem_ref_been_instrumented (&dest, dest_len))
1426 : : return false;
1427 : :
1428 : 12 : return true;
1429 : : }
1430 : : }
1431 : 103422 : else if (is_gimple_call (stmt)
1432 : 15021 : && gimple_store_p (stmt)
1433 : 103641 : && (gimple_call_builtin_p (stmt)
1434 : 219 : || gimple_call_internal_p (stmt)
1435 : 219 : || !aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
1436 : 219 : gimple_call_fntype (stmt))))
1437 : : {
1438 : 18 : asan_mem_ref r;
1439 : 18 : asan_mem_ref_init (&r, NULL, 1);
1440 : :
1441 : 18 : r.start = gimple_call_lhs (stmt);
1442 : 18 : r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
1443 : 18 : return has_mem_ref_been_instrumented (&r);
1444 : : }
1445 : :
1446 : : return false;
1447 : : }
1448 : :
1449 : : /* Insert a memory reference into the hash table. */
1450 : :
1451 : : static void
1452 : 23298 : update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
1453 : : {
1454 : 23298 : hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
1455 : :
1456 : 23298 : asan_mem_ref r;
1457 : 23298 : asan_mem_ref_init (&r, ref, access_size);
1458 : :
1459 : 23298 : asan_mem_ref **slot = ht->find_slot (&r, INSERT);
1460 : 23298 : if (*slot == NULL || (*slot)->access_size < access_size)
1461 : 23286 : *slot = asan_mem_ref_new (ref, access_size);
1462 : 23298 : }
1463 : :
1464 : : /* Initialize shadow_ptr_types array. */
1465 : :
1466 : : static void
1467 : 2363 : asan_init_shadow_ptr_types (void)
1468 : : {
1469 : 2363 : asan_shadow_set = new_alias_set ();
1470 : 2363 : tree types[3] = { signed_char_type_node, short_integer_type_node,
1471 : 2363 : integer_type_node };
1472 : :
1473 : 9452 : for (unsigned i = 0; i < 3; i++)
1474 : : {
1475 : 7089 : shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1476 : 7089 : TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1477 : 7089 : shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1478 : : }
1479 : :
1480 : 2363 : initialize_sanitizer_builtins ();
1481 : 2363 : }
1482 : :
1483 : : /* Create an ADDR_EXPR of a STRING_CST holding the text formatted in PP. */
1484 : :
1485 : : static tree
1486 : 11018 : asan_pp_string (pretty_printer *pp)
1487 : : {
1488 : 11018 : const char *buf = pp_formatted_text (pp);
1489 : 11018 : size_t len = strlen (buf);
1490 : 11018 : tree ret = build_string (len + 1, buf);
1491 : 22036 : TREE_TYPE (ret)
1492 : 11018 : = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1493 : 11018 : build_index_type (size_int (len)));
1494 : 11018 : TREE_READONLY (ret) = 1;
1495 : 11018 : TREE_STATIC (ret) = 1;
1496 : 11018 : return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1497 : : }
1498 : :
1499 : : /* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a library
1500 : :    call here though. */
1501 : :
1502 : : static void
1503 : 1987 : asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1504 : : {
1505 : 1987 : rtx_insn *insn, *insns, *jump;
1506 : 1987 : rtx_code_label *top_label;
1507 : 1987 : rtx end, addr, tmp;
1508 : :
1509 : 1987 : gcc_assert ((len & 3) == 0);
1510 : 1987 : start_sequence ();
1511 : 1987 : clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1512 : 1987 : insns = end_sequence ();
1513 : 7157 : for (insn = insns; insn; insn = NEXT_INSN (insn))
1514 : 3195 : if (CALL_P (insn))
1515 : : break;
1516 : 1987 : if (insn == NULL_RTX)
1517 : : {
1518 : 1975 : emit_insn (insns);
1519 : 1975 : return;
1520 : : }
1521 : :
1522 : 12 : top_label = gen_label_rtx ();
1523 : 12 : addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1524 : 12 : shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1525 : 12 : end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1526 : 12 : emit_label (top_label);
1527 : :
1528 : 12 : emit_move_insn (shadow_mem, const0_rtx);
1529 : 12 : tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1530 : : true, OPTAB_LIB_WIDEN);
1531 : 12 : if (tmp != addr)
1532 : 0 : emit_move_insn (addr, tmp);
1533 : 12 : emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1534 : 12 : jump = get_last_insn ();
1535 : 12 : gcc_assert (JUMP_P (jump));
1536 : 12 : add_reg_br_prob_note (jump,
1537 : 24 : profile_probability::guessed_always ()
1538 : : .apply_scale (80, 100));
1539 : : }
1540 : :
1541 : : void
1542 : 5779 : asan_function_start (void)
1543 : : {
1544 : 5779 : ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC", current_function_funcdef_no);
1545 : 5779 : }
1546 : :
1547 : : /* Return number of shadow bytes that are occupied by a local variable
1548 : : of SIZE bytes. */
1549 : :
1550 : : static unsigned HOST_WIDE_INT
1551 : 1307 : shadow_mem_size (unsigned HOST_WIDE_INT size)
1552 : : {
1553 : : /* It must be possible to align stack variables to the granularity
1554 : :    of shadow memory. */
1555 : 1307 : gcc_assert (BITS_PER_UNIT
1556 : : * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
1557 : :
1558 : 1307 : return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1559 : : }
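 : :
 : : /* Illustrative arithmetic (not part of GCC): with the default
 : :    ASAN_SHADOW_GRANULARITY of 8, this is a plain round-up division, e.g.:
 : :
 : :      shadow_mem_size (1)  == 1
 : :      shadow_mem_size (8)  == 1
 : :      shadow_mem_size (13) == 2
 : :      shadow_mem_size (32) == 4  */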
1560 : :
1561 : : /* Always emit 4 bytes at a time. */
1562 : : #define RZ_BUFFER_SIZE 4
1563 : :
1564 : : /* ASAN redzone buffer container that handles emission of shadow bytes. */
1565 : 2870 : class asan_redzone_buffer
1566 : : {
1567 : : public:
1568 : : /* Constructor. */
1569 : 1435 : asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
1570 : 1435 : m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
1571 : 1435 : m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
1572 : : {}
1573 : :
1574 : : /* Emit VALUE shadow byte at a given OFFSET. */
1575 : : void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);
1576 : :
1577 : : /* Emit the buffered shadow bytes as a single RTX store. */
1578 : : void flush_redzone_payload (void);
1579 : :
1580 : : private:
1581 : : /* Flush if the buffer is full
1582 : :    (i.e. its length equals RZ_BUFFER_SIZE). */
1583 : : void flush_if_full (void);
1584 : :
1585 : : /* Memory where we last emitted a redzone payload. */
1586 : : rtx m_shadow_mem;
1587 : :
1588 : : /* Relative offset where we last emitted a redzone payload. */
1589 : : HOST_WIDE_INT m_prev_offset;
1590 : :
1591 : : /* Relative original offset. Used for checking only. */
1592 : : HOST_WIDE_INT m_original_offset;
1593 : :
1594 : : public:
1595 : : /* Buffer with redzone payload. */
1596 : : auto_vec<unsigned char> m_shadow_bytes;
1597 : : };
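 : :
 : : /* A minimal sketch of the buffering scheme above (illustrative only, not
 : :    GCC code; assumes a little-endian target): rather than emitting four
 : :    byte-sized shadow stores, the buffer packs the bytes into one 32-bit
 : :    value -- mirroring flush_redzone_payload below -- and emits a single
 : :    SImode store:
 : :
 : :      unsigned char bytes[4] = { 0xf1, 0xf1, 0xf1, 0xf1 };
 : :      unsigned int val = 0;
 : :      for (unsigned i = 0; i < 4; i++)
 : :        val |= (unsigned int) bytes[i] << (8 * i);
 : :      // val == 0xf1f1f1f1: one word store covers four shadow bytes,
 : :      // i.e. 4 * ASAN_SHADOW_GRANULARITY == 32 bytes of stack.  */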
1598 : :
1599 : : /* Emit VALUE shadow byte at a given OFFSET. */
1600 : :
1601 : : void
1602 : 19189 : asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
1603 : : unsigned char value)
1604 : : {
1605 : 19189 : gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
1606 : 19189 : gcc_assert (offset >= m_prev_offset);
1607 : :
1608 : 19189 : HOST_WIDE_INT off
1609 : 19189 : = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
1610 : 19189 : if (off == offset)
1611 : : /* Consecutive shadow memory byte. */;
1612 : 3969 : else if (offset < m_prev_offset + (HOST_WIDE_INT) (ASAN_SHADOW_GRANULARITY
1613 : : * RZ_BUFFER_SIZE)
1614 : 3969 : && !m_shadow_bytes.is_empty ())
1615 : : {
1616 : : /* Shadow memory byte with a small gap. */
1617 : 70 : for (; off < offset; off += ASAN_SHADOW_GRANULARITY)
1618 : 35 : m_shadow_bytes.safe_push (0);
1619 : : }
1620 : : else
1621 : : {
1622 : 3934 : if (!m_shadow_bytes.is_empty ())
1623 : 318 : flush_redzone_payload ();
1624 : :
1625 : : /* Maybe start earlier in order to use aligned store. */
1626 : : /* Maybe start earlier in order to use an aligned store. */
1627 : 3934 : if (align)
1628 : : {
1629 : 1032 : offset -= align;
1630 : 2648 : for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
1631 : 1616 : m_shadow_bytes.safe_push (0);
1632 : : }
1633 : :
1634 : : /* Adjust m_prev_offset and m_shadow_mem. */
1635 : 3934 : HOST_WIDE_INT diff = offset - m_prev_offset;
1636 : 3934 : m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
1637 : : diff >> ASAN_SHADOW_SHIFT);
1638 : 3934 : m_prev_offset = offset;
1639 : : }
1640 : 19189 : m_shadow_bytes.safe_push (value);
1641 : 19189 : flush_if_full ();
1642 : 19189 : }
1643 : :
1644 : : /* Emit the buffered shadow bytes as a single RTX store. */
1645 : :
1646 : : void
1647 : 5369 : asan_redzone_buffer::flush_redzone_payload (void)
1648 : : {
1649 : 5369 : gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1650 : :
1651 : 5369 : if (m_shadow_bytes.is_empty ())
1652 : 5369 : return;
1653 : :
1654 : : /* Be sure we always emit to an aligned address. */
1655 : 5369 : gcc_assert (((m_prev_offset - m_original_offset)
1656 : : & (ASAN_RED_ZONE_SIZE - 1)) == 0);
1657 : :
1658 : : /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed. */
1659 : : unsigned l = m_shadow_bytes.length ();
1660 : 11374 : for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
1661 : 6005 : m_shadow_bytes.safe_push (0);
1662 : :
1663 : 5369 : if (dump_file && (dump_flags & TDF_DETAILS))
1664 : 0 : fprintf (dump_file,
1665 : : "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);
1666 : :
1667 : : unsigned HOST_WIDE_INT val = 0;
1668 : 26845 : for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
1669 : : {
1670 : 21476 : unsigned char v
1671 : 21476 : = m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
1672 : 21476 : val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
1673 : 21476 : if (dump_file && (dump_flags & TDF_DETAILS))
1674 : 0 : fprintf (dump_file, "%02x ", v);
1675 : : }
1676 : :
1677 : 5369 : if (dump_file && (dump_flags & TDF_DETAILS))
1678 : 0 : fprintf (dump_file, "\n");
1679 : :
1680 : 5369 : rtx c = gen_int_mode (val, SImode);
1681 : 5369 : m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
1682 : 5369 : emit_move_insn (m_shadow_mem, c);
1683 : 5369 : m_shadow_bytes.truncate (0);
1684 : : }
1685 : :
1686 : : /* Flush if the buffer is full
1687 : :    (i.e. its length equals RZ_BUFFER_SIZE). */
1688 : :
1689 : : void
1690 : 19189 : asan_redzone_buffer::flush_if_full (void)
1691 : : {
1692 : 19189 : if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
1693 : 5051 : flush_redzone_payload ();
1694 : 19189 : }
1695 : :
1696 : :
1697 : : /* HWAddressSanitizer (hwasan) is a probabilistic method for detecting
1698 : : out-of-bounds and use-after-free bugs.
1699 : : Read more:
1700 : : http://code.google.com/p/address-sanitizer/
1701 : :
1702 : : Similar to AddressSanitizer (asan) it consists of two parts: the
1703 : : instrumentation module in this file, and a run-time library.
1704 : :
1705 : : The instrumentation module adds a run-time check before every memory insn in
1706 : : the same manner as asan (see the block comment for AddressSanitizer above).
1707 : : Currently, hwasan only adds out-of-line instrumentation, where each check is
1708 : : implemented as a function call to the run-time library. Hence a check for a
1709 : : load of N bytes from address X would be implemented with a function call to
1710 : : __hwasan_loadN(X), and checking a store of N bytes from address X would be
1711 : : implemented with a function call to __hwasan_storeN(X).
1712 : :
1713 : : The main difference between hwasan and asan is in the information stored to
1714 : : help this checking. Both sanitizers use a shadow memory area which stores
1715 : : data recording the state of main memory at a corresponding address.
1716 : :
1717 : : For hwasan, each 16 byte granule in main memory has a corresponding 1 byte
1718 : : in shadow memory.  This shadow address can be calculated with the equation:
1719 : : (addr >> log_2(HWASAN_TAG_GRANULE_SIZE))
1720 : : + __hwasan_shadow_memory_dynamic_address;
1721 : : The corresponding conversion and shadow memory layout for asan are given
1722 : : in the block comment at the top of this file; here we describe how the
1723 : : shadow memory is used for hwasan.
1726 : :
1727 : : For hwasan, each variable is assigned a byte-sized 'tag'. The extent of
1728 : : the shadow memory for that variable is filled with the assigned tag, and
1729 : : every pointer referencing that variable has its top byte set to the same
1730 : : tag. The run-time library redefines malloc so that every allocation returns
1731 : : a tagged pointer and tags the corresponding shadow memory with the same tag.
1732 : :
1733 : : On each pointer dereference the tag found in the pointer is compared to the
1734 : : tag found in the shadow memory corresponding to the accessed memory address.
1735 : : If these tags are found to differ then this memory access is judged to be
1736 : : invalid and a report is generated.
1737 : :
1738 : : This method of bug detection is not perfect -- it cannot catch every bad
1739 : : access -- but catches them probabilistically instead. There is always the
1740 : : possibility that an invalid memory access will happen to access memory
1741 : : tagged with the same tag as the pointer that this access used.
1742 : : The chances of this are approx. 0.4% for any two uncorrelated objects.
1743 : :
1744 : : Random tag generation can mitigate this problem by decreasing the
1745 : : probability that an invalid access will be missed in the same manner over
1746 : : multiple runs. i.e. if two objects are tagged the same in one run of the
1747 : : binary they are unlikely to be tagged the same in the next run.
1748 : : Both heap and stack allocated objects have random tags by default.
1749 : :
1750 : : [16 byte granule implications]
1751 : : Since the shadow memory only has a resolution on real memory of 16 bytes,
1752 : : invalid accesses that are within the same 16 byte granule as a valid
1753 : : address will not be caught.
1754 : :
1755 : : There is a "short-granule" feature in the runtime library which does catch
1756 : : such accesses, but this feature is not implemented for stack objects (since
1757 : : stack objects are allocated and tagged by compiler instrumentation, and
1758 : : this feature has not yet been implemented in GCC instrumentation).
1759 : :
1760 : : Another outcome of this 16 byte resolution is that each tagged object must
1761 : : be 16 byte aligned. If two objects were to share any 16 byte granule in
1762 : : memory, then they both would have to be given the same tag, and invalid
1763 : : accesses to one using a pointer to the other would be undetectable.
1764 : :
1765 : : [Compiler instrumentation]
1766 : : Compiler instrumentation ensures that two adjacent buffers on the stack are
1767 : : given different tags; this means an access to one buffer using a pointer
1768 : : generated from the other (e.g. through buffer overrun) will have mismatched
1769 : : tags and be caught by hwasan.
1770 : :
1771 : : We don't randomly tag every object on the stack, since that would require
1772 : : keeping many registers to record each tag. Instead we randomly generate a
1773 : : tag for each function frame, and each new stack object uses a tag offset
1774 : : from that frame tag.
1775 : : i.e. each object is tagged as RFT + offset, where RFT is the "random frame
1776 : : tag" generated for this frame.
1777 : : This means that randomisation does not perturb the difference between tags
1778 : : on tagged stack objects within a frame, but this is mitigated by the fact
1779 : : that objects with the same tag within a frame are very far apart
1780 : : (approx. 2^HWASAN_TAG_SIZE objects apart).
1781 : :
1782 : : As a demonstration, using the same example program as in the asan block
1783 : : comment above:
1784 : :
1785 : : int
1786 : : foo ()
1787 : : {
1788 : : char a[24] = {0};
1789 : : int b[2] = {0};
1790 : :
1791 : : a[5] = 1;
1792 : : b[1] = 2;
1793 : :
1794 : : return a[5] + b[1];
1795 : : }
1796 : :
1797 : : On AArch64 the stack will be ordered as follows for the above function:
1798 : :
1799 : : Slot 1/ [24 bytes for variable 'a']
1800 : : Slot 2/ [8 bytes padding for alignment]
1801 : : Slot 3/ [8 bytes for variable 'b']
1802 : : Slot 4/ [8 bytes padding for alignment]
1803 : :
1804 : : (The padding is there to ensure 16 byte alignment as described in the 16
1805 : : byte granule implications).
1806 : :
1807 : : While the shadow memory will be ordered as follows:
1808 : :
1809 : : - 2 bytes (representing 32 bytes in real memory) tagged with RFT + 1.
1810 : : - 1 byte (representing 16 bytes in real memory) tagged with RFT + 2.
1811 : :
1812 : : And any pointer to "a" will have the tag RFT + 1, and any pointer to "b"
1813 : : will have the tag RFT + 2.
1814 : :
1815 : : [Top Byte Ignore requirements]
1816 : : Hwasan requires the ability to store an 8 bit tag in every pointer. There
1817 : : is no instrumentation done to remove this tag from pointers before
1818 : : dereferencing, which means the hardware must ignore this tag during memory
1819 : : accesses.
1820 : :
1821 : : Architectures where this feature is available should indicate this using
1822 : : the TARGET_MEMTAG_CAN_TAG_ADDRESSES hook.
1823 : :
1824 : : [Stack requires cleanup on unwinding]
1825 : : During normal operation of a hwasan sanitized program more space in the
1826 : : shadow memory becomes tagged as the stack grows. As the stack shrinks this
1827 : : shadow memory space must become untagged. If it is not untagged then when
1828 : : the stack grows again (during other function calls later on in the program)
1829 : : objects on the stack that are usually not tagged (e.g. parameters passed on
1830 : : the stack) can be placed in memory whose shadow space is tagged with
1831 : : something else, and accesses can cause false positive reports.
1832 : :
1833 : : Hence we place untagging code on every epilogue of functions which tag some
1834 : : stack objects.
1835 : :
1836 : : Moreover, the run-time library intercepts longjmp & setjmp to untag when
1837 : : the stack is unwound this way.
1838 : :
1839 : : C++ exceptions are not yet handled, which means this sanitizer can not
1840 : : C++ exceptions are not yet handled, which means this sanitizer cannot
1841 : : after an exception has been thrown. The implementation that the hwasan
1842 : : library has for handling these relies on the frame pointer being after any
1843 : : local variables. This is not generally the case for GCC. */
1844 : :
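 : : /* A minimal sketch of the tag check described above (illustrative only,
 : :    neither GCC nor libhwasan code; assumes 64-bit pointers with the tag
 : :    in the top byte and HWASAN_TAG_GRANULE_SIZE == 16):
 : :
 : :      int
 : :      tags_match_p (unsigned long long tagged_addr,
 : :                    unsigned char *shadow_base)
 : :      {
 : :        unsigned char ptr_tag = tagged_addr >> 56;
 : :        unsigned long long addr = tagged_addr & ((1ULL << 56) - 1);
 : :        unsigned char mem_tag = shadow_base[addr >> 4];
 : :        return ptr_tag == mem_tag;  // mismatch => __hwasan_loadN reports
 : :      }
 : :
 : :    Here shadow_base stands in for __hwasan_shadow_memory_dynamic_address.  */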
1845 : :
1846 : : /* Returns whether we are tagging pointers and checking those tags on memory
1847 : : access. */
1848 : : bool
1849 : 29276729 : hwasan_sanitize_p ()
1850 : : {
1851 : 29276729 : return sanitize_flags_p (SANITIZE_HWADDRESS);
1852 : : }
1853 : :
1854 : : /* Are we tagging the stack? */
1855 : : bool
1856 : 27517910 : hwasan_sanitize_stack_p ()
1857 : : {
1858 : 27517910 : return (hwasan_sanitize_p () && param_hwasan_instrument_stack);
1859 : : }
1860 : :
1861 : : /* Are we tagging alloca objects? */
1862 : : bool
1863 : 1450855 : hwasan_sanitize_allocas_p (void)
1864 : : {
1865 : 1450855 : return (hwasan_sanitize_stack_p () && param_hwasan_instrument_allocas);
1866 : : }
1867 : :
1868 : : /* Should we instrument reads? */
1869 : : bool
1870 : 368 : hwasan_instrument_reads (void)
1871 : : {
1872 : 368 : return (hwasan_sanitize_p () && param_hwasan_instrument_reads);
1873 : : }
1874 : :
1875 : : /* Should we instrument writes? */
1876 : : bool
1877 : 209 : hwasan_instrument_writes (void)
1878 : : {
1879 : 209 : return (hwasan_sanitize_p () && param_hwasan_instrument_writes);
1880 : : }
1881 : :
1882 : : /* Should we instrument builtin calls? */
1883 : : bool
1884 : 95 : hwasan_memintrin (void)
1885 : : {
1886 : 95 : return (hwasan_sanitize_p () && param_hwasan_instrument_mem_intrinsics);
1887 : : }
1888 : :
1889 : : /* Insert code to protect stack vars. The prologue sequence should be emitted
1890 : : directly, the epilogue sequence returned.  BASE is the register holding the
1891 : : stack base, to which the offsets in the OFFSETS array are relative.  The
1892 : : OFFSETS array contains pairs of offsets in reverse order, always the end
1893 : : offset of some gap that needs protection followed by its starting offset,
1894 : : and DECLS is an array of representative decls for each var partition.
1895 : : LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1896 : : elements long (OFFSETS includes the gap before the first variable as well
1897 : : as the gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
1898 : : register on which the stack vars' DECL_RTLs are based.  Either BASE should
1899 : : be assigned to PBASE (when not doing use-after-return protection), or the
1900 : : corresponding address based on the __asan_stack_malloc* return value.  */
1901 : :
1902 : : rtx_insn *
1903 : 1435 : asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1904 : : HOST_WIDE_INT *offsets, tree *decls, int length)
1905 : : {
1906 : 1435 : rtx shadow_base, shadow_mem, ret, mem, orig_base;
1907 : 1435 : rtx_code_label *lab;
1908 : 1435 : rtx_insn *insns;
1909 : 1435 : char buf[32];
1910 : 1435 : HOST_WIDE_INT base_offset = offsets[length - 1];
1911 : 1435 : HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1912 : 1435 : HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1913 : 1435 : HOST_WIDE_INT last_offset, last_size, last_size_aligned;
1914 : 1435 : int l;
1915 : 1435 : unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1916 : 1435 : tree str_cst, decl, id;
1917 : 1435 : int use_after_return_class = -1;
1918 : :
1919 : : /* Don't emit anything when doing error recovery, the assertions
1920 : : might fail e.g. if a function had a frame offset overflow. */
1921 : 1435 : if (seen_error ())
1922 : : return NULL;
1923 : :
1924 : 1435 : if (shadow_ptr_types[0] == NULL_TREE)
1925 : 0 : asan_init_shadow_ptr_types ();
1926 : :
1927 : 1435 : expanded_location cfun_xloc
1928 : 1435 : = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1929 : :
1930 : : /* First of all, prepare the description string. */
1931 : 1435 : pretty_printer asan_pp;
1932 : :
1933 : 1435 : pp_decimal_int (&asan_pp, length / 2 - 1);
1934 : 1435 : pp_space (&asan_pp);
1935 : 4077 : for (l = length - 2; l; l -= 2)
1936 : : {
1937 : 2642 : tree decl = decls[l / 2 - 1];
1938 : 2642 : pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1939 : 2642 : pp_space (&asan_pp);
1940 : 2642 : pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1941 : 2642 : pp_space (&asan_pp);
1942 : :
1943 : 2642 : expanded_location xloc
1944 : 2642 : = expand_location (DECL_SOURCE_LOCATION (decl));
1945 : 2642 : char location[32];
1946 : :
1947 : 2642 : if (xloc.file == cfun_xloc.file)
1948 : 2496 : sprintf (location, ":%d", xloc.line);
1949 : : else
1950 : 146 : location[0] = '\0';
1951 : :
1952 : 2642 : if (DECL_P (decl) && DECL_NAME (decl))
1953 : : {
1954 : 2138 : unsigned idlen
1955 : 2138 : = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
1956 : 2138 : pp_decimal_int (&asan_pp, idlen);
1957 : 2138 : pp_space (&asan_pp);
1958 : 2138 : pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1959 : 2138 : pp_string (&asan_pp, location);
1960 : : }
1961 : : else
1962 : 504 : pp_string (&asan_pp, "9 <unknown>");
1963 : :
1964 : 2642 : if (l > 2)
1965 : 1207 : pp_space (&asan_pp);
1966 : : }
1967 : 1435 : str_cst = asan_pp_string (&asan_pp);
1968 : :
1969 : 2810 : gcc_checking_assert (offsets[0] == (crtl->stack_protect_guard
1970 : : ? -ASAN_RED_ZONE_SIZE : 0));
1971 : : /* Emit the prologue sequence. */
1972 : 1435 : if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1973 : 1404 : && param_asan_use_after_return)
1974 : : {
1975 : 1372 : HOST_WIDE_INT adjusted_frame_size = asan_frame_size;
1976 : : /* The stack protector guard is allocated at the top of the frame
1977 : : and cfgexpand.cc then uses align_frame_offset (ASAN_RED_ZONE_SIZE);
1978 : : while in that case we can still use asan_frame_size, we need to take
1979 : : that into account when computing base_align_bias. */
1980 : 1372 : if (alignb > ASAN_RED_ZONE_SIZE && crtl->stack_protect_guard)
1981 : 28 : adjusted_frame_size += ASAN_RED_ZONE_SIZE;
1982 : 1372 : use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1983 : : /* __asan_stack_malloc_N guarantees alignment
1984 : : N < 6 ? (64 << N) : 4096 bytes. */
1985 : 1372 : if (alignb > (use_after_return_class < 6
1986 : 1372 : ? (64U << use_after_return_class) : 4096U))
1987 : : use_after_return_class = -1;
1988 : 1372 : else if (alignb > ASAN_RED_ZONE_SIZE
1989 : 45 : && (adjusted_frame_size & (alignb - 1)))
1990 : : {
1991 : 28 : base_align_bias
1992 : 28 : = ((adjusted_frame_size + alignb - 1)
1993 : 28 : & ~(alignb - HOST_WIDE_INT_1)) - adjusted_frame_size;
1994 : 28 : use_after_return_class
1995 : 28 : = floor_log2 (asan_frame_size + base_align_bias - 1) - 5;
1996 : 28 : if (use_after_return_class > 10)
1997 : : {
1998 : 63 : base_align_bias = 0;
1999 : 63 : use_after_return_class = -1;
2000 : : }
2001 : : }
2002 : : }
2003 : :
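 : : /* Illustrative arithmetic (not part of GCC): a 96-byte frame gives
 : :    use_after_return_class = floor_log2 (95) - 5 = 1, so the fake stack
 : :    would come from __asan_stack_malloc_1, which serves frames of up to
 : :    128 bytes and guarantees (64 << 1) = 128-byte alignment.  */
 : :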
2004 : : /* Align base if target is STRICT_ALIGNMENT. */
2005 : 1435 : if (STRICT_ALIGNMENT)
2006 : : {
2007 : : const HOST_WIDE_INT align
2008 : : = (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
2009 : : base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
2010 : : NULL_RTX, 1, OPTAB_DIRECT);
2011 : : }
2012 : :
2013 : 1435 : if (use_after_return_class == -1 && pbase)
2014 : 63 : emit_move_insn (pbase, base);
2015 : :
2016 : 1435 : base = expand_binop (Pmode, add_optab, base,
2017 : 1435 : gen_int_mode (base_offset - base_align_bias, Pmode),
2018 : : NULL_RTX, 1, OPTAB_DIRECT);
2019 : 1435 : orig_base = NULL_RTX;
2020 : 1435 : if (use_after_return_class != -1)
2021 : : {
2022 : 1372 : if (asan_detect_stack_use_after_return == NULL_TREE)
2023 : : {
2024 : 1033 : id = get_identifier ("__asan_option_detect_stack_use_after_return");
2025 : 1033 : decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
2026 : : integer_type_node);
2027 : 1033 : SET_DECL_ASSEMBLER_NAME (decl, id);
2028 : 1033 : TREE_ADDRESSABLE (decl) = 1;
2029 : 1033 : DECL_ARTIFICIAL (decl) = 1;
2030 : 1033 : DECL_IGNORED_P (decl) = 1;
2031 : 1033 : DECL_EXTERNAL (decl) = 1;
2032 : 1033 : TREE_STATIC (decl) = 1;
2033 : 1033 : TREE_PUBLIC (decl) = 1;
2034 : 1033 : TREE_USED (decl) = 1;
2035 : 1033 : asan_detect_stack_use_after_return = decl;
2036 : : }
2037 : 1372 : orig_base = gen_reg_rtx (Pmode);
2038 : 1372 : emit_move_insn (orig_base, base);
2039 : 1372 : ret = expand_normal (asan_detect_stack_use_after_return);
2040 : 1372 : lab = gen_label_rtx ();
2041 : 1372 : emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
2042 : : VOIDmode, 0, lab,
2043 : : profile_probability::very_likely ());
2044 : 1372 : snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
2045 : : use_after_return_class);
2046 : 1372 : ret = init_one_libfunc (buf);
2047 : 1372 : ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
2048 : : GEN_INT (asan_frame_size
2049 : : + base_align_bias),
2050 : 1372 : TYPE_MODE (pointer_sized_int_node));
2051 : : /* __asan_stack_malloc_[n] returns a pointer to the fake stack on success
2052 : : and NULL otherwise.  Check whether RET is NULL here and jump over the
2053 : : BASE reassignment in that case.  Otherwise, reassign BASE to RET. */
2054 : 1372 : emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
2055 : : VOIDmode, 0, lab,
2056 : : profile_probability::very_unlikely ());
2057 : 1372 : ret = convert_memory_address (Pmode, ret);
2058 : 1372 : emit_move_insn (base, ret);
2059 : 1372 : emit_label (lab);
2060 : 1372 : emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
2061 : : gen_int_mode (base_align_bias
2062 : 1372 : - base_offset, Pmode),
2063 : : NULL_RTX, 1, OPTAB_DIRECT));
2064 : : }
2065 : 1435 : mem = gen_rtx_MEM (ptr_mode, base);
2066 : 1435 : mem = adjust_address (mem, VOIDmode, base_align_bias);
2067 : 1435 : emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
2068 : 2870 : mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
2069 : 1435 : emit_move_insn (mem, expand_normal (str_cst));
2070 : 2870 : mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
2071 : 1435 : ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
2072 : 1435 : id = get_identifier (buf);
2073 : 1435 : decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2074 : : VAR_DECL, id, char_type_node);
2075 : 1435 : SET_DECL_ASSEMBLER_NAME (decl, id);
2076 : 1435 : TREE_ADDRESSABLE (decl) = 1;
2077 : 1435 : TREE_READONLY (decl) = 1;
2078 : 1435 : DECL_ARTIFICIAL (decl) = 1;
2079 : 1435 : DECL_IGNORED_P (decl) = 1;
2080 : 1435 : TREE_STATIC (decl) = 1;
2081 : 1435 : TREE_PUBLIC (decl) = 0;
2082 : 1435 : TREE_USED (decl) = 1;
2083 : 1435 : DECL_INITIAL (decl) = decl;
2084 : 1435 : TREE_ASM_WRITTEN (decl) = 1;
2085 : 1435 : TREE_ASM_WRITTEN (id) = 1;
2086 : 1435 : DECL_ALIGN_RAW (decl) = DECL_ALIGN_RAW (current_function_decl);
2087 : 1435 : emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
2088 : 1435 : shadow_base = expand_binop (Pmode, lshr_optab, base,
2089 : 1435 : gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
2090 : : NULL_RTX, 1, OPTAB_DIRECT);
2091 : 1435 : if (asan_dynamic_shadow_offset_p ())
2092 : : {
2093 : 0 : ret = expand_normal (get_asan_shadow_memory_dynamic_address_decl ());
2094 : 0 : shadow_base
2095 : 0 : = expand_simple_binop (Pmode, PLUS, shadow_base, ret, NULL_RTX,
2096 : : /* unsignedp = */ 1, OPTAB_WIDEN);
2097 : 0 : shadow_base = plus_constant (Pmode, shadow_base,
2098 : 0 : (base_align_bias >> ASAN_SHADOW_SHIFT));
2099 : : }
2100 : : else
2101 : : {
2102 : 1435 : shadow_base = plus_constant (Pmode, shadow_base,
2103 : 1435 : asan_shadow_offset ()
2104 : 1435 : + (base_align_bias >> ASAN_SHADOW_SHIFT));
2105 : : }
2106 : 1435 : gcc_assert (asan_shadow_set != -1
2107 : : && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
2108 : 1435 : shadow_mem = gen_rtx_MEM (SImode, shadow_base);
2109 : 1435 : set_mem_alias_set (shadow_mem, asan_shadow_set);
2110 : 1435 : if (STRICT_ALIGNMENT)
2111 : : set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
2112 : 1435 : prev_offset = base_offset;
2113 : :
2114 : 1435 : asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
2115 : 5512 : for (l = length; l; l -= 2)
2116 : : {
2117 : 4077 : if (l == 2)
2118 : 1435 : cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
2119 : 4077 : offset = offsets[l - 1];
2120 : :
2121 : 4077 : bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
2122 : : /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
2123 : : the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
2124 : : In that case we have to emit one extra byte that will describe
2125 : : how many bytes (out of ASAN_SHADOW_GRANULARITY) can be accessed;
 : : e.g. a 10-byte variable ends 2 bytes into its last granule, so
 : : that granule's shadow byte is 2. */
2126 : 4077 : if (extra_byte)
2127 : : {
2128 : 1171 : HOST_WIDE_INT aoff
2129 : 1171 : = base_offset + ((offset - base_offset)
2130 : 1171 : & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
2131 : 1171 : rz_buffer.emit_redzone_byte (aoff, offset - aoff);
2132 : 1171 : offset = aoff + ASAN_SHADOW_GRANULARITY;
2133 : : }
2134 : :
2135 : : /* Calculate size of red zone payload. */
2136 : 22095 : while (offset < offsets[l - 2])
2137 : : {
2138 : 18018 : rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
2139 : 18018 : offset += ASAN_SHADOW_GRANULARITY;
2140 : : }
2141 : :
2142 : 4077 : cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
2143 : : }
2144 : :
2145 : : /* As the automatic variables are aligned to
2146 : : ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
2147 : : flushed here. */
2148 : 1435 : gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
2149 : :
2150 : 1435 : do_pending_stack_adjust ();
2151 : :
2152 : : /* Construct epilogue sequence. */
2153 : 1435 : start_sequence ();
2154 : :
2155 : 1435 : lab = NULL;
2156 : 1435 : if (use_after_return_class != -1)
2157 : : {
2158 : 1372 : rtx_code_label *lab2 = gen_label_rtx ();
2159 : 1372 : char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
2160 : 1372 : emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
2161 : : VOIDmode, 0, lab2,
2162 : : profile_probability::very_likely ());
2163 : 1372 : shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
2164 : 1372 : set_mem_alias_set (shadow_mem, asan_shadow_set);
2165 : 1372 : mem = gen_rtx_MEM (ptr_mode, base);
2166 : 1372 : mem = adjust_address (mem, VOIDmode, base_align_bias);
2167 : 1372 : emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
2168 : 1372 : unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
2169 : 1372 : bool asan_stack_free_emitted_p = false;
2170 : 1372 : if (use_after_return_class < 5
2171 : 1372 : && can_store_by_pieces (sz, builtin_memset_read_str, &c,
2172 : : BITS_PER_UNIT, true))
2173 : : /* Emit memset (ShadowBase, kAsanStackAfterReturnMagic, ShadowSize). */
2174 : 1256 : store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
2175 : : BITS_PER_UNIT, true, RETURN_BEGIN);
2176 : 116 : else if (use_after_return_class >= 5
2177 : 130 : || !set_storage_via_setmem (shadow_mem,
2178 : : GEN_INT (sz),
2179 : 14 : gen_int_mode (c, QImode),
2180 : : BITS_PER_UNIT, BITS_PER_UNIT,
2181 : : -1, sz, sz, sz))
2182 : : {
2183 : 102 : snprintf (buf, sizeof buf, "__asan_stack_free_%d",
2184 : : use_after_return_class);
2185 : 102 : ret = init_one_libfunc (buf);
2186 : 102 : rtx addr = convert_memory_address (ptr_mode, base);
2187 : 102 : rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
2188 : 204 : emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
2189 : : GEN_INT (asan_frame_size + base_align_bias),
2190 : 102 : TYPE_MODE (pointer_sized_int_node),
2191 : : orig_addr, ptr_mode);
2192 : 102 : asan_stack_free_emitted_p = true;
2193 : : }
2194 : 1372 : if (!asan_stack_free_emitted_p)
2195 : : {
2196 : : /* Emit **SavedFlagPtr (FakeStack, class_id) = 0. */
2197 : 1270 : unsigned HOST_WIDE_INT offset = (1 << (use_after_return_class + 6));
2198 : 1270 : offset -= GET_MODE_SIZE (ptr_mode);
2199 : 1270 : mem = gen_rtx_MEM (ptr_mode, base);
2200 : 1270 : mem = adjust_address (mem, ptr_mode, offset);
2201 : 1270 : rtx addr = gen_reg_rtx (ptr_mode);
2202 : 1270 : emit_move_insn (addr, mem);
2203 : 1270 : addr = convert_memory_address (Pmode, addr);
2204 : 1270 : mem = gen_rtx_MEM (QImode, addr);
2205 : 1270 : emit_move_insn (mem, const0_rtx);
2206 : : }
2207 : 1372 : lab = gen_label_rtx ();
2208 : 1372 : emit_jump (lab);
2209 : 1372 : emit_label (lab2);
2210 : : }
2211 : :
2212 : 1435 : shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
2213 : 1435 : set_mem_alias_set (shadow_mem, asan_shadow_set);
2214 : :
2215 : 1435 : if (STRICT_ALIGNMENT)
2216 : : set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
2217 : :
2218 : 1435 : prev_offset = base_offset;
2219 : 1435 : last_offset = base_offset;
2220 : 1435 : last_size = 0;
2221 : 1435 : last_size_aligned = 0;
2222 : 5512 : for (l = length; l; l -= 2)
2223 : : {
2224 : 4077 : offset = base_offset + ((offsets[l - 1] - base_offset)
2225 : 4077 : & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
2226 : 4077 : if (last_offset + last_size_aligned < offset)
2227 : : {
2228 : 552 : shadow_mem = adjust_address (shadow_mem, VOIDmode,
2229 : : (last_offset - prev_offset)
2230 : : >> ASAN_SHADOW_SHIFT);
2231 : 552 : prev_offset = last_offset;
2232 : 552 : asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
2233 : 552 : last_offset = offset;
2234 : 552 : last_size = 0;
2235 : : }
2236 : : else
2237 : 3525 : last_size = offset - last_offset;
2238 : 4077 : last_size += base_offset + ((offsets[l - 2] - base_offset)
2239 : 4077 : & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
2240 : 4077 : - offset;
2241 : :
2242 : : /* Unpoison shadow memory that corresponds to a variable that
2243 : : is subject to use-after-return sanitization. */
2244 : 4077 : if (l > 2)
2245 : : {
2246 : 2642 : decl = decls[l / 2 - 2];
2247 : 2642 : if (asan_handled_variables != NULL
2248 : 2642 : && asan_handled_variables->contains (decl))
2249 : : {
2250 : 644 : HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
2251 : 644 : if (dump_file && (dump_flags & TDF_DETAILS))
2252 : : {
2253 : 0 : const char *n = (DECL_NAME (decl)
2254 : 0 : ? IDENTIFIER_POINTER (DECL_NAME (decl))
2255 : 0 : : "<unknown>");
2256 : 0 : fprintf (dump_file, "Unpoisoning shadow stack for variable: "
2257 : : "%s (%" PRId64 " B)\n", n, size);
2258 : : }
2259 : :
2260 : 644 : last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
2261 : : }
2262 : : }
2263 : 4077 : last_size_aligned
2264 : 4077 : = ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
2265 : : & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
2266 : : }
2267 : 1435 : if (last_size_aligned)
2268 : : {
2269 : 1435 : shadow_mem = adjust_address (shadow_mem, VOIDmode,
2270 : : (last_offset - prev_offset)
2271 : : >> ASAN_SHADOW_SHIFT);
2272 : 1435 : asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
2273 : : }
2274 : :
2275 : : /* Clean-up set with instrumented stack variables. */
2276 : 1706 : delete asan_handled_variables;
2277 : 1435 : asan_handled_variables = NULL;
2278 : 1494 : delete asan_used_labels;
2279 : 1435 : asan_used_labels = NULL;
2280 : :
2281 : 1435 : do_pending_stack_adjust ();
2282 : 1435 : if (lab)
2283 : 1372 : emit_label (lab);
2284 : :
2285 : 1435 : insns = end_sequence ();
2286 : 1435 : return insns;
2287 : 1435 : }
2288 : :
2289 : : /* Emit __asan_allocas_unpoison (top, bot) call. The BASE parameter corresponds
2290 : : /* Emit an __asan_allocas_unpoison (top, bot) call.  The caller passes its
2291 : : stack BASE as the BOT argument; for TOP, virtual_stack_dynamic_rtx is
2292 : : used.  If BEFORE is non-NULL, the new instructions are emitted into that
 : : existing sequence; otherwise a fresh sequence is started. */
2293 : : rtx_insn *
2294 : 178 : asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
2295 : : {
2296 : 178 : if (before)
2297 : 38 : push_to_sequence (before);
2298 : : else
2299 : 140 : start_sequence ();
2300 : 178 : rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
2301 : 178 : top = convert_memory_address (ptr_mode, top);
2302 : 178 : bot = convert_memory_address (ptr_mode, bot);
2303 : 178 : emit_library_call (ret, LCT_NORMAL, ptr_mode,
2304 : : top, ptr_mode, bot, ptr_mode);
2305 : :
2306 : 178 : do_pending_stack_adjust ();
2307 : 178 : return end_sequence ();
2308 : : }
2309 : :
2310 : : /* Return true if DECL, a global var, might be overridden and therefore
2311 : : needs a local alias. */
2312 : :
2313 : : static bool
2314 : 3765 : asan_needs_local_alias (tree decl)
2315 : : {
2316 : 3765 : return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
2317 : : }
2318 : :
2319 : : /* Return true if DECL, a global var, is an artificial ODR indicator symbol
2320 : : and therefore doesn't need protection. */
2321 : :
2322 : : static bool
2323 : 7928 : is_odr_indicator (tree decl)
2324 : : {
2325 : 7928 : return (DECL_ARTIFICIAL (decl)
2326 : 7928 : && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
2327 : : }
2328 : :
2329 : : /* Return true if DECL is a VAR_DECL that should be protected
2330 : : by Address Sanitizer, by appending a red zone with protected
2331 : : shadow memory after it and aligning it to at least
2332 : : ASAN_RED_ZONE_SIZE bytes. */
2333 : :
2334 : : bool
2335 : 20481 : asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
2336 : : {
2337 : 20481 : if (!param_asan_globals)
2338 : : return false;
2339 : :
2340 : 20330 : rtx rtl, symbol;
2341 : :
2342 : 20330 : if (TREE_CODE (decl) == STRING_CST)
2343 : : {
2344 : : /* Instrument all STRING_CSTs except those created
2345 : : by asan_pp_string here. */
2346 : 11951 : if (shadow_ptr_types[0] != NULL_TREE
2347 : 11917 : && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2348 : 23868 : && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
2349 : : return false;
2350 : : return true;
2351 : : }
2352 : 8379 : if (!VAR_P (decl)
2353 : : /* TLS vars aren't statically protectable. */
2354 : 8379 : || DECL_THREAD_LOCAL_P (decl)
2355 : : /* Externs will be protected elsewhere. */
2356 : 8373 : || DECL_EXTERNAL (decl)
2357 : : /* PR sanitizer/81697: For architectures that use section anchors first
2358 : : call to asan_protect_global may occur before DECL_RTL (decl) is set.
2359 : : We should ignore DECL_RTL_SET_P then, because otherwise the first call
2360 : : to asan_protect_global will return FALSE and the following calls on the
2361 : : same decl after setting DECL_RTL (decl) will return TRUE and we'll end
2362 : : up with inconsistency at runtime. */
2363 : 8373 : || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
2364 : : /* Comdat vars pose an ABI problem, we can't know if
2365 : : the var that is selected by the linker will have
2366 : : padding or not. */
2367 : 8361 : || DECL_ONE_ONLY (decl)
2368 : : /* Similarly for common vars. People can use -fno-common.
2369 : : Note: Linux kernel is built with -fno-common, so we do instrument
2370 : : globals there even if it is C. */
2371 : 8078 : || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
2372 : : /* Don't protect if using a user section; vars placed
2373 : : into a user section from multiple TUs are often assumed
2374 : : to be an array of such vars, and putting padding in there
2375 : : breaks this assumption. */
2376 : 8078 : || (DECL_SECTION_NAME (decl) != NULL
2377 : 270 : && !symtab_node::get (decl)->implicit_section
2378 : 270 : && !section_sanitized_p (DECL_SECTION_NAME (decl)))
2379 : : /* Don't protect variables in non-generic address-space. */
2380 : 7988 : || !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)))
2381 : 7986 : || DECL_SIZE (decl) == 0
2382 : 7986 : || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
2383 : 7986 : || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2384 : 7986 : || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
2385 : 7986 : || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
2386 : 7928 : || TREE_TYPE (decl) == ubsan_get_source_location_type ()
2387 : 16307 : || is_odr_indicator (decl))
2388 : 451 : return false;
2389 : :
2390 : 7928 : if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
2391 : : {
2392 : :
2393 : 7928 : rtl = DECL_RTL (decl);
2394 : 7928 : if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
2395 : : return false;
2396 : 7928 : symbol = XEXP (rtl, 0);
2397 : :
2398 : 7928 : if (CONSTANT_POOL_ADDRESS_P (symbol)
2399 : 7928 : || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2400 : : return false;
2401 : : }
2402 : :
2403 : 7928 : if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
2404 : : return false;
2405 : :
2406 : : if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
2407 : : return false;
2408 : :
2409 : : return true;
2410 : : }
2411 : :
2412 : : /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
2413 : : IS_STORE is either 1 (for a store) or 0 (for a load). */
2414 : :
2415 : : static tree
2416 : 10868 : report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2417 : : int *nargs)
2418 : : {
2419 : 10868 : gcc_assert (!hwasan_sanitize_p ());
2420 : :
2421 : 10868 : static enum built_in_function report[2][2][6]
2422 : : = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
2423 : : BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
2424 : : BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
2425 : : { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
2426 : : BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
2427 : : BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
2428 : : { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
2429 : : BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
2430 : : BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
2431 : : BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
2432 : : BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
2433 : : BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
2434 : : { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
2435 : : BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
2436 : : BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
2437 : : BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
2438 : : BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
2439 : : BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
2440 : 10868 : if (size_in_bytes == -1)
2441 : : {
2442 : 562 : *nargs = 2;
2443 : 562 : return builtin_decl_implicit (report[recover_p][is_store][5]);
2444 : : }
2445 : 10306 : *nargs = 1;
2446 : 10306 : int size_log2 = exact_log2 (size_in_bytes);
2447 : 10306 : return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
2448 : : }
2449 : :
2450 : : /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
2451 : : IS_STORE is either 1 (for a store) or 0 (for a load). */
2452 : :
2453 : : static tree
2454 : 103 : check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2455 : : int *nargs)
2456 : : {
2457 : 103 : static enum built_in_function check[2][2][6]
2458 : : = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
2459 : : BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
2460 : : BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
2461 : : { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
2462 : : BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
2463 : : BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
2464 : : { { BUILT_IN_ASAN_LOAD1_NOABORT,
2465 : : BUILT_IN_ASAN_LOAD2_NOABORT,
2466 : : BUILT_IN_ASAN_LOAD4_NOABORT,
2467 : : BUILT_IN_ASAN_LOAD8_NOABORT,
2468 : : BUILT_IN_ASAN_LOAD16_NOABORT,
2469 : : BUILT_IN_ASAN_LOADN_NOABORT },
2470 : : { BUILT_IN_ASAN_STORE1_NOABORT,
2471 : : BUILT_IN_ASAN_STORE2_NOABORT,
2472 : : BUILT_IN_ASAN_STORE4_NOABORT,
2473 : : BUILT_IN_ASAN_STORE8_NOABORT,
2474 : : BUILT_IN_ASAN_STORE16_NOABORT,
2475 : : BUILT_IN_ASAN_STOREN_NOABORT } } };
2476 : 103 : if (size_in_bytes == -1)
2477 : : {
2478 : 28 : *nargs = 2;
2479 : 28 : return builtin_decl_implicit (check[recover_p][is_store][5]);
2480 : : }
2481 : 75 : *nargs = 1;
2482 : 75 : int size_log2 = exact_log2 (size_in_bytes);
2483 : 75 : return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
2484 : : }
2485 : :
2486 : : /* Split the current basic block and create a condition statement
2487 : : insertion point right before or after the statement pointed to by
2488 : : ITER. Return an iterator to the point at which the caller might
2489 : : safely insert the condition statement.
2490 : :
2491 : : THEN_BLOCK must be set to the address of an uninitialized instance
2492 : : of basic_block. The function will then set *THEN_BLOCK to the
2493 : : 'then block' of the condition statement to be inserted by the
2494 : : caller.
2495 : :
2496 : : If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
2497 : : *THEN_BLOCK to *FALLTHROUGH_BLOCK.
2498 : :
2499 : : Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
2500 : : block' of the condition statement to be inserted by the caller.
2501 : :
2502 : : Note that *FALLTHROUGH_BLOCK is a new block that contains the
2503 : : statements starting from *ITER, and *THEN_BLOCK is a new empty
2504 : : block.
2505 : :
2506 : : *ITER is adjusted to always point to the first statement
2507 : : of the basic block *FALLTHROUGH_BLOCK.  That statement is the
2508 : : same as what ITER was pointing to prior to calling this function,
2509 : : if BEFORE_P is true; otherwise, it is its following statement. */
2510 : :
2511 : : gimple_stmt_iterator
2512 : 14529 : create_cond_insert_point (gimple_stmt_iterator *iter,
2513 : : bool before_p,
2514 : : bool then_more_likely_p,
2515 : : bool create_then_fallthru_edge,
2516 : : basic_block *then_block,
2517 : : basic_block *fallthrough_block)
2518 : : {
2519 : 14529 : gimple_stmt_iterator gsi = *iter;
2520 : :
2521 : 14529 : if (!gsi_end_p (gsi) && before_p)
2522 : 1129 : gsi_prev (&gsi);
2523 : :
2524 : 14529 : basic_block cur_bb = gsi_bb (*iter);
2525 : :
2526 : 14529 : edge e = split_block (cur_bb, gsi_stmt (gsi));
2527 : :
2528 : : /* Get a hold on the 'condition block', the 'then block' and the
2529 : : 'else block'. */
2530 : 14529 : basic_block cond_bb = e->src;
2531 : 14529 : basic_block fallthru_bb = e->dest;
2532 : 14529 : basic_block then_bb = create_empty_bb (cond_bb);
2533 : 14529 : if (current_loops)
2534 : : {
2535 : 14529 : add_bb_to_loop (then_bb, cond_bb->loop_father);
2536 : 14529 : loops_state_set (LOOPS_NEED_FIXUP);
2537 : : }
2538 : :
2539 : : /* Set up the newly created 'then block'. */
2540 : 14529 : e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
2541 : 14529 : profile_probability fallthrough_probability
2542 : : = then_more_likely_p
2543 : 14529 : ? profile_probability::very_unlikely ()
2544 : 14529 : : profile_probability::very_likely ();
2545 : 14529 : e->probability = fallthrough_probability.invert ();
2546 : 14529 : then_bb->count = e->count ();
2547 : 14529 : if (create_then_fallthru_edge)
2548 : 3812 : make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
2549 : :
2550 : : /* Set up the fallthrough basic block. */
2551 : 14529 : e = find_edge (cond_bb, fallthru_bb);
2552 : 14529 : e->flags = EDGE_FALSE_VALUE;
2553 : 14529 : e->probability = fallthrough_probability;
2554 : :
2555 : : /* Update dominance info for the newly created then_bb; note that
2556 : : fallthru_bb's dominance info has already been updated by
2557 : : split_block. */
2558 : 14529 : if (dom_info_available_p (CDI_DOMINATORS))
2559 : 13308 : set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
2560 : :
2561 : 14529 : *then_block = then_bb;
2562 : 14529 : *fallthrough_block = fallthru_bb;
2563 : 14529 : *iter = gsi_start_bb (fallthru_bb);
2564 : :
2565 : 14529 : return gsi_last_bb (cond_bb);
2566 : : }
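 : :
 : : /* Shape of the CFG produced above (illustrative only):
 : :
 : :      cond_bb  --EDGE_TRUE_VALUE-->   then_bb
 : :      cond_bb  --EDGE_FALSE_VALUE-->  fallthru_bb
 : :      then_bb  --EDGE_FALLTHRU-->     fallthru_bb
 : :
 : :    The last edge is only created when CREATE_THEN_FALLTHRU_EDGE is set;
 : :    the caller inserts the condition at the returned iterator, i.e. at
 : :    the end of cond_bb.  */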
2567 : :
2568 : : /* Insert an if condition followed by a 'then block' right before the
2569 : : statement pointed to by ITER. The fallthrough block -- which is the
2570 : : else block of the condition as well as the destination of the
2571 : : outgoing edge of the 'then block' -- starts with the statement
2572 : : pointed to by ITER.
2573 : :
2574 : : COND is the condition of the if.
2575 : :
2576 : : If THEN_MORE_LIKELY_P is true, the probability of the edge to the
2577 : : 'then block' is higher than the probability of the edge to the
2578 : : fallthrough block.
2579 : :
2580 : : Upon completion of the function, *THEN_BB is set to the newly
2581 : : inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
2582 : : fallthrough block.
2583 : :
2584 : : *ITER is adjusted to still point to the same statement it was
2585 : : pointing to initially. */
2586 : :
2587 : : static void
2588 : 0 : insert_if_then_before_iter (gcond *cond,
2589 : : gimple_stmt_iterator *iter,
2590 : : bool then_more_likely_p,
2591 : : basic_block *then_bb,
2592 : : basic_block *fallthrough_bb)
2593 : : {
2594 : 0 : gimple_stmt_iterator cond_insert_point =
2595 : 0 : create_cond_insert_point (iter,
2596 : : /*before_p=*/true,
2597 : : then_more_likely_p,
2598 : : /*create_then_fallthru_edge=*/true,
2599 : : then_bb,
2600 : : fallthrough_bb);
2601 : 0 : gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2602 : 0 : }
2603 : :
2604 : : /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
2605 : : If RETURN_ADDRESS is set to true, return memory location instread
2606 : : If RETURN_ADDRESS is set to true, return the address of the shadow
2607 : : memory location instead of the value loaded from it. */
2608 : : static tree
2609 : 12706 : build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
2610 : : tree base_addr, tree shadow_ptr_type,
2611 : : bool return_address = false)
2612 : : {
2613 : 12706 : tree t, uintptr_type = TREE_TYPE (base_addr);
2614 : 12706 : tree shadow_type = TREE_TYPE (shadow_ptr_type);
2615 : 12706 : gimple *g;
2616 : :
2617 : 12706 : t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
2618 : 12706 : g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
2619 : : base_addr, t);
2620 : 12706 : gimple_set_location (g, location);
2621 : 12706 : gsi_insert_after (gsi, g, GSI_NEW_STMT);
2622 : :
2623 : 12706 : if (asan_dynamic_shadow_offset_p ())
2624 : 0 : t = asan_local_shadow_memory_dynamic_address;
2625 : : else
2626 : 12706 : t = build_int_cst (uintptr_type, asan_shadow_offset ());
2627 : 12706 : g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
2628 : : gimple_assign_lhs (g), t);
2629 : 12706 : gimple_set_location (g, location);
2630 : 12706 : gsi_insert_after (gsi, g, GSI_NEW_STMT);
2631 : :
2632 : 12706 : g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
2633 : : gimple_assign_lhs (g));
2634 : 12706 : gimple_set_location (g, location);
2635 : 12706 : gsi_insert_after (gsi, g, GSI_NEW_STMT);
2636 : :
2637 : 12706 : if (!return_address)
2638 : : {
2639 : 11399 : t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
2640 : : build_int_cst (shadow_ptr_type, 0));
2641 : 11399 : g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
2642 : 11399 : gimple_set_location (g, location);
2643 : 11399 : gsi_insert_after (gsi, g, GSI_NEW_STMT);
2644 : : }
2645 : :
2646 : 12706 : return gimple_assign_lhs (g);
2647 : : }
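 : :
 : : /* A minimal sketch of the value computed above (illustrative only, not
 : :    GCC code; assumes the default x86_64 Linux configuration, i.e.
 : :    ASAN_SHADOW_SHIFT == 3 and asan_shadow_offset () == 0x7fff8000):
 : :
 : :      unsigned char
 : :      shadow_byte (unsigned long long base_addr)
 : :      {
 : :        return *(unsigned char *) ((base_addr >> 3) + 0x7fff8000ULL);
 : :      }  */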
2648 : :
2649 : : /* BASE can already be an SSA_NAME; in that case, do not create a
2650 : : new SSA_NAME for it. */
2651 : :
2652 : : static tree
2653 : 11475 : maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2654 : : bool before_p)
2655 : : {
2656 : 11475 : STRIP_USELESS_TYPE_CONVERSION (base);
2657 : 11475 : if (TREE_CODE (base) == SSA_NAME)
2658 : : return base;
2659 : 10067 : gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)), base);
2660 : 10067 : gimple_set_location (g, loc);
2661 : 10067 : if (before_p)
2662 : 10067 : gsi_safe_insert_before (iter, g);
2663 : : else
2664 : 0 : gsi_insert_after (iter, g, GSI_NEW_STMT);
2665 : 10067 : return gimple_assign_lhs (g);
2666 : : }
2667 : :
2668 : : /* LEN can already have the necessary size and precision;
2669 : : in that case, do not create a new variable. */
2670 : :
2671 : : tree
2672 : 0 : maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2673 : : bool before_p)
2674 : : {
2675 : 0 : if (ptrofftype_p (len))
2676 : : return len;
2677 : 0 : gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2678 : : NOP_EXPR, len);
2679 : 0 : gimple_set_location (g, loc);
2680 : 0 : if (before_p)
2681 : 0 : gsi_safe_insert_before (iter, g);
2682 : : else
2683 : 0 : gsi_insert_after (iter, g, GSI_NEW_STMT);
2684 : 0 : return gimple_assign_lhs (g);
2685 : : }
2686 : :
2687 : : /* Instrument the memory access instruction BASE. Insert new
2688 : : statements before or after ITER.
2689 : :
2690 : : Note that the memory access represented by BASE can be either an
2691 : : SSA_NAME, or a non-SSA expression. LOCATION is the source code
2692 : : location. IS_STORE is TRUE for a store, FALSE for a load.
2693 : : BEFORE_P is TRUE for inserting the instrumentation code before
2694 : : ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
2695 : : for a scalar memory access and FALSE for memory region access.
2696 : : NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
2697 : : IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
2698 : : non-zero length.  ALIGN tells the alignment of the accessed memory
2699 : : object.
2702 : : If BEFORE_P is TRUE, *ITER is arranged to still point to the
2703 : : statement it was pointing to prior to calling this function,
2704 : : otherwise, it points to the statement logically following it. */
2705 : :
2706 : : static void
2707 : 11475 : build_check_stmt (location_t loc, tree base, tree len,
2708 : : HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
2709 : : bool is_non_zero_len, bool before_p, bool is_store,
2710 : : bool is_scalar_access, unsigned int align = 0)
2711 : : {
2712 : 11475 : gimple *g;
2713 : :
2714 : 11475 : gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
2715 : 11475 : gcc_assert (size_in_bytes == -1 || size_in_bytes >= 1);
2716 : :
2717 : 11475 : base = unshare_expr (base);
2718 : 11475 : base = maybe_create_ssa_name (loc, base, iter, before_p);
2719 : :
2720 : 11475 : if (len)
2721 : : {
2722 : 0 : len = unshare_expr (len);
2723 : 0 : len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
2724 : : }
2725 : : else
2726 : : {
2727 : 11475 : gcc_assert (size_in_bytes != -1);
2728 : 11475 : len = build_int_cst (pointer_sized_int_node, size_in_bytes);
2729 : : }
2730 : :
2731 : 11475 : if (size_in_bytes > 1)
2732 : : {
2733 : 9616 : if ((size_in_bytes & (size_in_bytes - 1)) != 0
2734 : 9393 : || size_in_bytes > 16)
2735 : : is_scalar_access = false;
2736 : 9171 : else if (align && align < size_in_bytes * BITS_PER_UNIT)
2737 : : {
2738 : : /* On non-strict alignment targets, if a
2739 : : 16-byte access is just 8-byte aligned,
2740 : : this results in a misaligned 2-byte
2741 : : shadow memory load, but it can still
2742 : : be handled using one read. */
2743 : 238 : if (size_in_bytes != 16
2744 : : || STRICT_ALIGNMENT
2745 : 105 : || align < 8 * BITS_PER_UNIT)
2746 : 11475 : is_scalar_access = false;
2747 : : }
2748 : : }
2749 : :
2750 : 11475 : HOST_WIDE_INT flags = 0;
2751 : 11475 : if (is_store)
2752 : 5469 : flags |= ASAN_CHECK_STORE;
2753 : 11475 : if (is_non_zero_len)
2754 : 11475 : flags |= ASAN_CHECK_NON_ZERO_LEN;
2755 : 11475 : if (is_scalar_access)
2756 : 10888 : flags |= ASAN_CHECK_SCALAR_ACCESS;
2757 : :
2758 : 11475 : enum internal_fn fn = hwasan_sanitize_p ()
2759 : 11475 : ? IFN_HWASAN_CHECK
2760 : 11104 : : IFN_ASAN_CHECK;
2761 : :
2762 : 11475 : g = gimple_build_call_internal (fn, 4,
2763 : 11475 : build_int_cst (integer_type_node, flags),
2764 : : base, len,
2765 : : build_int_cst (integer_type_node,
2766 : 11475 : align / BITS_PER_UNIT));
2767 : 11475 : gimple_set_location (g, loc);
2768 : 11475 : if (before_p)
2769 : 11475 : gsi_safe_insert_before (iter, g);
2770 : : else
2771 : : {
2772 : 0 : gsi_insert_after (iter, g, GSI_NEW_STMT);
2773 : 0 : gsi_next (iter);
2774 : : }
2775 : 11475 : }
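 : :
 : : /* Illustrative semantics of the internal call built above (not GCC code):
 : :    .ASAN_CHECK (flags, base, len, align) stays abstract at this point and
 : :    is only expanded into the real shadow test later by the sanopt
 : :    machinery; conceptually, for a small scalar access:
 : :
 : :      unsigned char sv = shadow_byte (base);   // see the sketch above
 : :      if (sv != 0 && (base & 7) + len > sv)    // partially valid granule
 : :        __asan_report_loadN (base);            // or *_storeN for stores
 : : */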
2776 : :
2777 : : /* If T represents a memory access, add instrumentation code before ITER.
2778 : : LOCATION is source code location.
2779 : : IS_STORE is either TRUE (for a store) or FALSE (for a load). */
2780 : :
2781 : : static void
2782 : 18400 : instrument_derefs (gimple_stmt_iterator *iter, tree t,
2783 : : location_t location, bool is_store)
2784 : : {
2785 : 18400 : if (is_store && !(asan_instrument_writes () || hwasan_instrument_writes ()))
2786 : 6879 : return;
2787 : 18358 : if (!is_store && !(asan_instrument_reads () || hwasan_instrument_reads ()))
2788 : : return;
2789 : :
2790 : 18302 : tree type, base;
2791 : 18302 : HOST_WIDE_INT size_in_bytes;
2792 : 18302 : if (location == UNKNOWN_LOCATION)
2793 : 342 : location = EXPR_LOCATION (t);
2794 : :
2795 : 18302 : type = TREE_TYPE (t);
2796 : 18302 : switch (TREE_CODE (t))
2797 : : {
2798 : 18107 : case ARRAY_REF:
2799 : 18107 : case COMPONENT_REF:
2800 : 18107 : case INDIRECT_REF:
2801 : 18107 : case MEM_REF:
2802 : 18107 : case VAR_DECL:
2803 : 18107 : case BIT_FIELD_REF:
2804 : 18107 : break;
2805 : :
2806 : : default:
2807 : : return;
2808 : : }
2809 : :
2810 : 18107 : size_in_bytes = int_size_in_bytes (type);
2811 : 18107 : if (size_in_bytes <= 0)
2812 : : return;
2813 : :
2814 : 18107 : poly_int64 bitsize, bitpos;
2815 : 18107 : tree offset;
2816 : 18107 : machine_mode mode;
2817 : 18107 : int unsignedp, reversep, volatilep = 0;
2818 : 18107 : tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2819 : : &unsignedp, &reversep, &volatilep);
2820 : :
2821 : 18107 : if (TREE_CODE (t) == COMPONENT_REF
2822 : 18107 : && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2823 : : {
2824 : 72 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2825 : 72 : instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2826 : 72 : TREE_OPERAND (t, 0), repr,
2827 : 72 : TREE_OPERAND (t, 2)),
2828 : : location, is_store);
2829 : 72 : return;
2830 : : }
2831 : :
2832 : 18035 : if (!multiple_p (bitpos, BITS_PER_UNIT)
2833 : 18035 : || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2834 : : return;
2835 : :
2836 : 18035 : if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2837 : : return;
2838 : :
2839 : : /* Accesses to non-generic address-spaces should not be instrumented. */
2840 : 18029 : if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (inner))))
2841 : : return;
2842 : :
2843 : 18019 : poly_int64 decl_size;
2844 : 18019 : if ((VAR_P (inner)
2845 : 5999 : || (TREE_CODE (inner) == RESULT_DECL
2846 : 44 : && !aggregate_value_p (inner, current_function_decl)))
2847 : 12020 : && offset == NULL_TREE
2848 : 11519 : && DECL_SIZE (inner)
2849 : 11519 : && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
2850 : 36038 : && known_subrange_p (bitpos, bitsize, 0, decl_size))
2851 : : {
2852 : 11486 : if (VAR_P (inner) && DECL_THREAD_LOCAL_P (inner))
2853 : : return;
2854 : : /* If we're not sanitizing globals and we can tell statically that this
2855 : : access is inside a global variable, then there's no point adding
2856 : : instrumentation to check the access. N.b. hwasan currently never
2857 : : sanitizes globals. */
2858 : 22802 : if ((hwasan_sanitize_p () || !param_asan_globals)
2859 : 11560 : && is_global_var (inner))
2860 : : return;
2861 : 11304 : if (!TREE_STATIC (inner))
2862 : : {
2863 : : /* Automatic vars in the current function will be always
2864 : : accessible. */
2865 : 6704 : if (decl_function_context (inner) == current_function_decl
2866 : 6704 : && (!asan_sanitize_use_after_scope ()
2867 : 5912 : || !TREE_ADDRESSABLE (inner)))
2868 : : return;
2869 : : }
2870 : : /* Always instrument external vars, they might be dynamically
2871 : : initialized. */
2872 : 4600 : else if (!DECL_EXTERNAL (inner))
2873 : : {
2874 : : /* For static vars, if they are known not to be dynamically
2875 : : initialized, they will always be accessible.  */
2876 : 4600 : varpool_node *vnode = varpool_node::get (inner);
2877 : 4600 : if (vnode && !vnode->dynamically_initialized)
2878 : : return;
2879 : : }
2880 : : }
2881 : :
2882 : 11521 : if (DECL_P (inner)
2883 : 5596 : && decl_function_context (inner) == current_function_decl
2884 : 16442 : && !TREE_ADDRESSABLE (inner))
2885 : 99 : mark_addressable (inner);
2886 : :
2887 : 11521 : base = build_fold_addr_expr (t);
2888 : 11521 : if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2889 : : {
2890 : 11475 : unsigned int align = get_object_alignment (t);
2891 : 11475 : build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2892 : : /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2893 : : is_store, /*is_scalar_access*/true, align);
2894 : 11475 : update_mem_ref_hash_table (base, size_in_bytes);
2895 : 11475 : update_mem_ref_hash_table (t, size_in_bytes);
2896 : : }
2897 : :
2898 : : }
2899 : :
2900 : : /* Insert a memory reference into the hash table if access length
2901 : :    can be determined at compile time.  */
2902 : :
2903 : : static void
2904 : 1112 : maybe_update_mem_ref_hash_table (tree base, tree len)
2905 : : {
2906 : 1172 : if (!POINTER_TYPE_P (TREE_TYPE (base))
2907 : 1172 : || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2908 : : return;
2909 : :
2910 : 1112 : HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2911 : :
2912 : 348 : if (size_in_bytes != -1)
2913 : 348 : update_mem_ref_hash_table (base, size_in_bytes);
2914 : : }
2915 : :
2916 : : /* Instrument an access to a contiguous memory region that starts at
2917 : : the address pointed to by BASE, over a length of LEN (expressed in
2918 : : the sizeof (*BASE) bytes). ITER points to the instruction before
2919 : : which the instrumentation instructions must be inserted. LOCATION
2920 : : is the source location that the instrumentation instructions must
2921 : : have. If IS_STORE is true, then the memory access is a store;
2922 : : otherwise, it's a load. */
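: :
: : /* E.g. for __builtin_memset (p, 0, n) with a length that is only
: :    known at run time, this emits (a sketch; SSA names illustrative)
: :
: :      .ASAN_CHECK (1, p_2, n_5, 0);
: :
: :    a store check without the NON_ZERO_LEN guarantee, so the later
: :    expansion wraps the shadow test in an 'if (n_5 != 0)' guard.  */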
2923 : :
2924 : : static void
2925 : 0 : instrument_mem_region_access (tree base, tree len,
2926 : : gimple_stmt_iterator *iter,
2927 : : location_t location, bool is_store)
2928 : : {
2929 : 0 : if (!POINTER_TYPE_P (TREE_TYPE (base))
2930 : 0 : || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2931 : 0 : || integer_zerop (len))
2932 : 0 : return;
2933 : :
2934 : 0 : HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2935 : :
2936 : 0 : if ((size_in_bytes == -1)
2937 : 0 : || !has_mem_ref_been_instrumented (base, size_in_bytes))
2938 : : {
2939 : 0 : build_check_stmt (location, base, len, size_in_bytes, iter,
2940 : : /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2941 : : is_store, /*is_scalar_access*/false, /*align*/0);
2942 : : }
2943 : :
2944 : 0 : maybe_update_mem_ref_hash_table (base, len);
2945 : 0 : *iter = gsi_for_stmt (gsi_stmt (*iter));
2946 : : }
2947 : :
2948 : : /* Instrument the call to a built-in memory access function that is
2949 : : pointed to by the iterator ITER.
2950 : :
2951 : : Upon completion, return TRUE iff *ITER has been advanced to the
2952 : : statement following the one it was originally pointing to. */
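: :
: : /* Two illustrative cases (a sketch, not an exhaustive list):
: :    __atomic_store_n (p, v, ...) dereferences its argument and is
: :    checked as a plain store to *p via instrument_derefs, while
: :    memcpy (d, s, n) describes two memory regions (a read of s, a
: :    write of d); since memcpy is intercepted by the runtime library,
: :    only the mem-ref hash table is updated for it.  */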
2953 : :
2954 : : static bool
2955 : 3194 : instrument_builtin_call (gimple_stmt_iterator *iter)
2956 : : {
2957 : 3194 : if (!(asan_memintrin () || hwasan_memintrin ()))
2958 : : return false;
2959 : :
2960 : 3170 : bool iter_advanced_p = false;
2961 : 3170 : gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2962 : :
2963 : 3170 : gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2964 : :
2965 : 3170 : location_t loc = gimple_location (call);
2966 : :
2967 : 3170 : asan_mem_ref src0, src1, dest;
2968 : 3170 : asan_mem_ref_init (&src0, NULL, 1);
2969 : 3170 : asan_mem_ref_init (&src1, NULL, 1);
2970 : 3170 : asan_mem_ref_init (&dest, NULL, 1);
2971 : :
2972 : 3170 : tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2973 : 3170 : bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2974 : 3170 : dest_is_deref = false, intercepted_p = true;
2975 : :
2976 : 3170 : if (get_mem_refs_of_builtin_call (call,
2977 : : &src0, &src0_len, &src0_is_store,
2978 : : &src1, &src1_len, &src1_is_store,
2979 : : &dest, &dest_len, &dest_is_store,
2980 : : &dest_is_deref, &intercepted_p, iter))
2981 : : {
2982 : 694 : if (dest_is_deref)
2983 : : {
2984 : 28 : instrument_derefs (iter, dest.start, loc, dest_is_store);
2985 : 28 : gsi_next (iter);
2986 : 28 : iter_advanced_p = true;
2987 : : }
2988 : 666 : else if (!intercepted_p
2989 : 0 : && (src0_len || src1_len || dest_len))
2990 : : {
2991 : 0 : if (src0.start != NULL_TREE)
2992 : 0 : instrument_mem_region_access (src0.start, src0_len,
2993 : : iter, loc, /*is_store=*/false);
2994 : 0 : if (src1.start != NULL_TREE)
2995 : 0 : instrument_mem_region_access (src1.start, src1_len,
2996 : : iter, loc, /*is_store=*/false);
2997 : 0 : if (dest.start != NULL_TREE)
2998 : 0 : instrument_mem_region_access (dest.start, dest_len,
2999 : : iter, loc, /*is_store=*/true);
3000 : :
3001 : 0 : *iter = gsi_for_stmt (call);
3002 : 0 : gsi_next (iter);
3003 : 0 : iter_advanced_p = true;
3004 : : }
3005 : : else
3006 : : {
3007 : 666 : if (src0.start != NULL_TREE)
3008 : 478 : maybe_update_mem_ref_hash_table (src0.start, src0_len);
3009 : 666 : if (src1.start != NULL_TREE)
3010 : 50 : maybe_update_mem_ref_hash_table (src1.start, src1_len);
3011 : 666 : if (dest.start != NULL_TREE)
3012 : 584 : maybe_update_mem_ref_hash_table (dest.start, dest_len);
3013 : : }
3014 : : }
3015 : : return iter_advanced_p;
3016 : : }
3017 : :
3018 : : /* Instrument the assignment statement ITER if it is subject to
3019 : : instrumentation. Return TRUE iff instrumentation actually
3020 : : happened. In that case, the iterator ITER is advanced to the next
3021 : : logical expression following the one initially pointed to by ITER,
3022 : :    and the relevant memory reference whose access has been
3023 : : instrumented is added to the memory references hash table. */
3024 : :
3025 : : static bool
3026 : 19914 : maybe_instrument_assignment (gimple_stmt_iterator *iter)
3027 : : {
3028 : 19914 : gimple *s = gsi_stmt (*iter);
3029 : :
3030 : 19914 : gcc_assert (gimple_assign_single_p (s));
3031 : :
3032 : 19914 : tree ref_expr = NULL_TREE;
3033 : 19914 : bool is_store, is_instrumented = false;
3034 : :
3035 : 19914 : if (gimple_store_p (s))
3036 : : {
3037 : 8359 : ref_expr = gimple_assign_lhs (s);
3038 : 8359 : is_store = true;
3039 : 8359 : instrument_derefs (iter, ref_expr,
3040 : : gimple_location (s),
3041 : : is_store);
3042 : 8359 : is_instrumented = true;
3043 : : }
3044 : :
3045 : 19914 : if (gimple_assign_load_p (s))
3046 : : {
3047 : 9829 : ref_expr = gimple_assign_rhs1 (s);
3048 : 9829 : is_store = false;
3049 : 9829 : instrument_derefs (iter, ref_expr,
3050 : : gimple_location (s),
3051 : : is_store);
3052 : 9829 : is_instrumented = true;
3053 : : }
3054 : :
3055 : 19914 : if (is_instrumented)
3056 : 17835 : gsi_next (iter);
3057 : :
3058 : 19914 : return is_instrumented;
3059 : : }
3060 : :
3061 : : /* Instrument the function call pointed to by the iterator ITER, if it
3062 : : is subject to instrumentation. At the moment, the only function
3063 : : calls that are instrumented are some built-in functions that access
3064 : : memory. Look at instrument_builtin_call to learn more.
3065 : :
3066 : : Upon completion return TRUE iff *ITER was advanced to the statement
3067 : : following the one it was originally pointing to. */
3068 : :
3069 : : static bool
3070 : 18215 : maybe_instrument_call (gimple_stmt_iterator *iter)
3071 : : {
3072 : 18215 : gimple *stmt = gsi_stmt (*iter);
3073 : 18215 : bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
3074 : :
3075 : 18215 : if (is_builtin && instrument_builtin_call (iter))
3076 : : return true;
3077 : :
3078 : 18187 : if (gimple_call_noreturn_p (stmt))
3079 : : {
3080 : 823 : if (is_builtin)
3081 : : {
3082 : 220 : tree callee = gimple_call_fndecl (stmt);
3083 : 220 : switch (DECL_FUNCTION_CODE (callee))
3084 : : {
3085 : : case BUILT_IN_UNREACHABLE:
3086 : : case BUILT_IN_UNREACHABLE_TRAP:
3087 : : case BUILT_IN_TRAP:
3088 : : /* Don't instrument these. */
3089 : : return false;
3090 : : default:
3091 : : break;
3092 : : }
3093 : : }
3094 : 724 : if (gimple_call_internal_p (stmt, IFN_ABNORMAL_DISPATCHER))
3095 : : /* Don't instrument this. */
3096 : : return false;
3097 : : /* If a function does not return, then we must handle clearing up the
3098 : : shadow stack accordingly. For ASAN we can simply set the entire stack
3099 : : to "valid" for accesses by setting the shadow space to 0 and all
3100 : : accesses will pass checks. That means that some bad accesses may be
3101 : : missed, but we will not report any false positives.
3102 : :
3103 : : This is not possible for HWASAN. Since there is no "always valid" tag
3104 : :        we cannot set any space to "always valid".  If we were to clear the
3105 : : entire shadow stack then code resuming from `longjmp` or a caught
3106 : : exception would trigger false positives when correctly accessing
3107 : : variables on the stack. Hence we need to handle things like
3108 : : `longjmp`, thread exit, and exceptions in a different way. These
3109 : : problems must be handled externally to the compiler, e.g. in the
3110 : : language runtime. */
3111 : 639 : if (! hwasan_sanitize_p ())
3112 : : {
3113 : 604 : tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
3114 : 604 : gimple *g = gimple_build_call (decl, 0);
3115 : 604 : gimple_set_location (g, gimple_location (stmt));
3116 : 604 : gsi_safe_insert_before (iter, g);
3117 : : }
3118 : : }
3119 : :
3120 : 18003 : bool instrumented = false;
3121 : 18003 : if (gimple_store_p (stmt)
3122 : 18003 : && (gimple_call_builtin_p (stmt)
3123 : 219 : || gimple_call_internal_p (stmt)
3124 : 219 : || !aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
3125 : 219 : gimple_call_fntype (stmt))))
3126 : : {
3127 : 18 : tree ref_expr = gimple_call_lhs (stmt);
3128 : 18 : instrument_derefs (iter, ref_expr,
3129 : : gimple_location (stmt),
3130 : : /*is_store=*/true);
3131 : :
3132 : 18 : instrumented = true;
3133 : : }
3134 : :
3135 : :   /* Walk through the gimple_call arguments and check them if needed.  */
3136 : 18003 : unsigned args_num = gimple_call_num_args (stmt);
3137 : 56828 : for (unsigned i = 0; i < args_num; ++i)
3138 : : {
3139 : 38825 : tree arg = gimple_call_arg (stmt, i);
3140 : :       /* If ARG is not a non-aggregate register variable, the compiler in
3141 : :          general creates a temporary for it and passes that as the call
3142 : :          argument.  But in some cases, e.g. when passing by value a small
3143 : :          structure that fits in a register, the compiler can avoid that
3144 : :          overhead, in which case we should check the argument itself.  */
3145 : 38825 : if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
3146 : : {
3147 : 94 : instrument_derefs (iter, arg,
3148 : : gimple_location (stmt),
3149 : : /*is_store=*/false);
3150 : 94 : instrumented = true;
3151 : : }
3152 : : }
3153 : 18003 : if (instrumented)
3154 : 108 : gsi_next (iter);
3155 : : return instrumented;
3156 : : }
3157 : :
3158 : : /* Walk each instruction of every basic block and instrument those that
3159 : : represent memory references: loads, stores, or function calls.
3160 : : In a given basic block, this function avoids instrumenting memory
3161 : : references that have already been instrumented. */
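: :
: : /* For example (illustrative; foo is assumed to be an arbitrary
: :    external call):
: :
: :      x = *p;   // instrumented
: :      y = *p;   // skipped: same ref in the same extended basic block
: :      foo ();   // may free memory: the mem-ref hash table is flushed
: :      z = *p;   // instrumented again  */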
3162 : :
3163 : : static void
3164 : 6088 : transform_statements (void)
3165 : : {
3166 : 6088 : basic_block bb, last_bb = NULL;
3167 : 6088 : gimple_stmt_iterator i;
3168 : 6088 : int saved_last_basic_block = last_basic_block_for_fn (cfun);
3169 : :
3170 : 29637 : FOR_EACH_BB_FN (bb, cfun)
3171 : : {
3172 : 23549 : basic_block prev_bb = bb;
3173 : :
3174 : 23549 : if (bb->index >= saved_last_basic_block) continue;
3175 : :
3176 : : /* Flush the mem ref hash table, if current bb doesn't have
3177 : : exactly one predecessor, or if that predecessor (skipping
3178 : : over asan created basic blocks) isn't the last processed
3179 : : basic block. Thus we effectively flush on extended basic
3180 : : block boundaries. */
3181 : 23547 : while (single_pred_p (prev_bb))
3182 : : {
3183 : 19838 : prev_bb = single_pred (prev_bb);
3184 : 19838 : if (prev_bb->index < saved_last_basic_block)
3185 : : break;
3186 : : }
3187 : 23541 : if (prev_bb != last_bb)
3188 : 15788 : empty_mem_ref_hash_table ();
3189 : 23541 : last_bb = bb;
3190 : :
3191 : 175302 : for (i = gsi_start_bb (bb); !gsi_end_p (i);)
3192 : : {
3193 : 128220 : gimple *s = gsi_stmt (i);
3194 : :
3195 : 128220 : if (has_stmt_been_instrumented_p (s))
3196 : 735 : gsi_next (&i);
3197 : 127485 : else if (gimple_assign_single_p (s)
3198 : 20869 : && !gimple_clobber_p (s)
3199 : 147399 : && maybe_instrument_assignment (&i))
3200 : : /* Nothing to do as maybe_instrument_assignment advanced
3201 : : the iterator I. */;
3202 : 109650 : else if (is_gimple_call (s) && maybe_instrument_call (&i))
3203 : : /* Nothing to do as maybe_instrument_call
3204 : : advanced the iterator I. */;
3205 : : else
3206 : : {
3207 : : /* No instrumentation happened.
3208 : :
3209 : : If the current instruction is a function call that
3210 : : might free something, let's forget about the memory
3211 : : references that got instrumented. Otherwise we might
3212 : : miss some instrumentation opportunities. Do the same
3213 : :               for an ASAN_MARK poisoning internal function.  */
3214 : 109514 : if (is_gimple_call (s)
3215 : 109514 : && (!nonfreeing_call_p (s)
3216 : 5135 : || asan_mark_p (s, ASAN_MARK_POISON)))
3217 : 12944 : empty_mem_ref_hash_table ();
3218 : :
3219 : 109514 : gsi_next (&i);
3220 : : }
3221 : : }
3222 : : }
3223 : 6088 : free_mem_ref_resources ();
3224 : 6088 : }
3225 : :
3226 : : /* Build
3227 : : __asan_before_dynamic_init (module_name)
3228 : : or
3229 : : __asan_after_dynamic_init ()
3230 : : call. */
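: :
: : /* These calls bracket a module's dynamic initializers so the runtime
: :    can detect initialization-order bugs, roughly (a sketch, with
: :    "t.cc" standing for the module name):
: :
: :      __asan_before_dynamic_init ("t.cc");
: :      global = compute ();
: :      __asan_after_dynamic_init ();  */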
3231 : :
3232 : : tree
3233 : 42 : asan_dynamic_init_call (bool after_p)
3234 : : {
3235 : 42 : if (shadow_ptr_types[0] == NULL_TREE)
3236 : 21 : asan_init_shadow_ptr_types ();
3237 : :
3238 : 63 : tree fn = builtin_decl_implicit (after_p
3239 : : ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
3240 : : : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
3241 : 42 : tree module_name_cst = NULL_TREE;
3242 : 42 : if (!after_p)
3243 : : {
3244 : 21 : pretty_printer module_name_pp;
3245 : 21 : pp_string (&module_name_pp, main_input_filename);
3246 : :
3247 : 21 : module_name_cst = asan_pp_string (&module_name_pp);
3248 : 21 : module_name_cst = fold_convert (const_ptr_type_node,
3249 : : module_name_cst);
3250 : 21 : }
3251 : :
3252 : 42 : return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
3253 : : }
3254 : :
3255 : : /* Build
3256 : : struct __asan_global
3257 : : {
3258 : : const void *__beg;
3259 : : uptr __size;
3260 : : uptr __size_with_redzone;
3261 : : const void *__name;
3262 : : const void *__module_name;
3263 : : uptr __has_dynamic_init;
3264 : : __asan_global_source_location *__location;
3265 : : char *__odr_indicator;
3266 : : } type. */
3267 : :
3268 : : static tree
3269 : 1037 : asan_global_struct (void)
3270 : : {
3271 : 1037 : static const char *field_names[]
3272 : : = { "__beg", "__size", "__size_with_redzone",
3273 : : "__name", "__module_name", "__has_dynamic_init", "__location",
3274 : : "__odr_indicator" };
3275 : 1037 : tree fields[ARRAY_SIZE (field_names)], ret;
3276 : 1037 : unsigned i;
3277 : :
3278 : 1037 : ret = make_node (RECORD_TYPE);
3279 : 10370 : for (i = 0; i < ARRAY_SIZE (field_names); i++)
3280 : : {
3281 : 8296 : fields[i]
3282 : 8296 : = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
3283 : : get_identifier (field_names[i]),
3284 : 8296 : (i == 0 || i == 3) ? const_ptr_type_node
3285 : : : pointer_sized_int_node);
3286 : 8296 : DECL_CONTEXT (fields[i]) = ret;
3287 : 8296 : if (i)
3288 : 7259 : DECL_CHAIN (fields[i - 1]) = fields[i];
3289 : : }
3290 : 1037 : tree type_decl = build_decl (input_location, TYPE_DECL,
3291 : : get_identifier ("__asan_global"), ret);
3292 : 1037 : DECL_IGNORED_P (type_decl) = 1;
3293 : 1037 : DECL_ARTIFICIAL (type_decl) = 1;
3294 : 1037 : TYPE_FIELDS (ret) = fields[0];
3295 : 1037 : TYPE_NAME (ret) = type_decl;
3296 : 1037 : TYPE_STUB_DECL (ret) = type_decl;
3297 : 1037 : TYPE_ARTIFICIAL (ret) = 1;
3298 : 1037 : layout_type (ret);
3299 : 1037 : return ret;
3300 : : }
3301 : :
3302 : : /* Create and return odr indicator symbol for DECL.
3303 : : TYPE is __asan_global struct type as returned by asan_global_struct. */
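: :
: : /* E.g. for a public 'int global;' this creates a one-byte public
: :    variable named __odr_asan.global ('$' or '_' instead of the dot
: :    when the assembler does not accept dots in labels).  The runtime
: :    uses its address to detect the same global being registered from
: :    two dynamic objects, i.e. an ODR violation.  */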
3304 : :
3305 : : static tree
3306 : 1248 : create_odr_indicator (tree decl, tree type)
3307 : : {
3308 : 1248 : char *name;
3309 : 1248 : tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3310 : 1248 : tree decl_name
3311 : 1248 : = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
3312 : 0 : : DECL_NAME (decl));
3313 : : /* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */
3314 : 1248 : if (decl_name == NULL_TREE)
3315 : 0 : return build_int_cst (uptr, 0);
3316 : 1248 : const char *dname = IDENTIFIER_POINTER (decl_name);
3317 : 1248 : if (HAS_DECL_ASSEMBLER_NAME_P (decl))
3318 : 1248 : dname = targetm.strip_name_encoding (dname);
3319 : 1248 : size_t len = strlen (dname) + sizeof ("__odr_asan_");
3320 : 1248 : name = XALLOCAVEC (char, len);
3321 : 1248 : snprintf (name, len, "__odr_asan_%s", dname);
3322 : : #ifndef NO_DOT_IN_LABEL
3323 : 1248 : name[sizeof ("__odr_asan") - 1] = '.';
3324 : : #elif !defined(NO_DOLLAR_IN_LABEL)
3325 : : name[sizeof ("__odr_asan") - 1] = '$';
3326 : : #endif
3327 : 1248 : tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
3328 : : char_type_node);
3329 : 1248 : TREE_ADDRESSABLE (var) = 1;
3330 : 1248 : TREE_READONLY (var) = 0;
3331 : 1248 : TREE_THIS_VOLATILE (var) = 1;
3332 : 1248 : DECL_ARTIFICIAL (var) = 1;
3333 : 1248 : DECL_IGNORED_P (var) = 1;
3334 : 1248 : TREE_STATIC (var) = 1;
3335 : 1248 : TREE_PUBLIC (var) = 1;
3336 : 1248 : DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
3337 : 1248 : DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
3338 : :
3339 : 1248 : TREE_USED (var) = 1;
3340 : 1248 : tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
3341 : : build_int_cst (unsigned_type_node, 0));
3342 : 1248 : TREE_CONSTANT (ctor) = 1;
3343 : 1248 : TREE_STATIC (ctor) = 1;
3344 : 1248 : DECL_INITIAL (var) = ctor;
3345 : 1248 : DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
3346 : 1248 : NULL, DECL_ATTRIBUTES (var));
3347 : 1248 : make_decl_rtl (var);
3348 : 1248 : varpool_node::finalize_decl (var);
3349 : 1248 : return fold_convert (uptr, build_fold_addr_expr (var));
3350 : : }
3351 : :
3352 : : /* Return true if DECL, a global var, might be overridden and needs
3353 : : an additional odr indicator symbol. */
3354 : :
3355 : : static bool
3356 : 3765 : asan_needs_odr_indicator_p (tree decl)
3357 : : {
3358 : : /* Don't emit ODR indicators for kernel because:
3359 : : a) Kernel is written in C thus doesn't need ODR indicators.
3360 : : b) Some kernel code may have assumptions about symbols containing specific
3361 : : patterns in their names. Since ODR indicators contain original names
3362 : : of symbols they are emitted for, these assumptions would be broken for
3363 : : ODR indicator symbols. */
3364 : 3765 : return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
3365 : 3765 : && !DECL_ARTIFICIAL (decl)
3366 : 1652 : && !DECL_WEAK (decl)
3367 : 5417 : && TREE_PUBLIC (decl));
3368 : : }
3369 : :
3370 : : /* Append description of a single global DECL into vector V.
3371 : : TYPE is __asan_global struct type as returned by asan_global_struct. */
3372 : :
3373 : : static void
3374 : 3765 : asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
3375 : : {
3376 : 3765 : tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3377 : 3765 : unsigned HOST_WIDE_INT size;
3378 : 3765 : tree str_cst, module_name_cst, refdecl = decl;
3379 : 3765 : vec<constructor_elt, va_gc> *vinner = NULL;
3380 : :
3381 : 3765 : pretty_printer asan_pp, module_name_pp;
3382 : :
3383 : 3765 : if (DECL_NAME (decl))
3384 : 3765 : pp_tree_identifier (&asan_pp, DECL_NAME (decl));
3385 : : else
3386 : 0 : pp_string (&asan_pp, "<unknown>");
3387 : 3765 : str_cst = asan_pp_string (&asan_pp);
3388 : :
3389 : 3765 : if (!in_lto_p)
3390 : 3345 : pp_string (&module_name_pp, main_input_filename);
3391 : : else
3392 : : {
3393 : 420 : const_tree tu = get_ultimate_context ((const_tree)decl);
3394 : 420 : if (tu != NULL_TREE)
3395 : 288 : pp_string (&module_name_pp, IDENTIFIER_POINTER (DECL_NAME (tu)));
3396 : : else
3397 : 132 : pp_string (&module_name_pp, aux_base_name);
3398 : : }
3399 : :
3400 : 3765 : module_name_cst = asan_pp_string (&module_name_pp);
3401 : :
3402 : 3765 : if (asan_needs_local_alias (decl))
3403 : : {
3404 : 0 : char buf[20];
3405 : 0 : ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
3406 : 0 : refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
3407 : 0 : VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
3408 : 0 : TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
3409 : 0 : TREE_READONLY (refdecl) = TREE_READONLY (decl);
3410 : 0 : TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
3411 : 0 : DECL_NOT_GIMPLE_REG_P (refdecl) = DECL_NOT_GIMPLE_REG_P (decl);
3412 : 0 : DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
3413 : 0 : DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
3414 : 0 : TREE_STATIC (refdecl) = 1;
3415 : 0 : TREE_PUBLIC (refdecl) = 0;
3416 : 0 : TREE_USED (refdecl) = 1;
3417 : 0 : assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
3418 : : }
3419 : :
3420 : 3765 : tree odr_indicator_ptr
3421 : 3765 : = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
3422 : 2517 : : build_int_cst (uptr, 0));
3423 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3424 : : fold_convert (const_ptr_type_node,
3425 : : build_fold_addr_expr (refdecl)));
3426 : 3765 : size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
3427 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
3428 : 3765 : size += asan_red_zone_size (size);
3429 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
3430 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3431 : : fold_convert (const_ptr_type_node, str_cst));
3432 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3433 : : fold_convert (const_ptr_type_node, module_name_cst));
3434 : 3765 : varpool_node *vnode = varpool_node::get (decl);
3435 : 3765 : int has_dynamic_init = 0;
3436 : : /* FIXME: Enable initialization order fiasco detection in LTO mode once
3437 : :      a proper fix for PR 79061 is applied.  */
3438 : 3765 : if (!in_lto_p)
3439 : 3345 : has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
3440 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3441 : : build_int_cst (uptr, has_dynamic_init));
3442 : 3765 : tree locptr = NULL_TREE;
3443 : 3765 : location_t loc = DECL_SOURCE_LOCATION (decl);
3444 : 3765 : expanded_location xloc = expand_location (loc);
3445 : 3765 : if (xloc.file != NULL)
3446 : : {
3447 : 2032 : static int lasanloccnt = 0;
3448 : 2032 : char buf[25];
3449 : 2032 : ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
3450 : 2032 : tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3451 : : ubsan_get_source_location_type ());
3452 : 2032 : TREE_STATIC (var) = 1;
3453 : 2032 : TREE_PUBLIC (var) = 0;
3454 : 2032 : DECL_ARTIFICIAL (var) = 1;
3455 : 2032 : DECL_IGNORED_P (var) = 1;
3456 : 2032 : pretty_printer filename_pp;
3457 : 2032 : pp_string (&filename_pp, xloc.file);
3458 : 2032 : tree str = asan_pp_string (&filename_pp);
3459 : 2032 : tree ctor = build_constructor_va (TREE_TYPE (var), 3,
3460 : : NULL_TREE, str, NULL_TREE,
3461 : : build_int_cst (unsigned_type_node,
3462 : 2032 : xloc.line), NULL_TREE,
3463 : : build_int_cst (unsigned_type_node,
3464 : 2032 : xloc.column));
3465 : 2032 : TREE_CONSTANT (ctor) = 1;
3466 : 2032 : TREE_STATIC (ctor) = 1;
3467 : 2032 : DECL_INITIAL (var) = ctor;
3468 : 2032 : varpool_node::finalize_decl (var);
3469 : 2032 : locptr = fold_convert (uptr, build_fold_addr_expr (var));
3470 : 2032 : }
3471 : : else
3472 : 1733 : locptr = build_int_cst (uptr, 0);
3473 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
3474 : 3765 : CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
3475 : 3765 : init = build_constructor (type, vinner);
3476 : 3765 : CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
3477 : 3765 : }
3478 : :
3479 : : /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
3480 : : void
3481 : 37061 : initialize_sanitizer_builtins (void)
3482 : : {
3483 : 37061 : tree decl;
3484 : :
3485 : 37061 : if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
3486 : 37012 : return;
3487 : :
3488 : 49 : tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
3489 : 49 : tree BT_FN_VOID_PTR
3490 : 49 : = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
3491 : 49 : tree BT_FN_VOID_CONST_PTR
3492 : 49 : = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
3493 : 49 : tree BT_FN_VOID_PTR_PTR
3494 : 49 : = build_function_type_list (void_type_node, ptr_type_node,
3495 : : ptr_type_node, NULL_TREE);
3496 : 49 : tree BT_FN_VOID_PTR_PTR_PTR
3497 : 49 : = build_function_type_list (void_type_node, ptr_type_node,
3498 : : ptr_type_node, ptr_type_node, NULL_TREE);
3499 : 49 : tree BT_FN_VOID_PTR_PTRMODE
3500 : 49 : = build_function_type_list (void_type_node, ptr_type_node,
3501 : : pointer_sized_int_node, NULL_TREE);
3502 : 49 : tree BT_FN_VOID_INT
3503 : 49 : = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
3504 : 49 : tree BT_FN_SIZE_CONST_PTR_INT
3505 : 49 : = build_function_type_list (size_type_node, const_ptr_type_node,
3506 : : integer_type_node, NULL_TREE);
3507 : :
3508 : 49 : tree BT_FN_VOID_UINT8_UINT8
3509 : 49 : = build_function_type_list (void_type_node, unsigned_char_type_node,
3510 : : unsigned_char_type_node, NULL_TREE);
3511 : 49 : tree BT_FN_VOID_UINT16_UINT16
3512 : 49 : = build_function_type_list (void_type_node, uint16_type_node,
3513 : : uint16_type_node, NULL_TREE);
3514 : 49 : tree BT_FN_VOID_UINT32_UINT32
3515 : 49 : = build_function_type_list (void_type_node, uint32_type_node,
3516 : : uint32_type_node, NULL_TREE);
3517 : 49 : tree BT_FN_VOID_UINT64_UINT64
3518 : 49 : = build_function_type_list (void_type_node, uint64_type_node,
3519 : : uint64_type_node, NULL_TREE);
3520 : 49 : tree BT_FN_VOID_FLOAT_FLOAT
3521 : 49 : = build_function_type_list (void_type_node, float_type_node,
3522 : : float_type_node, NULL_TREE);
3523 : 49 : tree BT_FN_VOID_DOUBLE_DOUBLE
3524 : 49 : = build_function_type_list (void_type_node, double_type_node,
3525 : : double_type_node, NULL_TREE);
3526 : 49 : tree BT_FN_VOID_UINT64_PTR
3527 : 49 : = build_function_type_list (void_type_node, uint64_type_node,
3528 : : ptr_type_node, NULL_TREE);
3529 : :
3530 : 49 : tree BT_FN_PTR_CONST_PTR_UINT8
3531 : 49 : = build_function_type_list (ptr_type_node, const_ptr_type_node,
3532 : : unsigned_char_type_node, NULL_TREE);
3533 : 49 : tree BT_FN_VOID_PTR_UINT8_PTRMODE
3534 : 49 : = build_function_type_list (void_type_node, ptr_type_node,
3535 : : unsigned_char_type_node,
3536 : : pointer_sized_int_node, NULL_TREE);
3537 : :
3538 : 49 : tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
3539 : 49 : tree BT_FN_IX_CONST_VPTR_INT[5];
3540 : 49 : tree BT_FN_IX_VPTR_IX_INT[5];
3541 : 49 : tree BT_FN_VOID_VPTR_IX_INT[5];
3542 : 49 : tree vptr
3543 : 49 : = build_pointer_type (build_qualified_type (void_type_node,
3544 : : TYPE_QUAL_VOLATILE));
3545 : 49 : tree cvptr
3546 : 49 : = build_pointer_type (build_qualified_type (void_type_node,
3547 : : TYPE_QUAL_VOLATILE
3548 : : |TYPE_QUAL_CONST));
3549 : 49 : tree boolt
3550 : 49 : = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
3551 : 49 : int i;
3552 : 294 : for (i = 0; i < 5; i++)
3553 : : {
3554 : 245 : tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
3555 : 245 : BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
3556 : 245 : = build_function_type_list (boolt, vptr, ptr_type_node, ix,
3557 : : integer_type_node, integer_type_node,
3558 : : NULL_TREE);
3559 : 245 : BT_FN_IX_CONST_VPTR_INT[i]
3560 : 245 : = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
3561 : 245 : BT_FN_IX_VPTR_IX_INT[i]
3562 : 245 : = build_function_type_list (ix, vptr, ix, integer_type_node,
3563 : : NULL_TREE);
3564 : 245 : BT_FN_VOID_VPTR_IX_INT[i]
3565 : 245 : = build_function_type_list (void_type_node, vptr, ix,
3566 : : integer_type_node, NULL_TREE);
3567 : : }
3568 : : #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
3569 : : #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
3570 : : #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
3571 : : #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
3572 : : #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
3573 : : #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
3574 : : #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
3575 : : #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
3576 : : #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
3577 : : #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
3578 : : #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
3579 : : #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
3580 : : #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
3581 : : #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
3582 : : #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
3583 : : #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
3584 : : #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
3585 : : #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
3586 : : #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
3587 : : #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
3588 : : #undef ATTR_NOTHROW_LIST
3589 : : #define ATTR_NOTHROW_LIST ECF_NOTHROW
3590 : : #undef ATTR_NOTHROW_LEAF_LIST
3591 : : #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
3592 : : #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
3593 : : #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
3594 : : #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
3595 : : #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
3596 : : #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3597 : : #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
3598 : : ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
3599 : : #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
3600 : : #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
3601 : : ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
3602 : : #undef ATTR_COLD_NOTHROW_LEAF_LIST
3603 : : #define ATTR_COLD_NOTHROW_LEAF_LIST \
3604 : : /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
3605 : : #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
3606 : : #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
3607 : : /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
3608 : : #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
3609 : : #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
3610 : : /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3611 : : #undef ATTR_PURE_NOTHROW_LEAF_LIST
3612 : : #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
3613 : : #undef DEF_BUILTIN_STUB
3614 : : #define DEF_BUILTIN_STUB(ENUM, NAME)
3615 : : #undef DEF_SANITIZER_BUILTIN_1
3616 : : #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS) \
3617 : : do { \
3618 : : decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
3619 : : BUILT_IN_NORMAL, NAME, NULL_TREE); \
3620 : : set_call_expr_flags (decl, ATTRS); \
3621 : : set_builtin_decl (ENUM, decl, true); \
3622 : : } while (0)
3623 : : #undef DEF_SANITIZER_BUILTIN
3624 : : #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
3625 : : DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
3626 : :
3627 : : #include "sanitizer.def"
3628 : :
3629 : : /* -fsanitize=object-size uses __builtin_dynamic_object_size and
3630 : : __builtin_object_size, but they might not be available for e.g. Fortran at
3631 : : this point. We use DEF_SANITIZER_BUILTIN here only as a convenience
3632 : : macro. */
3633 : 49 : if (flag_sanitize & SANITIZE_OBJECT_SIZE)
3634 : : {
3635 : 20 : if (!builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
3636 : 14 : DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
3637 : : BT_FN_SIZE_CONST_PTR_INT,
3638 : : ATTR_PURE_NOTHROW_LEAF_LIST);
3639 : 69 : if (!builtin_decl_implicit_p (BUILT_IN_DYNAMIC_OBJECT_SIZE))
3640 : 14 : DEF_SANITIZER_BUILTIN_1 (BUILT_IN_DYNAMIC_OBJECT_SIZE,
3641 : : "dynamic_object_size",
3642 : : BT_FN_SIZE_CONST_PTR_INT,
3643 : : ATTR_PURE_NOTHROW_LEAF_LIST);
3644 : : }
3645 : :
3646 : : #undef DEF_SANITIZER_BUILTIN_1
3647 : : #undef DEF_SANITIZER_BUILTIN
3648 : : #undef DEF_BUILTIN_STUB
3649 : : }
3650 : :
3651 : : /* Called via hash_table::traverse.  Count the number of emitted
3652 : : STRING_CSTs in the constant hash table. */
3653 : :
3654 : : int
3655 : 2947 : count_string_csts (constant_descriptor_tree **slot,
3656 : : unsigned HOST_WIDE_INT *data)
3657 : : {
3658 : 2947 : struct constant_descriptor_tree *desc = *slot;
3659 : 2947 : if (TREE_CODE (desc->value) == STRING_CST
3660 : 2926 : && TREE_ASM_WRITTEN (desc->value)
3661 : 5873 : && asan_protect_global (desc->value))
3662 : 1533 : ++*data;
3663 : 2947 : return 1;
3664 : : }
3665 : :
3666 : : /* Helper structure to pass two parameters to
3667 : : add_string_csts. */
3668 : :
3669 : : struct asan_add_string_csts_data
3670 : : {
3671 : : tree type;
3672 : : vec<constructor_elt, va_gc> *v;
3673 : : };
3674 : :
3675 : : /* Called via hash_table::traverse. Call asan_add_global
3676 : : on emitted STRING_CSTs from the constant hash table. */
3677 : :
3678 : : int
3679 : 3160 : add_string_csts (constant_descriptor_tree **slot,
3680 : : asan_add_string_csts_data *aascd)
3681 : : {
3682 : 3160 : struct constant_descriptor_tree *desc = *slot;
3683 : 3160 : if (TREE_CODE (desc->value) == STRING_CST
3684 : 3146 : && TREE_ASM_WRITTEN (desc->value)
3685 : 6306 : && asan_protect_global (desc->value))
3686 : : {
3687 : 1533 : asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3688 : : aascd->type, aascd->v);
3689 : : }
3690 : 3160 : return 1;
3691 : : }
3692 : :
3693 : : /* Needs to be GTY(()), because cgraph_build_static_cdtor may
3694 : : invoke ggc_collect. */
3695 : : static GTY(()) tree asan_ctor_statements;
3696 : :
3697 : : /* Module-level instrumentation.
3698 : : - Insert __asan_init_vN() into the list of CTORs.
3699 : : - TODO: insert redzones around globals.
3700 : : */
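: :
: : /* For user-space ASan the net effect is one constructor and one
: :    destructor per module, roughly (a sketch; the version-mismatch
: :    symbol carries the current ASan API version N):
: :
: :      ctor:  __asan_init ();
: :             __asan_version_mismatch_check_vN ();
: :             __asan_register_globals (&.LASAN0, gcount);
: :      dtor:  __asan_unregister_globals (&.LASAN0, gcount);  */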
3701 : :
3702 : : void
3703 : 2360 : asan_finish_file (void)
3704 : : {
3705 : 2360 : varpool_node *vnode;
3706 : 2360 : unsigned HOST_WIDE_INT gcount = 0;
3707 : :
3708 : 2360 : if (shadow_ptr_types[0] == NULL_TREE)
3709 : 108 : asan_init_shadow_ptr_types ();
3710 : : /* Avoid instrumenting code in the asan ctors/dtors.
3711 : : We don't need to insert padding after the description strings,
3712 : :       nor after the .LASAN* array.  */
3713 : 2360 : flag_sanitize &= ~SANITIZE_ADDRESS;
3714 : :
3715 : : /* For user-space we want asan constructors to run first.
3716 : : Linux kernel does not support priorities other than default, and the only
3717 : : other user of constructors is coverage. So we run with the default
3718 : : priority. */
3719 : 123 : int priority = flag_sanitize & SANITIZE_USER_ADDRESS
3720 : 2360 : ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
3721 : :
3722 : 2360 : if (flag_sanitize & SANITIZE_USER_ADDRESS)
3723 : : {
3724 : 2237 : tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
3725 : 2237 : append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3726 : 2237 : fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
3727 : 2237 : append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3728 : : }
3729 : 4809 : FOR_EACH_DEFINED_VARIABLE (vnode)
3730 : 2449 : if (TREE_ASM_WRITTEN (vnode->decl)
3731 : 2449 : && asan_protect_global (vnode->decl))
3732 : 2232 : ++gcount;
3733 : 2360 : hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
3734 : 2360 : const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
3735 : 5307 : (&gcount);
3736 : 2360 : if (gcount)
3737 : : {
3738 : 1037 : tree type = asan_global_struct (), var, ctor;
3739 : 1037 : tree dtor_statements = NULL_TREE;
3740 : 1037 : vec<constructor_elt, va_gc> *v;
3741 : 1037 : char buf[20];
3742 : :
3743 : 1037 : type = build_array_type_nelts (type, gcount);
3744 : 1037 : ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
3745 : 1037 : var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3746 : : type);
3747 : 1037 : TREE_STATIC (var) = 1;
3748 : 1037 : TREE_PUBLIC (var) = 0;
3749 : 1037 : DECL_ARTIFICIAL (var) = 1;
3750 : 1037 : DECL_IGNORED_P (var) = 1;
3751 : 1037 : vec_alloc (v, gcount);
3752 : 3406 : FOR_EACH_DEFINED_VARIABLE (vnode)
3753 : 2369 : if (TREE_ASM_WRITTEN (vnode->decl)
3754 : 2369 : && asan_protect_global (vnode->decl))
3755 : 2232 : asan_add_global (vnode->decl, TREE_TYPE (type), v);
3756 : 1037 : struct asan_add_string_csts_data aascd;
3757 : 1037 : aascd.type = TREE_TYPE (type);
3758 : 1037 : aascd.v = v;
3759 : 1037 : const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
3760 : 4197 : (&aascd);
3761 : 1037 : ctor = build_constructor (type, v);
3762 : 1037 : TREE_CONSTANT (ctor) = 1;
3763 : 1037 : TREE_STATIC (ctor) = 1;
3764 : 1037 : DECL_INITIAL (var) = ctor;
3765 : 1037 : SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
3766 : : ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
3767 : :
3768 : 1037 : varpool_node::finalize_decl (var);
3769 : :
3770 : 1037 : tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
3771 : 1037 : tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
3772 : 1037 : append_to_statement_list (build_call_expr (fn, 2,
3773 : : build_fold_addr_expr (var),
3774 : : gcount_tree),
3775 : : &asan_ctor_statements);
3776 : :
3777 : 1037 : fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
3778 : 1037 : append_to_statement_list (build_call_expr (fn, 2,
3779 : : build_fold_addr_expr (var),
3780 : : gcount_tree),
3781 : : &dtor_statements);
3782 : 1037 : cgraph_build_static_cdtor ('D', dtor_statements, priority);
3783 : : }
3784 : 2360 : if (asan_ctor_statements)
3785 : 2237 : cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
3786 : 2360 : flag_sanitize |= SANITIZE_ADDRESS;
3787 : 2360 : }
3788 : :
3789 : : /* Poison or unpoison (depending on IS_CLOBBER) the shadow memory at
3790 : :    offset BASE_ADDR_OFFSET from the SHADOW address.  Newly created
3791 : :    statements are inserted after ITER with the given location LOC.  We
3792 : :    store SIZE shadow bytes, where LAST_CHUNK_SIZE is greater than zero
3793 : :    when the object ends partway through its last shadow granule.  */
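: :
: : /* E.g. unpoisoning a 10-byte variable touches two shadow bytes:
: :    0x00 (all 8 bytes of the first granule valid) then 0x02 (2 valid
: :    bytes in the last granule); below they are combined into a single
: :    2-byte store of 0x0200 on a little-endian target.  Poisoning
: :    instead stores ASAN_STACK_MAGIC_USE_AFTER_SCOPE in every byte.  */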
3794 : :
3795 : : static void
3796 : 1374 : asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
3797 : : tree shadow,
3798 : : unsigned HOST_WIDE_INT base_addr_offset,
3799 : : bool is_clobber, unsigned size,
3800 : : unsigned last_chunk_size)
3801 : : {
3802 : 1374 : tree shadow_ptr_type;
3803 : :
3804 : 1374 : switch (size)
3805 : : {
3806 : 788 : case 1:
3807 : 788 : shadow_ptr_type = shadow_ptr_types[0];
3808 : 788 : break;
3809 : 86 : case 2:
3810 : 86 : shadow_ptr_type = shadow_ptr_types[1];
3811 : 86 : break;
3812 : 500 : case 4:
3813 : 500 : shadow_ptr_type = shadow_ptr_types[2];
3814 : 500 : break;
3815 : 0 : default:
3816 : 0 : gcc_unreachable ();
3817 : : }
3818 : :
3819 : 1374 : unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
3820 : 1374 : unsigned HOST_WIDE_INT val = 0;
3821 : 1374 : unsigned last_pos = size;
3822 : 1374 : if (last_chunk_size && !is_clobber)
3823 : 304 : last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
3824 : 4334 : for (unsigned i = 0; i < size; ++i)
3825 : : {
3826 : 2960 : unsigned char shadow_c = c;
3827 : 2960 : if (i == last_pos)
3828 : 304 : shadow_c = last_chunk_size;
3829 : 2960 : val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3830 : : }
3831 : :
3832 : : /* Handle last chunk in unpoisoning. */
3833 : 1374 : tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3834 : :
3835 : 1374 : tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3836 : 1374 : build_int_cst (shadow_ptr_type, base_addr_offset));
3837 : :
3838 : 1374 : gimple *g = gimple_build_assign (dest, magic);
3839 : 1374 : gimple_set_location (g, loc);
3840 : 1374 : gsi_insert_after (iter, g, GSI_NEW_STMT);
3841 : 1374 : }
3842 : :
3843 : : /* Expand the ASAN_MARK builtins. */
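: :
: : /* E.g. ASAN_MARK (UNPOISON, &x, 10) becomes direct shadow stores
: :    (via asan_store_shadow_bytes) when 10 does not exceed
: :    param_use_after_scope_direct_emission_threshold, and otherwise a
: :    call to __asan_unpoison_stack_memory (&x, 10); POISON is handled
: :    symmetrically.  */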
3844 : :
3845 : : bool
3846 : 1315 : asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3847 : : {
3848 : 1315 : gimple *g = gsi_stmt (*iter);
3849 : 1315 : location_t loc = gimple_location (g);
3850 : 1315 : HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3851 : 1315 : bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3852 : :
3853 : 1315 : tree base = gimple_call_arg (g, 1);
3854 : 1315 : gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3855 : 1315 : tree decl = TREE_OPERAND (base, 0);
3856 : :
3857 : : /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3858 : 1315 : if (TREE_CODE (decl) == COMPONENT_REF
3859 : 1315 : && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3860 : 1 : decl = TREE_OPERAND (decl, 0);
3861 : :
3862 : 1315 : gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3863 : :
3864 : 1315 : if (hwasan_sanitize_p ())
3865 : : {
3866 : 0 : gcc_assert (param_hwasan_instrument_stack);
3867 : 0 : gimple_seq stmts = NULL;
3868 : : /* Here we swap ASAN_MARK calls for HWASAN_MARK.
3869 : : This is because we are using the approach of using ASAN_MARK as a
3870 : : synonym until here.
3871 : : That approach means we don't yet have to duplicate all the special
3872 : : cases for ASAN_MARK and ASAN_POISON with the exact same handling but
3873 : : called HWASAN_MARK etc.
3874 : :
3875 : : N.b. __asan_poison_stack_memory (which implements ASAN_MARK for ASAN)
3876 : : rounds the size up to its shadow memory granularity, while
3877 : : __hwasan_tag_memory (which implements the same for HWASAN) does not.
3878 : : Hence we emit HWASAN_MARK with an aligned size unlike ASAN_MARK. */
3879 : 0 : tree len = gimple_call_arg (g, 2);
3880 : 0 : tree new_len = gimple_build_round_up (&stmts, loc, size_type_node, len,
3881 : 0 : HWASAN_TAG_GRANULE_SIZE);
3882 : 0 : gimple_build (&stmts, loc, CFN_HWASAN_MARK,
3883 : : void_type_node, gimple_call_arg (g, 0),
3884 : : base, new_len);
3885 : 0 : gsi_replace_with_seq (iter, stmts, true);
3886 : 0 : return false;
3887 : : }
3888 : :
3889 : 1315 : if (is_poison)
3890 : : {
3891 : 718 : if (asan_handled_variables == NULL)
3892 : 271 : asan_handled_variables = new hash_set<tree> (16);
3893 : 718 : asan_handled_variables->add (decl);
3894 : : }
3895 : 1315 : tree len = gimple_call_arg (g, 2);
3896 : :
3897 : 1315 : gcc_assert (poly_int_tree_p (len));
3898 : :
3899 : 1315 : g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3900 : : NOP_EXPR, base);
3901 : 1315 : gimple_set_location (g, loc);
3902 : 1315 : gsi_replace (iter, g, false);
3903 : 1315 : tree base_addr = gimple_assign_lhs (g);
3904 : :
3905 : : /* Generate direct emission if size_in_bytes is small. */
3906 : 1315 : unsigned threshold = param_use_after_scope_direct_emission_threshold;
3907 : 1315 : if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) <= threshold)
3908 : : {
3909 : 1307 : unsigned HOST_WIDE_INT size_in_bytes = tree_to_uhwi (len);
3910 : 1307 : const unsigned HOST_WIDE_INT shadow_size
3911 : 1307 : = shadow_mem_size (size_in_bytes);
3912 : 1307 : const unsigned int shadow_align
3913 : 1307 : = (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;
3914 : :
3915 : 1307 : tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3916 : : shadow_ptr_types[0], true);
3917 : :
3918 : 2681 : for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3919 : : {
3920 : 1374 : unsigned size = 1;
3921 : 1374 : if (shadow_size - offset >= 4
3922 : : && (!STRICT_ALIGNMENT || shadow_align >= 4))
3923 : : size = 4;
3924 : 874 : else if (shadow_size - offset >= 2
3925 : : && (!STRICT_ALIGNMENT || shadow_align >= 2))
3926 : 86 : size = 2;
3927 : :
3928 : 1374 : unsigned HOST_WIDE_INT last_chunk_size = 0;
3929 : 1374 : unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3930 : 1374 : if (s > size_in_bytes)
3931 : 624 : last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3932 : :
3933 : 1374 : asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3934 : : size, last_chunk_size);
3935 : 1374 : offset += size;
3936 : : }
3937 : : }
3938 : : else
3939 : : {
3940 : 8 : g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3941 : : NOP_EXPR, len);
3942 : 8 : gimple_set_location (g, loc);
3943 : 8 : gsi_safe_insert_before (iter, g);
3944 : 8 : tree sz_arg = gimple_assign_lhs (g);
3945 : :
3946 : 8 : tree fun
3947 : 8 : = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3948 : : : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3949 : 8 : g = gimple_build_call (fun, 2, base_addr, sz_arg);
3950 : 8 : gimple_set_location (g, loc);
3951 : 8 : gsi_insert_after (iter, g, GSI_NEW_STMT);
3952 : : }
3953 : :
3954 : : return false;
3955 : : }
3956 : :
3957 : : /* Expand the ASAN_{LOAD,STORE} builtins. */
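: :
: : /* The expansion splits the block at the check: the shadow test
: :    branches to a 'then' block calling the matching __asan_report_*
: :    routine, which has a fallthrough edge only when recovery
: :    (-fsanitize-recover=address) is enabled, since the report call
: :    otherwise does not return; the guarded access continues in the
: :    'else' block.  When USE_CALLS is set (see --param
: :    asan-instrumentation-with-call-threshold), the whole check is
: :    instead a single call such as __asan_load4 (base).  */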
3958 : :
3959 : : bool
3960 : 10940 : asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
3961 : : {
3962 : 10940 : gcc_assert (!hwasan_sanitize_p ());
3963 : 10940 : gimple *g = gsi_stmt (*iter);
3964 : 10940 : location_t loc = gimple_location (g);
3965 : 10940 : bool recover_p;
3966 : 10940 : if (flag_sanitize & SANITIZE_USER_ADDRESS)
3967 : 10897 : recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3968 : : else
3969 : 43 : recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3970 : :
3971 : 10940 : HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
3972 : 10940 : gcc_assert (flags < ASAN_CHECK_LAST);
3973 : 10940 : bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
3974 : 10940 : bool is_store = (flags & ASAN_CHECK_STORE) != 0;
3975 : 10940 : bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
3976 : :
3977 : 10940 : tree base = gimple_call_arg (g, 1);
3978 : 10940 : tree len = gimple_call_arg (g, 2);
3979 : 10940 : HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
3980 : :
3981 : 21290 : HOST_WIDE_INT size_in_bytes
3982 : 10940 : = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3983 : :
3984 : 10940 : if (use_calls)
3985 : : {
3986 : : /* Instrument using callbacks. */
3987 : 103 : gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3988 : : NOP_EXPR, base);
3989 : 103 : gimple_set_location (g, loc);
3990 : 103 : gsi_insert_before (iter, g, GSI_SAME_STMT);
3991 : 103 : tree base_addr = gimple_assign_lhs (g);
3992 : :
3993 : 103 : int nargs;
3994 : 103 : tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
3995 : 103 : if (nargs == 1)
3996 : 75 : g = gimple_build_call (fun, 1, base_addr);
3997 : : else
3998 : : {
3999 : 28 : gcc_assert (nargs == 2);
4000 : 28 : g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
4001 : : NOP_EXPR, len);
4002 : 28 : gimple_set_location (g, loc);
4003 : 28 : gsi_insert_before (iter, g, GSI_SAME_STMT);
4004 : 28 : tree sz_arg = gimple_assign_lhs (g);
4005 : 28 : g = gimple_build_call (fun, nargs, base_addr, sz_arg);
4006 : : }
4007 : 103 : gimple_set_location (g, loc);
4008 : 103 : gsi_replace (iter, g, false);
4009 : 103 : return false;
4010 : : }
4011 : :
4012 : 10837 : HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
4013 : :
4014 : 10275 : tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
4015 : 10837 : tree shadow_type = TREE_TYPE (shadow_ptr_type);
4016 : :
4017 : 10837 : gimple_stmt_iterator gsi = *iter;
4018 : :
4019 : 10837 : if (!is_non_zero_len)
4020 : : {
4021 : : /* So, the length of the memory area to asan-protect is
4022 : : non-constant. Let's guard the generated instrumentation code
4023 : : like:
4024 : :
4025 : : if (len != 0)
4026 : : {
4027 : : //asan instrumentation code goes here.
4028 : : }
4029 : :          // fallthrough instructions, starting with *ITER.  */
4030 : :
4031 : 0 : g = gimple_build_cond (NE_EXPR,
4032 : : len,
4033 : 0 : build_int_cst (TREE_TYPE (len), 0),
4034 : : NULL_TREE, NULL_TREE);
4035 : 0 : gimple_set_location (g, loc);
4036 : :
4037 : 0 : basic_block then_bb, fallthrough_bb;
4038 : 0 : insert_if_then_before_iter (as_a <gcond *> (g), iter,
4039 : : /*then_more_likely_p=*/true,
4040 : : &then_bb, &fallthrough_bb);
4041 : : /* Note that fallthrough_bb starts with the statement that was
4042 : : pointed to by ITER. */
4043 : :
4044 : :       /* The 'then block' of the 'if (len != 0)' condition is where
4045 : : we'll generate the asan instrumentation code now. */
4046 : 0 : gsi = gsi_last_bb (then_bb);
4047 : : }
4048 : :
4049 : : /* Get an iterator on the point where we can add the condition
4050 : : statement for the instrumentation. */
4051 : 10837 : basic_block then_bb, else_bb;
4052 : 10837 : gsi = create_cond_insert_point (&gsi, /*before_p*/false,
4053 : : /*then_more_likely_p=*/false,
4054 : : /*create_then_fallthru_edge*/recover_p,
4055 : : &then_bb,
4056 : : &else_bb);
4057 : :
4058 : 10837 : g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
4059 : : NOP_EXPR, base);
4060 : 10837 : gimple_set_location (g, loc);
4061 : 10837 : gsi_insert_before (&gsi, g, GSI_NEW_STMT);
4062 : 10837 : tree base_addr = gimple_assign_lhs (g);
4063 : :
4064 : 10837 : tree t = NULL_TREE;
4065 : 10837 : if (real_size_in_bytes >= 8)
4066 : : {
4067 : 7182 : tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
4068 : : shadow_ptr_type);
4069 : 7182 : t = shadow;
4070 : : }
4071 : : else
4072 : : {
4073 : : /* Slow path for 1, 2 and 4 byte accesses. */
4074 : : /* Test (shadow != 0)
4075 : : & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
4076 : 3655 : tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
4077 : : shadow_ptr_type);
4078 : 3655 : gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
4079 : 3655 : gimple_seq seq = NULL;
4080 : 3655 : gimple_seq_add_stmt (&seq, shadow_test);
4081 : :       /* Accesses aligned to >= 8 bytes can test just
4082 : : (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
4083 : : to be 0. */
4084 : 3655 : if (align < 8)
4085 : : {
4086 : 2654 : gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
4087 : : base_addr, 7));
4088 : 2654 : gimple_seq_add_stmt (&seq,
4089 : 5308 : build_type_cast (shadow_type,
4090 : : gimple_seq_last (seq)));
4091 : 2654 : if (real_size_in_bytes > 1)
4092 : 907 : gimple_seq_add_stmt (&seq,
4093 : 907 : build_assign (PLUS_EXPR,
4094 : : gimple_seq_last (seq),
4095 : 907 : real_size_in_bytes - 1));
4096 : 5308 : t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
4097 : : }
4098 : : else
4099 : 1001 : t = build_int_cst (shadow_type, real_size_in_bytes - 1);
4100 : 3655 : gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
4101 : 7310 : gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
4102 : : gimple_seq_last (seq)));
4103 : 7310 : t = gimple_assign_lhs (gimple_seq_last (seq));
4104 : 3655 : gimple_seq_set_location (seq, loc);
4105 : 3655 : gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
4106 : :
4107 : : /* For non-constant, misaligned or otherwise weird access sizes,
4108 : :          check the first and the last byte.  */
4109 : 3655 : if (size_in_bytes == -1)
4110 : : {
4111 : 562 : g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
4112 : : MINUS_EXPR, len,
4113 : : build_int_cst (pointer_sized_int_node, 1));
4114 : 562 : gimple_set_location (g, loc);
4115 : 562 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
4116 : 562 : tree last = gimple_assign_lhs (g);
4117 : 562 : g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
4118 : : PLUS_EXPR, base_addr, last);
4119 : 562 : gimple_set_location (g, loc);
4120 : 562 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
4121 : 562 : tree base_end_addr = gimple_assign_lhs (g);
4122 : :
4123 : 562 : tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
4124 : : shadow_ptr_type);
4125 : 562 : gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
4126 : 562 : gimple_seq seq = NULL;
4127 : 562 : gimple_seq_add_stmt (&seq, shadow_test);
4128 : 562 : gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
4129 : : base_end_addr, 7));
4130 : 1124 : gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
4131 : : gimple_seq_last (seq)));
4132 : 1124 : gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
4133 : : gimple_seq_last (seq),
4134 : : shadow));
4135 : 1124 : gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
4136 : : gimple_seq_last (seq)));
4137 : 1124 : gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
4138 : : gimple_seq_last (seq)));
4139 : 1124 : t = gimple_assign_lhs (gimple_seq_last (seq));
4140 : 562 : gimple_seq_set_location (seq, loc);
4141 : 562 : gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
4142 : : }
4143 : : }
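 : : /* For the non-constant length case the combined check built above
 : :    is, conceptually (a sketch, not the literal gimple):
 : :
 : :      base_end_addr = base_addr + (len - 1);
 : :      T = slow_check (base_addr) | slow_check (base_end_addr);
 : :
 : :    i.e. only the first and the last byte of the region are tested
 : :    against shadow memory. */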
4144 : :
4145 : 10837 : g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
4146 : : NULL_TREE, NULL_TREE);
4147 : 10837 : gimple_set_location (g, loc);
4148 : 10837 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
4149 : :
4150 : : /* Generate call to the run-time library (e.g. __asan_report_load8). */
4151 : 10837 : gsi = gsi_start_bb (then_bb);
4152 : 10837 : int nargs;
4153 : 10837 : tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
4154 : 10837 : g = gimple_build_call (fun, nargs, base_addr, len);
4155 : 10837 : gimple_set_location (g, loc);
4156 : 10837 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
4157 : :
4158 : 10837 : gsi_remove (iter, true);
4159 : 10837 : *iter = gsi_start_bb (else_bb);
4160 : :
4161 : 10837 : return true;
4162 : : }
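 : : /* For illustration, the CFG produced by the expansion above looks
 : :    like this (a sketch; T is the check value computed above):
 : :
 : :      <cond_bb>: if (T != 0) goto <then_bb>; else goto <else_bb>;
 : :      <then_bb>: __asan_report_load8 (base_addr); // or store/N variant
 : :      <else_bb>: ... the original access; *ITER continues here ...
 : :
 : :    With -fsanitize-recover=address the *_noabort report function
 : :    returns and <then_bb> falls through to <else_bb>; otherwise
 : :    <then_bb> gets no fallthru edge since the report call does not
 : :    return. */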
4163 : :
4164 : : /* Create ASAN shadow variable for a VAR_DECL which has been rewritten
4165 : : into SSA. Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */
4166 : :
4167 : : static tree
4168 : 31 : create_asan_shadow_var (tree var_decl,
4169 : : hash_map<tree, tree> &shadow_vars_mapping)
4170 : : {
4171 : 31 : tree *slot = shadow_vars_mapping.get (var_decl);
4172 : 31 : if (slot == NULL)
4173 : : {
4174 : 31 : tree shadow_var = copy_node (var_decl);
4175 : :
4176 : 31 : copy_body_data id;
4177 : 31 : memset (&id, 0, sizeof (copy_body_data));
4178 : 31 : id.src_fn = id.dst_fn = current_function_decl;
4179 : 31 : copy_decl_for_dup_finish (&id, var_decl, shadow_var);
4180 : :
4181 : 31 : DECL_ARTIFICIAL (shadow_var) = 1;
4182 : 31 : DECL_IGNORED_P (shadow_var) = 1;
4183 : 31 : DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
4184 : 31 : gimple_add_tmp_var (shadow_var);
4185 : :
4186 : 31 : shadow_vars_mapping.put (var_decl, shadow_var);
4187 : 31 : return shadow_var;
4188 : : }
4189 : : else
4190 : 0 : return *slot;
4191 : : }
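 : : /* The mapping above is a simple memoisation cache; the same
 : :    get-or-insert pattern with GCC's hash_map in isolation (a sketch;
 : :    make_value is a hypothetical helper):
 : :
 : :      tree *slot = cache.get (key);
 : :      if (slot == NULL)
 : :        {
 : :          tree val = make_value (key);
 : :          cache.put (key, val);
 : :          return val;
 : :        }
 : :      return *slot;
 : : */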
4192 : :
4193 : : /* Expand ASAN_POISON ifn. */
4194 : :
4195 : : bool
4196 : 36 : asan_expand_poison_ifn (gimple_stmt_iterator *iter,
4197 : : bool *need_commit_edge_insert,
4198 : : hash_map<tree, tree> &shadow_vars_mapping)
4199 : : {
4200 : 36 : gimple *g = gsi_stmt (*iter);
4201 : 36 : tree poisoned_var = gimple_call_lhs (g);
4202 : 36 : if (!poisoned_var || has_zero_uses (poisoned_var))
4203 : : {
4204 : 5 : gsi_remove (iter, true);
4205 : 5 : return true;
4206 : : }
4207 : :
4208 : 31 : if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
4209 : 0 : SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
4210 : : create_tmp_var (TREE_TYPE (poisoned_var)));
4211 : :
4212 : 31 : tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
4213 : : shadow_vars_mapping);
4214 : :
4215 : 31 : bool recover_p;
4216 : 31 : if (flag_sanitize & SANITIZE_USER_ADDRESS)
4217 : 31 : recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
4218 : : else
4219 : 0 : recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
4220 : 31 : tree size = DECL_SIZE_UNIT (shadow_var);
4221 : 31 : gimple *poison_call
4222 : 31 : = gimple_build_call_internal (IFN_ASAN_MARK, 3,
4223 : : build_int_cst (integer_type_node,
4224 : : ASAN_MARK_POISON),
4225 : : build_fold_addr_expr (shadow_var), size);
4226 : :
4227 : 31 : gimple *use;
4228 : 31 : imm_use_iterator imm_iter;
4229 : 93 : FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
4230 : : {
4231 : 62 : if (is_gimple_debug (use))
4232 : 31 : continue;
4233 : :
4234 : 31 : int nargs;
4235 : 31 : bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
4236 : 31 : gcall *call;
4237 : 31 : if (hwasan_sanitize_p ())
4238 : : {
4239 : 0 : tree fun = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MISMATCH4);
4240 : : /* NOTE: hwasan has no __hwasan_report_* functions like asan does.
4241 : : We use __hwasan_tag_mismatch4, passing arguments that describe
4242 : : the size and kind of the access, to report all tag mismatches.
4243 : :
4244 : : The arguments to this function are:
4245 : : Address of invalid access.
4246 : : Bitfield containing information about the access
4247 : : (access_info)
4248 : : Pointer to a frame of registers
4249 : : (for use in printing the contents of registers in a dump)
4250 : : Not used yet -- to be used by inline instrumentation.
4251 : : Size of access.
4252 : :
4253 : : The access_info bitfield encodes the following pieces of
4254 : : information:
4255 : : - Is this a store or load?
4256 : : access_info & 0x10 => store
4257 : : - Should the program continue after reporting the error?
4258 : : access_info & 0x20 => recover
4259 : : - What size access is this (not used here since we can always
4260 : : pass the size in the last argument)
4261 : :
4262 : : if ((access_info & 0xf) == 0xf)
4263 : : size is taken from last argument.
4264 : : else
4265 : : size == 1 << (access_info & 0xf)
4266 : :
4267 : : The last argument contains the size of the access iff the
4268 : : access_info size indicator is 0xf (we always use this argument
4269 : : rather than storing the size in the access_info bitfield).
4270 : :
4271 : : See the function definition `__hwasan_tag_mismatch4` in
4272 : : libsanitizer/hwasan for the full definition.
4273 : : */
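 : : /* For illustration, a runtime decoding access_info according to the
 : :    rules above would do (a sketch; `outsize` stands for the fourth
 : :    argument of __hwasan_tag_mismatch4):
 : :
 : :      bool is_store = (access_info & 0x10) != 0;
 : :      bool recover  = (access_info & 0x20) != 0;
 : :      size_t size   = ((access_info & 0xf) == 0xf)
 : :                      ? outsize
 : :                      : (size_t) 1 << (access_info & 0xf);
 : : */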
4274 : 0 : unsigned access_info = (0x20 * recover_p)
4275 : 0 : + (0x10 * store_p)
4276 : 0 : + (0xf);
4277 : 0 : call = gimple_build_call (fun, 4,
4278 : : build_fold_addr_expr (shadow_var),
4279 : : build_int_cst (pointer_sized_int_node,
4280 : 0 : access_info),
4281 : : build_int_cst (pointer_sized_int_node, 0),
4282 : : size);
4283 : : }
4284 : : else
4285 : : {
4286 : 31 : tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
4287 : : &nargs);
4288 : 31 : call = gimple_build_call (fun, 1,
4289 : : build_fold_addr_expr (shadow_var));
4290 : : }
4291 : 31 : gimple_set_location (call, gimple_location (use));
4292 : 31 : gimple *call_to_insert = call;
4293 : :
4294 : : /* The USE can be a gimple PHI node. If so, insert the call on
4295 : : all edges leading to the PHI node. */
4296 : 31 : if (is_a <gphi *> (use))
4297 : : {
4298 : : gphi *phi = dyn_cast<gphi *> (use);
4299 : 20 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
4300 : 15 : if (gimple_phi_arg_def (phi, i) == poisoned_var)
4301 : : {
4302 : 5 : edge e = gimple_phi_arg_edge (phi, i);
4303 : :
4304 : : /* Do not insert on an edge we can't split. */
4305 : 5 : if (e->flags & EDGE_ABNORMAL)
4306 : 5 : continue;
4307 : :
4308 : 0 : if (call_to_insert == NULL)
4309 : 0 : call_to_insert = gimple_copy (call);
4310 : :
4311 : 0 : gsi_insert_seq_on_edge (e, call_to_insert);
4312 : 0 : *need_commit_edge_insert = true;
4313 : 0 : call_to_insert = NULL;
4314 : : }
4315 : : }
4316 : : else
4317 : : {
4318 : 26 : gimple_stmt_iterator gsi = gsi_for_stmt (use);
4319 : 26 : if (store_p)
4320 : 4 : gsi_replace (&gsi, call, true);
4321 : : else
4322 : 22 : gsi_insert_before (&gsi, call, GSI_NEW_STMT);
4323 : : }
4324 : 31 : }
4325 : :
4326 : 31 : SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
4327 : 31 : SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
4328 : 31 : gsi_replace (iter, poison_call, false);
4329 : :
4330 : 31 : return true;
4331 : : }
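 : : /* Net effect of the expansion above (a sketch):
 : :
 : :      x_1 = ASAN_POISON ();      becomes   ASAN_MARK (POISON, &x', sz);
 : :      ...                                  ...
 : :      use (x_1);                           __asan_report_load<N> (&x');
 : :                                           use (x_1);
 : :
 : :    where x' is the artificial shadow variable created above, so any
 : :    surviving use of the poisoned SSA name reports an access to the
 : :    poisoned object. */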
4332 : :
4333 : : /* Instrument the current function. */
4334 : :
4335 : : static unsigned int
4336 : 6088 : asan_instrument (void)
4337 : : {
4338 : 6088 : if (hwasan_sanitize_p ())
4339 : : {
4340 : 530 : initialize_sanitizer_builtins ();
4341 : 530 : transform_statements ();
4342 : 530 : return 0;
4343 : : }
4344 : :
4345 : 5558 : if (shadow_ptr_types[0] == NULL_TREE)
4346 : 2234 : asan_init_shadow_ptr_types ();
4347 : 5558 : transform_statements ();
4348 : 5558 : last_alloca_addr = NULL_TREE;
4349 : 5558 : return 0;
4350 : : }
4351 : :
4352 : : static bool
4353 : 1450753 : gate_asan (void)
4354 : : {
4355 : 426503 : return sanitize_flags_p (SANITIZE_ADDRESS);
4356 : : }
4357 : :
4358 : : namespace {
4359 : :
4360 : : const pass_data pass_data_asan =
4361 : : {
4362 : : GIMPLE_PASS, /* type */
4363 : : "asan", /* name */
4364 : : OPTGROUP_NONE, /* optinfo_flags */
4365 : : TV_NONE, /* tv_id */
4366 : : ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
4367 : : 0, /* properties_provided */
4368 : : 0, /* properties_destroyed */
4369 : : 0, /* todo_flags_start */
4370 : : TODO_update_ssa, /* todo_flags_finish */
4371 : : };
4372 : :
4373 : : class pass_asan : public gimple_opt_pass
4374 : : {
4375 : : public:
4376 : 570162 : pass_asan (gcc::context *ctxt)
4377 : 1140324 : : gimple_opt_pass (pass_data_asan, ctxt)
4378 : : {}
4379 : :
4380 : : /* opt_pass methods: */
4381 : 285081 : opt_pass * clone () final override { return new pass_asan (m_ctxt); }
4382 : 1024250 : bool gate (function *) final override
4383 : : {
4384 : 1024250 : return gate_asan () || gate_hwasan ();
4385 : : }
4386 : 4650 : unsigned int execute (function *) final override
4387 : : {
4388 : 4650 : return asan_instrument ();
4389 : : }
4390 : :
4391 : : }; // class pass_asan
4392 : :
4393 : : } // anon namespace
4394 : :
4395 : : gimple_opt_pass *
4396 : 285081 : make_pass_asan (gcc::context *ctxt)
4397 : : {
4398 : 285081 : return new pass_asan (ctxt);
4399 : : }
4400 : :
4401 : : namespace {
4402 : :
4403 : : const pass_data pass_data_asan_O0 =
4404 : : {
4405 : : GIMPLE_PASS, /* type */
4406 : : "asan0", /* name */
4407 : : OPTGROUP_NONE, /* optinfo_flags */
4408 : : TV_NONE, /* tv_id */
4409 : : ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
4410 : : 0, /* properties_provided */
4411 : : 0, /* properties_destroyed */
4412 : : 0, /* todo_flags_start */
4413 : : TODO_update_ssa, /* todo_flags_finish */
4414 : : };
4415 : :
4416 : : class pass_asan_O0 : public gimple_opt_pass
4417 : : {
4418 : : public:
4419 : 285081 : pass_asan_O0 (gcc::context *ctxt)
4420 : 570162 : : gimple_opt_pass (pass_data_asan_O0, ctxt)
4421 : : {}
4422 : :
4423 : : /* opt_pass methods: */
4424 : 1450636 : bool gate (function *) final override
4425 : : {
4426 : 1877139 : return !optimize && (gate_asan () || gate_hwasan ());
4427 : : }
4428 : 1438 : unsigned int execute (function *) final override
4429 : : {
4430 : 1438 : return asan_instrument ();
4431 : : }
4432 : :
4433 : : }; // class pass_asan_O0
4434 : :
4435 : : } // anon namespace
4436 : :
4437 : : gimple_opt_pass *
4438 : 285081 : make_pass_asan_O0 (gcc::context *ctxt)
4439 : : {
4440 : 285081 : return new pass_asan_O0 (ctxt);
4441 : : }
4442 : :
4443 : : /* HWASAN */
4444 : :
4445 : : /* For stack tagging:
4446 : :
4447 : : Return the offset from the frame base tag that the "next" expanded object
4448 : : should have. */
4449 : : uint8_t
4450 : 170 : hwasan_current_frame_tag ()
4451 : : {
4452 : 170 : return hwasan_frame_tag_offset;
4453 : : }
4454 : :
4455 : : /* For stack tagging:
4456 : :
4457 : : Return the 'base pointer' for this function. If that base pointer has not
4458 : : yet been created then we create a register to hold it and record the insns
4459 : : to initialize the register in `hwasan_frame_base_init_seq` for later
4460 : : emission. */
4461 : : rtx
4462 : 85 : hwasan_frame_base ()
4463 : : {
4464 : 85 : if (! hwasan_frame_base_ptr)
4465 : : {
4466 : 59 : start_sequence ();
4467 : 59 : hwasan_frame_base_ptr
4468 : 59 : = force_reg (Pmode,
4469 : 59 : targetm.memtag.insert_random_tag (virtual_stack_vars_rtx,
4470 : : NULL_RTX));
4471 : 59 : hwasan_frame_base_init_seq = end_sequence ();
4472 : : }
4473 : :
4474 : 85 : return hwasan_frame_base_ptr;
4475 : : }
4476 : :
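 : : /* A minimal sketch of the deferred-emission idiom used above: build
 : :    the insns into a detached sequence now and splice them in later
 : :    (see hwasan_maybe_emit_frame_base_init below):
 : :
 : :      start_sequence ();
 : :      ... emit insns initialising the register ...
 : :      rtx_insn *seq = end_sequence (); // detached, nothing emitted yet
 : :      ...
 : :      emit_insn_before (seq, parm_birth_insn); // emitted here
 : : */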
4477 : : /* For stack tagging:
4478 : :
4479 : : Check whether this RTX is a standard pointer addressing the base of the
4480 : : stack variables for this frame. Returns true if the RTX is either
4481 : : virtual_stack_vars_rtx or hwasan_frame_base_ptr. */
4482 : : bool
4483 : 1996363 : stack_vars_base_reg_p (rtx base)
4484 : : {
4485 : 1996363 : return base == virtual_stack_vars_rtx || base == hwasan_frame_base_ptr;
4486 : : }
4487 : :
4488 : : /* For stack tagging:
4489 : :
4490 : : Emit frame base initialisation.
4491 : : If hwasan_frame_base has been used before here then
4492 : : hwasan_frame_base_init_seq contains the sequence of instructions to
4493 : : initialize it. This must be put just before the hwasan prologue, so we emit
4494 : : the insns before parm_birth_insn (which will point to the first instruction
4495 : : of the hwasan prologue if it exists).
4496 : :
4497 : : We update `parm_birth_insn` to point to the start of this initialisation
4498 : : since that represents the end of the initialisation done by
4499 : : expand_function_{start,end} functions and we want to maintain that. */
4500 : : void
4501 : 446 : hwasan_maybe_emit_frame_base_init ()
4502 : : {
4503 : 446 : if (! hwasan_frame_base_init_seq)
4504 : : return;
4505 : 16 : emit_insn_before (hwasan_frame_base_init_seq, parm_birth_insn);
4506 : 16 : parm_birth_insn = hwasan_frame_base_init_seq;
4507 : : }
4508 : :
4509 : : /* Record a compile-time constant size stack variable that HWASAN will need to
4510 : : tag. This record of the range of a stack variable will be used by
4511 : : `hwasan_emit_prologue` to emit the RTL at the start of each frame which will
4512 : : set tags in the shadow memory according to the assigned tag for each object.
4513 : :
4514 : : The range that the object spans in stack space should be described by the
4515 : : bounds `untagged_base + nearest_offset` and
4516 : : `untagged_base + farthest_offset`.
4517 : : `tagged_base` is the base address which contains the "base frame tag" for
4518 : : this frame, and from which the value to address this object with will be
4519 : : calculated.
4520 : :
4521 : : We record the `untagged_base` since the functions in the hwasan library we
4522 : : use to tag memory take pointers without a tag. */
4523 : : void
4524 : 85 : hwasan_record_stack_var (rtx untagged_base, rtx tagged_base,
4525 : : poly_int64 nearest_offset, poly_int64 farthest_offset)
4526 : : {
4527 : 85 : hwasan_stack_var cur_var;
4528 : 85 : cur_var.untagged_base = untagged_base;
4529 : 85 : cur_var.tagged_base = tagged_base;
4530 : 85 : cur_var.nearest_offset = nearest_offset;
4531 : 85 : cur_var.farthest_offset = farthest_offset;
4532 : 85 : cur_var.tag_offset = hwasan_current_frame_tag ();
4533 : :
4534 : 85 : hwasan_tagged_stack_vars.safe_push (cur_var);
4535 : 85 : }
4536 : :
4537 : : /* Return the RTX representing the farthest extent of the statically allocated
4538 : : stack objects for this frame. If hwasan_frame_base_ptr has not been
4539 : : initialized then we are not storing any static variables on the stack in
4540 : : this frame. In this case we return NULL_RTX to represent that.
4541 : :
4542 : : Otherwise simply return virtual_stack_vars_rtx + frame_offset. */
4543 : : rtx
4544 : 446 : hwasan_get_frame_extent ()
4545 : : {
4546 : 446 : return (hwasan_frame_base_ptr
4547 : 446 : ? plus_constant (Pmode, virtual_stack_vars_rtx, frame_offset)
4548 : 446 : : NULL_RTX);
4549 : : }
4550 : :
4551 : : /* For stack tagging:
4552 : :
4553 : : Increment the frame tag offset modulo the number of values a tag can represent. */
4554 : : void
4555 : 85 : hwasan_increment_frame_tag ()
4556 : : {
4557 : 85 : uint8_t tag_bits = HWASAN_TAG_SIZE;
4558 : 85 : gcc_assert (HWASAN_TAG_SIZE
4559 : : <= sizeof (hwasan_frame_tag_offset) * CHAR_BIT);
4560 : 85 : hwasan_frame_tag_offset = (hwasan_frame_tag_offset + 1) % (1 << tag_bits);
4561 : : /* The "background tag" of the stack is zero by definition.
4562 : : This is the tag that objects like parameters passed on the stack and
4563 : : spilled registers are given. It is handy to avoid this tag for objects
4564 : : whose tags we decide ourselves, partly to ensure that buffer overruns
4565 : : can't affect these important variables (e.g. saved link register, saved
4566 : : stack pointer etc) and partly to make debugging easier (everything with a
4567 : : tag of zero is space allocated automatically by the compiler).
4568 : :
4569 : : This is not feasible when using random frame tags (the default
4570 : : configuration for hwasan) since the tag for the given frame is randomly
4571 : : chosen at runtime. In order to avoid any tags matching the stack
4572 : : background we would need to decide tag offsets at runtime instead of
4573 : : compile time (and pay the resulting performance cost).
4574 : :
4575 : : When not using random base tags for each frame (i.e. when compiled with
4576 : : `--param hwasan-random-frame-tag=0`) the base tag for each frame is zero.
4577 : : This means the tag that each object gets is equal to the
4578 : : hwasan_frame_tag_offset used in determining it.
4579 : : When this is the case we *can* ensure no object gets the tag of zero by
4580 : : simply ensuring no object has the hwasan_frame_tag_offset of zero.
4581 : :
4582 : : There is the extra complication that we only record the
4583 : : hwasan_frame_tag_offset here (which is the offset from the tag stored in
4584 : : the stack pointer). In the kernel, the tag in the stack pointer is 0xff
4585 : : rather than zero. This does not cause problems since tags of 0xff are
4586 : : never checked in the kernel. As mentioned at the beginning of this
4587 : : comment the background tag of the stack is zero by definition, which means
4588 : : that for the kernel we should skip offsets of both 0 and 1 from the stack
4589 : : pointer. Avoiding the offset of 0 ensures we use a tag which will be
4590 : : checked, avoiding the offset of 1 ensures we use a tag that is not the
4591 : : same as the background. */
4592 : 85 : if (hwasan_frame_tag_offset == 0 && ! param_hwasan_random_frame_tag)
4593 : 0 : hwasan_frame_tag_offset += 1;
4594 : 16 : if (hwasan_frame_tag_offset == 1 && ! param_hwasan_random_frame_tag
4595 : 85 : && sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS))
4596 : 0 : hwasan_frame_tag_offset += 1;
4597 : 85 : }
4598 : :
4599 : : /* Clear internal state for the next function.
4600 : : This function is called before variables on the stack get expanded, in
4601 : : `init_vars_expansion`. */
4602 : : void
4603 : 1190 : hwasan_record_frame_init ()
4604 : : {
4605 : 1190 : delete asan_used_labels;
4606 : 1190 : asan_used_labels = NULL;
4607 : :
4608 : : /* If this isn't the case then some stack variable was recorded *before*
4609 : : hwasan_record_frame_init is called, yet *after* the hwasan prologue for
4610 : : the previous frame was emitted. Such stack variables would not have
4611 : : their shadow stack filled in. */
4612 : 1190 : gcc_assert (hwasan_tagged_stack_vars.is_empty ());
4613 : 1190 : hwasan_frame_base_ptr = NULL_RTX;
4614 : 1190 : hwasan_frame_base_init_seq = NULL;
4615 : :
4616 : : /* When not using a random frame tag we can avoid the background stack
4617 : : color which gives the user a little better debug output upon a crash.
4618 : : Meanwhile, when using a random frame tag it will be nice to avoid adding
4619 : : tags for the first object since that is unnecessary extra work.
4620 : : Hence set the initial hwasan_frame_tag_offset to be 0 if using a random
4621 : : frame tag and 1 otherwise.
4622 : :
4623 : : As described in hwasan_increment_frame_tag, in the kernel the stack
4624 : : pointer has the tag 0xff. That means that to avoid 0xff and 0 (the tag
4625 : : which the kernel does not check and the background tag respectively) we
4626 : : start with a tag offset of 2. */
4627 : 2230 : hwasan_frame_tag_offset = param_hwasan_random_frame_tag
4628 : : ? 0
4629 : 1040 : : sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS) ? 2 : 1;
4630 : 1190 : }
4631 : :
4632 : : /* For stack tagging:
4633 : : (Emits HWASAN equivalent of what is emitted by
4634 : : `asan_emit_stack_protection`).
4635 : :
4636 : : Emits the extra prologue code to set the shadow stack as required for HWASAN
4637 : : stack instrumentation.
4638 : :
4639 : : Uses the vector of recorded stack variables hwasan_tagged_stack_vars. When
4640 : : this function has completed hwasan_tagged_stack_vars is empty and all
4641 : : objects it had pointed to are deallocated. */
4642 : : void
4643 : 446 : hwasan_emit_prologue ()
4644 : : {
4645 : : /* We need untagged base pointers since libhwasan only accepts untagged
4646 : : pointers in __hwasan_tag_memory. We need the tagged base pointer to obtain
4647 : : the base tag for an offset. */
4648 : :
4649 : 446 : if (hwasan_tagged_stack_vars.is_empty ())
4650 : 446 : return;
4651 : :
4652 : 59 : poly_int64 bot = 0, top = 0;
4653 : 144 : for (hwasan_stack_var &cur : hwasan_tagged_stack_vars)
4654 : : {
4655 : 85 : poly_int64 nearest = cur.nearest_offset;
4656 : 85 : poly_int64 farthest = cur.farthest_offset;
4657 : :
4658 : 85 : if (known_ge (nearest, farthest))
4659 : : {
4660 : : top = nearest;
4661 : : bot = farthest;
4662 : : }
4663 : : else
4664 : : {
4665 : : /* Given how these values are calculated, one must be known greater
4666 : : than the other. */
4667 : 0 : gcc_assert (known_le (nearest, farthest));
4668 : 0 : top = farthest;
4669 : 0 : bot = nearest;
4670 : : }
4671 : 85 : poly_int64 size = (top - bot);
4672 : :
4673 : : /* Assert that both edges of this variable are aligned to the HWASAN
4674 : : tag granule size. */
4675 : 170 : gcc_assert (multiple_p (top, HWASAN_TAG_GRANULE_SIZE));
4676 : 170 : gcc_assert (multiple_p (bot, HWASAN_TAG_GRANULE_SIZE));
4677 : 170 : gcc_assert (multiple_p (size, HWASAN_TAG_GRANULE_SIZE));
4678 : :
4679 : 85 : rtx fn = init_one_libfunc ("__hwasan_tag_memory");
4680 : 85 : rtx base_tag = targetm.memtag.extract_tag (cur.tagged_base, NULL_RTX);
4681 : 85 : rtx tag = plus_constant (QImode, base_tag, cur.tag_offset);
4682 : 85 : tag = hwasan_truncate_to_tag_size (tag, NULL_RTX);
4683 : :
4684 : 85 : rtx bottom = convert_memory_address (ptr_mode,
4685 : : plus_constant (Pmode,
4686 : : cur.untagged_base,
4687 : : bot));
4688 : 85 : emit_library_call (fn, LCT_NORMAL, VOIDmode,
4689 : : bottom, ptr_mode,
4690 : : tag, QImode,
4691 : : gen_int_mode (size, ptr_mode), ptr_mode);
4692 : : }
4693 : : /* Clear the stack vars, we've emitted the prologue for them all now. */
4694 : 59 : hwasan_tagged_stack_vars.truncate (0);
4695 : : }
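 : : /* Each loop iteration above boils down to one runtime call of the
 : :    form (a sketch in C; the tag sum is truncated to HWASAN_TAG_SIZE
 : :    bits):
 : :
 : :      __hwasan_tag_memory (untagged_base + bot,   // untagged pointer
 : :                           base_tag + tag_offset, // this object's tag
 : :                           top - bot);            // granule multiple
 : : */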
4696 : :
4697 : : /* For stack tagging:
4698 : :
4699 : : Return RTL insns to clear the tags between DYNAMIC and VARS pointers
4700 : : into the stack. These instructions should be emitted at the end of
4701 : : every function.
4702 : :
4703 : : If `dynamic` is NULL_RTX then no insns are returned. */
4704 : : rtx_insn *
4705 : 446 : hwasan_emit_untag_frame (rtx dynamic, rtx vars)
4706 : : {
4707 : 446 : if (! dynamic)
4708 : : return NULL;
4709 : :
4710 : 59 : start_sequence ();
4711 : :
4712 : 59 : dynamic = convert_memory_address (ptr_mode, dynamic);
4713 : 59 : vars = convert_memory_address (ptr_mode, vars);
4714 : :
4715 : 59 : rtx top_rtx;
4716 : 59 : rtx bot_rtx;
4717 : 59 : if (FRAME_GROWS_DOWNWARD)
4718 : : {
4719 : 59 : top_rtx = vars;
4720 : 59 : bot_rtx = dynamic;
4721 : : }
4722 : : else
4723 : : {
4724 : : top_rtx = dynamic;
4725 : : bot_rtx = vars;
4726 : : }
4727 : :
4728 : 59 : rtx size_rtx = expand_simple_binop (ptr_mode, MINUS, top_rtx, bot_rtx,
4729 : : NULL_RTX, /* unsignedp = */0,
4730 : : OPTAB_DIRECT);
4731 : :
4732 : 59 : rtx fn = init_one_libfunc ("__hwasan_tag_memory");
4733 : 59 : emit_library_call (fn, LCT_NORMAL, VOIDmode,
4734 : : bot_rtx, ptr_mode,
4735 : : HWASAN_STACK_BACKGROUND, QImode,
4736 : : size_rtx, ptr_mode);
4737 : :
4738 : 59 : do_pending_stack_adjust ();
4739 : 59 : return end_sequence ();
4740 : : }
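 : : /* The returned sequence amounts to a single call (a sketch):
 : :
 : :      __hwasan_tag_memory (bot, HWASAN_STACK_BACKGROUND, top - bot);
 : :
 : :    re-colouring the whole frame with the background tag so stale
 : :    stack tags cannot trip later checks. */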
4741 : :
4742 : : /* Needs to be GTY(()), because cgraph_build_static_cdtor may
4743 : : invoke ggc_collect. */
4744 : : static GTY(()) tree hwasan_ctor_statements;
4745 : :
4746 : : /* Insert module initialization into this TU. This initialization calls the
4747 : : initialization code for libhwasan. */
4748 : : void
4749 : 331 : hwasan_finish_file (void)
4750 : : {
4751 : : /* Do not emit constructor initialization for the kernel.
4752 : : (the kernel has its own initialization already). */
4753 : 331 : if (flag_sanitize & SANITIZE_KERNEL_HWADDRESS)
4754 : : return;
4755 : :
4756 : 317 : initialize_sanitizer_builtins ();
4757 : :
4758 : : /* Avoid instrumenting code in the hwasan constructors/destructors. */
4759 : 317 : flag_sanitize &= ~SANITIZE_HWADDRESS;
4760 : 317 : int priority = MAX_RESERVED_INIT_PRIORITY - 1;
4761 : 317 : tree fn = builtin_decl_implicit (BUILT_IN_HWASAN_INIT);
4762 : 317 : append_to_statement_list (build_call_expr (fn, 0), &hwasan_ctor_statements);
4763 : 317 : cgraph_build_static_cdtor ('I', hwasan_ctor_statements, priority);
4764 : 317 : flag_sanitize |= SANITIZE_HWADDRESS;
4765 : : }
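 : : /* The emitted constructor is conceptually equivalent to (a sketch;
 : :    the function name is illustrative, cgraph_build_static_cdtor picks
 : :    the real one):
 : :
 : :      __attribute__ ((constructor))
 : :      static void hwasan_module_ctor (void) { __hwasan_init (); }
 : : */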
4766 : :
4767 : : /* For stack tagging:
4768 : :
4769 : : Truncate `tag` to the number of bits that a tag uses (i.e. to
4770 : : HWASAN_TAG_SIZE). Store the result in `target` if it's convenient. */
4771 : : rtx
4772 : 85 : hwasan_truncate_to_tag_size (rtx tag, rtx target)
4773 : : {
4774 : 85 : gcc_assert (GET_MODE (tag) == QImode);
4775 : 85 : if (HWASAN_TAG_SIZE != GET_MODE_PRECISION (QImode))
4776 : : {
4777 : 85 : gcc_assert (GET_MODE_PRECISION (QImode) > HWASAN_TAG_SIZE);
4778 : 85 : rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_TAG_SIZE) - 1,
4779 : : QImode);
4780 : 85 : tag = expand_simple_binop (QImode, AND, tag, mask, target,
4781 : : /* unsignedp = */1, OPTAB_WIDEN);
4782 : 85 : gcc_assert (tag);
4783 : : }
4784 : 85 : return tag;
4785 : : }
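 : : /* E.g. with a 4-bit tag the mask is (1 << 4) - 1 == 0xf, so a tag
 : :    value of 0x13 truncates to 0x3; on targets where the tag already
 : :    fills QImode the function returns TAG unchanged. */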
4786 : :
4787 : : /* Return the combined_fn for __hwasan_{load,store}{1,2,4,8,16,_n}.
4788 : : IS_STORE is either 1 (for a store) or 0 (for a load). */
4789 : : static combined_fn
4790 : 371 : hwasan_check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
4791 : : int *nargs)
4792 : : {
4793 : 371 : static enum built_in_function check[2][2][6]
4794 : : = { { { BUILT_IN_HWASAN_LOAD1, BUILT_IN_HWASAN_LOAD2,
4795 : : BUILT_IN_HWASAN_LOAD4, BUILT_IN_HWASAN_LOAD8,
4796 : : BUILT_IN_HWASAN_LOAD16, BUILT_IN_HWASAN_LOADN },
4797 : : { BUILT_IN_HWASAN_STORE1, BUILT_IN_HWASAN_STORE2,
4798 : : BUILT_IN_HWASAN_STORE4, BUILT_IN_HWASAN_STORE8,
4799 : : BUILT_IN_HWASAN_STORE16, BUILT_IN_HWASAN_STOREN } },
4800 : : { { BUILT_IN_HWASAN_LOAD1_NOABORT,
4801 : : BUILT_IN_HWASAN_LOAD2_NOABORT,
4802 : : BUILT_IN_HWASAN_LOAD4_NOABORT,
4803 : : BUILT_IN_HWASAN_LOAD8_NOABORT,
4804 : : BUILT_IN_HWASAN_LOAD16_NOABORT,
4805 : : BUILT_IN_HWASAN_LOADN_NOABORT },
4806 : : { BUILT_IN_HWASAN_STORE1_NOABORT,
4807 : : BUILT_IN_HWASAN_STORE2_NOABORT,
4808 : : BUILT_IN_HWASAN_STORE4_NOABORT,
4809 : : BUILT_IN_HWASAN_STORE8_NOABORT,
4810 : : BUILT_IN_HWASAN_STORE16_NOABORT,
4811 : : BUILT_IN_HWASAN_STOREN_NOABORT } } };
4812 : 371 : if (size_in_bytes == -1)
4813 : : {
4814 : 0 : *nargs = 2;
4815 : 0 : return as_combined_fn (check[recover_p][is_store][5]);
4816 : : }
4817 : 371 : *nargs = 1;
4818 : 371 : int size_log2 = exact_log2 (size_in_bytes);
4819 : 371 : gcc_assert (size_log2 >= 0 && size_log2 <= 5);
4820 : 371 : return as_combined_fn (check[recover_p][is_store][size_log2]);
4821 : : }
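 : : /* Example: is_store=true, recover_p=false, size_in_bytes=4 gives
 : :    exact_log2 (4) == 2 and selects BUILT_IN_HWASAN_STORE4, taking the
 : :    address as its only argument; a non-constant size (-1) selects the
 : :    *_N variant, which also takes the length. */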
4822 : :
4823 : : /* Expand the HWASAN_{LOAD,STORE} builtins. */
4824 : : bool
4825 : 371 : hwasan_expand_check_ifn (gimple_stmt_iterator *iter, bool)
4826 : : {
4827 : 371 : gimple *g = gsi_stmt (*iter);
4828 : 371 : location_t loc = gimple_location (g);
4829 : 371 : bool recover_p;
4830 : 371 : if (flag_sanitize & SANITIZE_USER_HWADDRESS)
4831 : 311 : recover_p = (flag_sanitize_recover & SANITIZE_USER_HWADDRESS) != 0;
4832 : : else
4833 : 60 : recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_HWADDRESS) != 0;
4834 : :
4835 : 371 : HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
4836 : 371 : gcc_assert (flags < ASAN_CHECK_LAST);
4837 : 371 : bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
4838 : 371 : bool is_store = (flags & ASAN_CHECK_STORE) != 0;
4839 : 371 : bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
4840 : :
4841 : 371 : tree base = gimple_call_arg (g, 1);
4842 : 371 : tree len = gimple_call_arg (g, 2);
4843 : :
4844 : : /* `align` is unused for HWASAN_CHECK, but we pass the argument anyway
4845 : : since that way the arguments match ASAN_CHECK. */
4846 : : /* HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3)); */
4847 : :
4848 : 742 : unsigned HOST_WIDE_INT size_in_bytes
4849 : 371 : = is_scalar_access ? tree_to_shwi (len) : -1;
4850 : :
4851 : 371 : gimple_stmt_iterator gsi = *iter;
4852 : :
4853 : 371 : if (!is_non_zero_len)
4854 : : {
4855 : : /* The length of the memory area to hwasan-protect is not known
4856 : : to be non-zero at compile time, so guard the generated
4857 : : instrumentation code like:
4858 : :
4859 : : if (len != 0)
4860 : : {
4861 : : // hwasan instrumentation code goes here.
4862 : : }
4863 : : // fallthrough instructions, starting with *ITER. */
4864 : :
4865 : 0 : g = gimple_build_cond (NE_EXPR,
4866 : : len,
4867 : 0 : build_int_cst (TREE_TYPE (len), 0),
4868 : : NULL_TREE, NULL_TREE);
4869 : 0 : gimple_set_location (g, loc);
4870 : :
4871 : 0 : basic_block then_bb, fallthrough_bb;
4872 : 0 : insert_if_then_before_iter (as_a <gcond *> (g), iter,
4873 : : /*then_more_likely_p=*/true,
4874 : : &then_bb, &fallthrough_bb);
4875 : : /* Note that fallthrough_bb starts with the statement that was
4876 : : pointed to by ITER. */
4877 : :
4878 : : /* The 'then block' of the 'if (len != 0)' condition is where
4879 : : we'll generate the hwasan instrumentation code now. */
4880 : 0 : gsi = gsi_last_bb (then_bb);
4881 : : }
4882 : :
4883 : 371 : gimple_seq stmts = NULL;
4884 : 371 : tree base_addr = gimple_build (&stmts, loc, NOP_EXPR,
4885 : : pointer_sized_int_node, base);
4886 : :
4887 : 371 : int nargs = 0;
4888 : 371 : combined_fn fn
4889 : 371 : = hwasan_check_func (is_store, recover_p, size_in_bytes, &nargs);
4890 : 371 : if (nargs == 1)
4891 : 371 : gimple_build (&stmts, loc, fn, void_type_node, base_addr);
4892 : : else
4893 : : {
4894 : 0 : gcc_assert (nargs == 2);
4895 : 0 : tree sz_arg = gimple_build (&stmts, loc, NOP_EXPR,
4896 : : pointer_sized_int_node, len);
4897 : 0 : gimple_build (&stmts, loc, fn, void_type_node, base_addr, sz_arg);
4898 : : }
4899 : :
4900 : 371 : gsi_insert_seq_after (&gsi, stmts, GSI_NEW_STMT);
4901 : 371 : gsi_remove (iter, true);
4902 : 371 : *iter = gsi;
4903 : 371 : return false;
4904 : : }
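 : : /* The expansion result for, e.g., a constant 4-byte store check is
 : :    simply (a sketch):
 : :
 : :      base_addr_N = (uintptr_t) base;
 : :      __hwasan_store4 (base_addr_N);
 : :
 : :    unlike the asan expansion there is no inline shadow test; the tag
 : :    comparison happens inside the library routine. */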
4905 : :
4906 : : /* For stack tagging:
4907 : :
4908 : : Dummy: the HWASAN_MARK internal function survives sanopt and is
4909 : : only expanded during RTL expansion, so this must never be called. */
4910 : : bool
4911 : 0 : hwasan_expand_mark_ifn (gimple_stmt_iterator *)
4912 : : {
4913 : 0 : gcc_unreachable ();
4914 : : }
4915 : :
4916 : : bool
4917 : 1673540 : gate_hwasan ()
4918 : : {
4919 : 1673540 : return hwasan_sanitize_p ();
4920 : : }
4921 : :
4922 : : #include "gt-asan.h"
|