Line data Source code
1 : /* GCC instrumentation plugin for ThreadSanitizer.
2 : Copyright (C) 2011-2026 Free Software Foundation, Inc.
3 : Contributed by Dmitry Vyukov <dvyukov@google.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 :
22 : #include "config.h"
23 : #include "system.h"
24 : #include "coretypes.h"
25 : #include "backend.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "memmodel.h"
29 : #include "gimple.h"
30 : #include "tree-pass.h"
31 : #include "ssa.h"
32 : #include "cgraph.h"
33 : #include "fold-const.h"
34 : #include "gimplify.h"
35 : #include "gimple-iterator.h"
36 : #include "gimplify-me.h"
37 : #include "tree-cfg.h"
38 : #include "tree-iterator.h"
39 : #include "gimple-fold.h"
40 : #include "tree-ssa-loop-ivopts.h"
41 : #include "tree-eh.h"
42 : #include "tsan.h"
43 : #include "stringpool.h"
44 : #include "attribs.h"
45 : #include "asan.h"
46 : #include "builtins.h"
47 : #include "target.h"
48 : #include "diagnostic-core.h"
49 :
/* NOTE(review): stale comment — the per-function access counter it once
   described no longer exists in this file; confirm and remove.  */
51 :
52 : /* Builds the following decl
53 : void __tsan_read/writeX (void *addr); */
54 :
55 : static tree
56 769 : get_memory_access_decl (bool is_write, unsigned size, bool volatilep)
57 : {
58 769 : enum built_in_function fcode;
59 769 : int pos;
60 :
61 769 : if (size <= 1)
62 : pos = 0;
63 765 : else if (size <= 3)
64 : pos = 1;
65 761 : else if (size <= 7)
66 : pos = 2;
67 449 : else if (size <= 15)
68 : pos = 3;
69 : else
70 3 : pos = 4;
71 :
72 769 : if (param_tsan_distinguish_volatile && volatilep)
73 32 : fcode = is_write ? BUILT_IN_TSAN_VOLATILE_WRITE1
74 : : BUILT_IN_TSAN_VOLATILE_READ1;
75 : else
76 737 : fcode = is_write ? BUILT_IN_TSAN_WRITE1
77 : : BUILT_IN_TSAN_READ1;
78 769 : fcode = (built_in_function)(fcode + pos);
79 :
80 769 : return builtin_decl_implicit (fcode);
81 : }
82 :
83 : /* Check as to whether EXPR refers to a store to vptr. */
84 :
85 : static tree
86 796 : is_vptr_store (gimple *stmt, tree expr, bool is_write)
87 : {
88 796 : if (is_write == true
89 294 : && gimple_assign_single_p (stmt)
90 1090 : && TREE_CODE (expr) == COMPONENT_REF)
91 : {
92 35 : tree field = TREE_OPERAND (expr, 1);
93 35 : if (TREE_CODE (field) == FIELD_DECL
94 35 : && DECL_VIRTUAL_P (field))
95 21 : return gimple_assign_rhs1 (stmt);
96 : }
97 : return NULL;
98 : }
99 :
/* Instruments EXPR if needed. If any instrumentation is inserted,
   return true.  GSI points at the statement performing the access,
   IS_WRITE says whether EXPR is being written or read.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  /* Skip accesses with zero, negative or non-constant size.  */
  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  poly_int64 unused_bitsize, unused_bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
			      &mode, &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape,
     they can't escape to other threads then.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!may_be_aliased (base))
	return false;
    }

  /* Read-only data and hard-register variables cannot race.  */
  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  /* The runtime only understands the generic address space.  */
  if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (base))))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  /* Bit-field accesses cannot be addressed directly; widen them to
     the byte range covering the bits and instrument that instead.  */
  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      HOST_WIDE_INT bitpos, bitsize;
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  expr = TREE_OPERAND (expr, 1);
	  /* For writes, use the representative covering the whole
	     storage unit, since the store touches all of it.  */
	  if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
	    expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
	  if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_SIZE (expr)))
	    return false;
	  bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
		   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
	  bitsize = tree_to_uhwi (DECL_SIZE (expr));
	}
      else
	{
	  /* BIT_FIELD_REF: operand 1 is the width, operand 2 the
	     bit position.  */
	  if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
	      || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
	    return false;
	  bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
	  bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
	}
      if (bitpos < 0 || bitsize <= 0)
	return false;
      /* Round the bit range out to whole bytes.  */
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
	     / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
	return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
	return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      /* Reduce the known alignment to the alignment actually implied
	 by the byte offset of the access within BASE.  */
      if ((align - 1) & bitpos)
	{
	  align = (align - 1) & bitpos;
	  align = least_bit_hwi (align);
	}
      /* Build &((char *) &base)[bitpos / 8] as the address to report.  */
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
		     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
	return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
	return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  /* Non-power-of-two, oversized or underaligned accesses go through the
     generic range callbacks; the sized callbacks assume naturally
     aligned power-of-two accesses up to 16 bytes.  */
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
					    ? BUILT_IN_TSAN_WRITE_RANGE
					    : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size,
						   TREE_THIS_VOLATILE (expr)),
			   1, expr_ptr);
  else
    {
      /* Virtual table pointer updates get a dedicated callback so the
	 runtime can suppress benign races on vptr stores.  */
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call. Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization. */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs. */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
251 :
/* Actions for sync/atomic builtin transformations.  Each entry of
   tsan_atomic_table below tags its builtin with one of these, telling
   instrument_builtin_call how to rewrite the call.  */
enum tsan_atomic_action
{
  check_last,		/* Validate the trailing memory-model argument,
			   then redirect the call to the tsan builtin.  */
  add_seq_cst,		/* Append a seq-cst memory-model argument.  */
  add_acquire,		/* Append an acquire memory-model argument.  */
  weak_cas,		/* __atomic compare-exchange, weak variant.  */
  strong_cas,		/* __atomic compare-exchange, strong variant.  */
  bool_cas,		/* __sync bool compare-and-swap.  */
  val_cas,		/* __sync val compare-and-swap.  */
  lock_release,		/* __sync lock release -> atomic store of 0.  */
  fetch_op,		/* op-fetch builtin: call fetch-op tsan builtin,
			   then re-apply OP to recover the new value.  */
  fetch_op_seq_cst,	/* Like fetch_op, but also add seq-cst model.  */
  bool_clear,		/* __atomic_clear -> atomic bool store of 0.  */
  bool_test_and_set	/* __atomic_test_and_set -> atomic exchange.  */
};
259 :
/* Table how to map sync/atomic builtins to their corresponding
   tsan equivalents.  Each row pairs a GCC builtin with the tsan
   runtime builtin it is rewritten to, the rewrite ACTION (see
   tsan_atomic_action above) and, for the fetch_op variants, the tree
   code of the arithmetic to re-apply after the call.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
/* Convenience wrappers: one macro per action kind; CODE is only
   meaningful for the FETCH_OP/FETCH_OPS rows.  */
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  /* __atomic builtins that already carry an explicit memory model:
     loads, stores, exchanges and fetch-op variants, per size.  */
  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  /* op-fetch builtins: tsan only provides fetch-op entry points, so
     the result is recomputed after the call using CODE.  */
  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  /* BIT_NOT_EXPR stands for NAND here; see adjust_result in
     instrument_builtin_call.  */
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  /* Legacy __sync builtins carry no memory-model argument; the model
     implied by their semantics is appended (acquire for lock
     test-and-set, seq-cst otherwise).  */
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  /* Compare-and-exchange families.  */
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
490 :
/* Instrument an atomic builtin.  GSI points at a call to one of the
   __atomic/__sync builtins; if it appears in tsan_atomic_table, rewrite
   it in place into the corresponding __tsan_atomic* runtime call per
   the table's action.  Calls not in the table are left untouched.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  /* Cache up to the first six arguments; no handled builtin takes more.  */
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  /* Linear scan of the table; a builtin may have several rows
     (e.g. weak vs. strong CAS), hence `continue' on a partial match.  */
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	if (fcode == BUILT_IN_ATOMIC_THREAD_FENCE)
	  warning_at (gimple_location (stmt), OPT_Wtsan,
		      "%qs is not supported with %qs", "atomic_thread_fence",
		      "-fsanitize=thread");

	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    /* Leave calls with an out-of-range memory model alone.  */
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    /* Same signature: just retarget the call at the tsan decl.  */
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    maybe_clean_eh_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    /* __sync builtins lack a memory-model argument; append the
	       implied one and retarget the call.  */
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
		/* Shared tail for both fetch_op paths: the tsan builtin
		   returns the OLD value, but the user builtin is
		   op-and-fetch, so recompute lhs = old OP arg.  */
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, NOP_EXPR, args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
		/* BIT_NOT_EXPR stands for NAND. */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    /* NAND needs two statements: ~(old & arg).  */
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, BIT_AND_EXPR,
					     gimple_call_lhs (stmt), args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
		  }
		else
		  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
					   gimple_call_lhs (stmt), args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    /* The weak row only applies when the `weak' argument is a
	       nonzero constant; otherwise fall through to scanning for
	       the strong row.  */
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (tree_fits_uhwi_p (args[4])
		&& memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
	      return;
	    if (tree_fits_uhwi_p (args[5])
		&& memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
	      return;
	    /* Drop the `weak' argument (args[3]); the tsan builtin
	       encodes weakness in its name.  */
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    /* The tsan CAS takes the expected value by address; spill
	       it into an addressable temporary.  */
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
					 NOP_EXPR, args[1]);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		/* __sync_val_compare_and_swap returns the old value,
		   but the tsan CAS returns a success flag; reconstruct
		   it as lhs = success ? expected : value-read-back.  */
		stmt = gsi_stmt (*gsi);
		tree t2 = make_ssa_name (TREE_TYPE (t));
		g = gimple_build_assign (t2, t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		tree cond = make_ssa_name (boolean_type_node);
		g = gimple_build_assign (cond, NE_EXPR,
					 t, build_zero_cst (TREE_TYPE (t)));
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		g = gimple_build_assign (lhs, COND_EXPR, cond, args[1], t2);
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    /* __sync_lock_release -> atomic store of 0 with release
	       semantics.  */
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_clear:
	  case bool_test_and_set:
	    /* The table rows assume an 8-bit bool; on targets where
	       bool is wider, step to the matching wider tsan builtin.  */
	    if (BOOL_TYPE_SIZE != 8)
	      {
		decl = NULL_TREE;
		for (j = 1; j < 5; j++)
		  if (BOOL_TYPE_SIZE == (8 << j))
		    {
		      enum built_in_function tsan_fcode
			= (enum built_in_function)
			  (tsan_atomic_table[i].tsan_fcode + j);
		      decl = builtin_decl_implicit (tsan_fcode);
		      break;
		    }
		if (decl == NULL_TREE)
		  return;
	      }
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    if (tsan_atomic_table[i].action == bool_clear)
	      {
		update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				    build_int_cst (t, 0), last_arg);
		maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
		return;
	      }
	    /* test_and_set: exchange in the target's `true' value.  */
	    t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				t, last_arg);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    lhs = gimple_call_lhs (stmt);
	    if (lhs == NULL_TREE)
	      return;
	    /* Convert the exchanged byte back to the user's bool lhs,
	       comparing against 0 if trueval is not literally 1.  */
	    if (targetm.atomic_test_and_set_trueval != 1
		|| !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (t)))
	      {
		tree new_lhs = make_ssa_name (TREE_TYPE (t));
		gimple_call_set_lhs (stmt, new_lhs);
		if (targetm.atomic_test_and_set_trueval != 1)
		  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
					   build_int_cst (TREE_TYPE (t), 0));
		else
		  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		update_stmt (stmt);
	      }
	    return;
	  default:
	    continue;
	  }
      }
}
709 :
710 : /* Instruments the gimple pointed to by GSI. Return
711 : true if func entry/exit should be instrumented. */
712 :
713 : static bool
714 7031 : instrument_gimple (gimple_stmt_iterator *gsi)
715 : {
716 7031 : gimple *stmt;
717 7031 : tree rhs, lhs;
718 7031 : bool instrumented = false;
719 :
720 7031 : stmt = gsi_stmt (*gsi);
721 7031 : if (is_gimple_call (stmt)
722 7031 : && (gimple_call_fndecl (stmt)
723 1530 : != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
724 : {
725 : /* All functions with function call will have exit instrumented,
726 : therefore no function calls other than __tsan_func_exit
727 : shall appear in the functions. */
728 1273 : gimple_call_set_tail (as_a <gcall *> (stmt), false);
729 1273 : if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
730 76 : instrument_builtin_call (gsi);
731 1273 : return true;
732 : }
733 5758 : else if (is_gimple_assign (stmt)
734 5758 : && !gimple_clobber_p (stmt))
735 : {
736 1607 : if (gimple_store_p (stmt))
737 : {
738 325 : lhs = gimple_assign_lhs (stmt);
739 325 : instrumented = instrument_expr (*gsi, lhs, true);
740 : }
741 1607 : if (gimple_assign_load_p (stmt))
742 : {
743 538 : rhs = gimple_assign_rhs1 (stmt);
744 538 : instrumented = instrument_expr (*gsi, rhs, false);
745 : }
746 : }
747 : return instrumented;
748 : }
749 :
750 : /* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin. */
751 :
752 : static void
753 593 : replace_func_exit (gimple *stmt)
754 : {
755 593 : tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
756 593 : gimple *g = gimple_build_call (builtin_decl, 0);
757 593 : gimple_set_location (g, cfun->function_end_locus);
758 593 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
759 593 : gsi_replace (&gsi, g, true);
760 593 : }
761 :
762 : /* Instrument function exit. Used when TSAN_FUNC_EXIT does not exist. */
763 :
764 : static void
765 3 : instrument_func_exit (void)
766 : {
767 3 : location_t loc;
768 3 : basic_block exit_bb;
769 3 : gimple_stmt_iterator gsi;
770 3 : gimple *stmt, *g;
771 3 : tree builtin_decl;
772 3 : edge e;
773 3 : edge_iterator ei;
774 :
775 : /* Find all function exits. */
776 3 : exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
777 6 : FOR_EACH_EDGE (e, ei, exit_bb->preds)
778 : {
779 3 : gsi = gsi_last_bb (e->src);
780 3 : stmt = gsi_stmt (gsi);
781 3 : gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
782 : || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
783 3 : loc = gimple_location (stmt);
784 3 : builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
785 3 : g = gimple_build_call (builtin_decl, 0);
786 3 : gimple_set_location (g, loc);
787 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
788 : }
789 3 : }
790 :
/* Instruments all interesting memory accesses in the current function.
   Sets *CFG_CHANGED when dead EH edges were purged.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  /* TSAN_FUNC_EXIT markers found before we knew whether entry/exit
     instrumentation is needed; resolved after the walk below.  */
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	    {
	      /* Once we know entry/exit is wanted, the marker can be
		 replaced immediately; otherwise keep it for later.  */
	      if (fentry_exit_instrument)
		replace_func_exit (stmt);
	      else
		tsan_func_exits.safe_push (stmt);
	      func_exit_seen = true;
	    }
	  else
	    /* instrument_gimple may insert statements around GSI; the
	       param gate lets -param=tsan-instrument-func-entry-exit=0
	       suppress entry/exit instrumentation.  */
	    fentry_exit_instrument
	      |= (instrument_gimple (&gsi)
		  && param_tsan_instrument_func_entry_exit);
	}
      /* Instrumentation may have made EH edges dead.  */
      if (gimple_purge_dead_eh_edges (bb))
	*cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  /* Resolve the deferred markers: replace them if entry/exit
     instrumentation turned out to be needed, drop them otherwise.  */
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
	gsi = gsi_for_stmt (stmt);
	gsi_remove (&gsi, true);
      }
  /* No marker existed at all but exits must be instrumented: insert
     the calls before each return ourselves.  */
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}
838 :
839 : /* Instruments function entry. */
840 :
841 : static void
842 471 : instrument_func_entry (void)
843 : {
844 471 : tree ret_addr, builtin_decl;
845 471 : gimple *g;
846 471 : gimple_seq seq = NULL;
847 :
848 471 : builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
849 471 : g = gimple_build_call (builtin_decl, 1, integer_zero_node);
850 471 : ret_addr = make_ssa_name (ptr_type_node);
851 471 : gimple_call_set_lhs (g, ret_addr);
852 471 : gimple_set_location (g, cfun->function_start_locus);
853 471 : gimple_seq_add_stmt_without_update (&seq, g);
854 :
855 471 : builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
856 471 : g = gimple_build_call (builtin_decl, 1, ret_addr);
857 471 : gimple_set_location (g, cfun->function_start_locus);
858 471 : gimple_seq_add_stmt_without_update (&seq, g);
859 :
860 471 : edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
861 471 : gsi_insert_seq_on_edge_immediate (e, seq);
862 471 : }
863 :
864 : /* ThreadSanitizer instrumentation pass. */
865 :
866 : static unsigned
867 880 : tsan_pass (void)
868 : {
869 880 : initialize_sanitizer_builtins ();
870 880 : bool cfg_changed = false;
871 880 : if (instrument_memory_accesses (&cfg_changed))
872 471 : instrument_func_entry ();
873 880 : return cfg_changed ? TODO_cleanup_cfg : 0;
874 : }
875 :
876 : /* Inserts __tsan_init () into the list of CTORs. */
877 :
878 : void
879 257 : tsan_finish_file (void)
880 : {
881 257 : tree ctor_statements = NULL_TREE;
882 :
883 257 : initialize_sanitizer_builtins ();
884 257 : tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
885 257 : append_to_statement_list (build_call_expr (init_decl, 0),
886 : &ctor_statements);
887 257 : cgraph_build_static_cdtor ('I', ctor_statements,
888 : MAX_RESERVED_INIT_PRIORITY - 1);
889 257 : }
890 :
/* The pass descriptor.  NOTE: pass_data is a positional aggregate
   consumed by the pass manager — field order must not change.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloned so the pass can appear at more than one pipeline position.  */
  opt_pass * clone () final override { return new pass_tsan (m_ctxt); }
  /* Run only when -fsanitize=thread is in effect.  */
  bool gate (function *) final override
  {
    return sanitize_flags_p (SANITIZE_THREAD);
  }

  unsigned int execute (function *) final override { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace
927 :
928 : gimple_opt_pass *
929 285722 : make_pass_tsan (gcc::context *ctxt)
930 : {
931 285722 : return new pass_tsan (ctxt);
932 : }
933 :
namespace {

/* Descriptor for the -O0 variant of the pass.  NOTE: pass_data is a
   positional aggregate consumed by the pass manager — field order must
   not change.  */
const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when -fsanitize=thread is in effect and the optimized
     pass (pass_tsan) is not, i.e. at -O0.  */
  bool gate (function *) final override
  {
    return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
  }

  unsigned int execute (function *) final override { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace
967 :
968 : gimple_opt_pass *
969 285722 : make_pass_tsan_O0 (gcc::context *ctxt)
970 : {
971 285722 : return new pass_tsan_O0 (ctxt);
972 : }
|