Line data Source code
1 : /* Classes for modeling the state of memory.
2 : Copyright (C) 2020-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "analyzer/common.h"
22 :
23 : #include "ordered-hash-map.h"
24 : #include "cfg.h"
25 : #include "sbitmap.h"
26 : #include "stor-layout.h"
27 :
28 : #include "text-art/tree-widget.h"
29 :
30 : #include "analyzer/analyzer-logging.h"
31 : #include "analyzer/supergraph.h"
32 : #include "analyzer/call-string.h"
33 : #include "analyzer/program-point.h"
34 : #include "analyzer/store.h"
35 : #include "analyzer/region-model.h"
36 : #include "analyzer/call-summary.h"
37 : #include "analyzer/analyzer-selftests.h"
38 :
39 : #if ENABLE_ANALYZER
40 :
41 : namespace ana {
42 :
43 : /* Dump SVALS to PP, sorting them to ensure determinism. */
44 :
45 : static void
46 482 : dump_svalue_set (const hash_set <const svalue *> &svals,
47 : pretty_printer *pp, bool simple)
48 : {
49 482 : auto_vec <const svalue *> v;
50 482 : for (hash_set<const svalue *>::iterator iter = svals.begin ();
51 552 : iter != svals.end (); ++iter)
52 : {
53 35 : v.safe_push (*iter);
54 : }
55 482 : v.qsort (svalue::cmp_ptr_ptr);
56 :
57 482 : pp_character (pp, '{');
58 482 : const svalue *sval;
59 482 : unsigned i;
60 999 : FOR_EACH_VEC_ELT (v, i, sval)
61 : {
62 35 : if (i > 0)
63 20 : pp_string (pp, ", ");
64 35 : sval->dump_to_pp (pp, simple);
65 : }
66 482 : pp_character (pp, '}');
67 482 : }
68 :
69 : /* class uncertainty_t. */
70 :
71 : /* Dump this object to PP. */
72 :
73 : void
74 241 : uncertainty_t::dump_to_pp (pretty_printer *pp, bool simple) const
75 : {
76 241 : pp_string (pp, "{m_maybe_bound_svals: ");
77 241 : dump_svalue_set (m_maybe_bound_svals, pp, simple);
78 :
79 241 : pp_string (pp, ", m_mutable_at_unknown_call_svals: ");
80 241 : dump_svalue_set (m_mutable_at_unknown_call_svals, pp, simple);
81 241 : pp_string (pp, "}");
82 241 : }
83 :
/* Dump this object to stderr.
   Debug-only entry point; wraps dump_to_pp with a stderr-backed
   pretty_printer and a trailing newline.  */

DEBUG_FUNCTION void
uncertainty_t::dump (bool simple) const
{
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
}
93 :
94 : /* class binding_key. */
95 :
96 : const binding_key *
97 3500021 : binding_key::make (store_manager *mgr, const region *r)
98 : {
99 3500021 : region_offset offset = r->get_offset (mgr->get_svalue_manager ());
100 3500021 : if (offset.symbolic_p ())
101 34508 : return mgr->get_symbolic_binding (r);
102 : else
103 : {
104 3465513 : bit_size_t bit_size;
105 3465513 : if (r->get_bit_size (&bit_size))
106 : {
107 : /* Must be non-empty. */
108 2182944 : gcc_assert (bit_size > 0);
109 2182944 : return mgr->get_concrete_binding (offset.get_bit_offset (),
110 2182944 : bit_size);
111 : }
112 : else
113 1282569 : return mgr->get_symbolic_binding (r);
114 : }
115 : }
116 :
/* Dump this binding_key to stderr.
   Debug-only helper: prints via dump_to_pp followed by a newline.  */

DEBUG_FUNCTION void
binding_key::dump (bool simple) const
{
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
}
126 :
/* Get a description of this binding_key.
   Renders the key through dump_to_pp into a heap-allocated string;
   the returned label_text owns the copy (via xstrdup/take).  */

label_text
binding_key::get_desc (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_to_pp (&pp, simple);
  return label_text::take (xstrdup (pp_formatted_text (&pp)));
}
137 :
138 : /* qsort callback. */
139 :
140 : int
141 1558 : binding_key::cmp_ptrs (const void *p1, const void *p2)
142 : {
143 1558 : const binding_key * const *pk1 = (const binding_key * const *)p1;
144 1558 : const binding_key * const *pk2 = (const binding_key * const *)p2;
145 1558 : return cmp (*pk1, *pk2);
146 : }
147 :
148 : /* Comparator for binding_keys. */
149 :
150 : int
151 1586 : binding_key::cmp (const binding_key *k1, const binding_key *k2)
152 : {
153 1586 : int concrete1 = k1->concrete_p ();
154 1586 : int concrete2 = k2->concrete_p ();
155 1586 : if (int concrete_cmp = concrete1 - concrete2)
156 : return concrete_cmp;
157 1586 : if (concrete1)
158 : {
159 1586 : const concrete_binding *b1 = (const concrete_binding *)k1;
160 1586 : const concrete_binding *b2 = (const concrete_binding *)k2;
161 1586 : if (int start_cmp = wi::cmp (b1->get_start_bit_offset (),
162 1586 : b2->get_start_bit_offset (),
163 : SIGNED))
164 : return start_cmp;
165 28 : return wi::cmp (b1->get_next_bit_offset (), b2->get_next_bit_offset (),
166 : SIGNED);
167 : }
168 : else
169 : {
170 0 : const symbolic_binding *s1 = (const symbolic_binding *)k1;
171 0 : const symbolic_binding *s2 = (const symbolic_binding *)k2;
172 0 : if (s1 > s2)
173 : return 1;
174 0 : if (s1 < s2)
175 : return -1;
176 0 : return 0;
177 : }
178 : }
179 :
180 : /* struct bit_range. */
181 :
182 : void
183 2021 : bit_range::dump_to_pp (pretty_printer *pp) const
184 : {
185 2021 : byte_range bytes (0, 0);
186 2021 : if (as_byte_range (&bytes))
187 2021 : bytes.dump_to_pp (pp);
188 : else
189 : {
190 0 : pp_string (pp, "start: ");
191 0 : pp_wide_int (pp, m_start_bit_offset, SIGNED);
192 0 : pp_string (pp, ", size: ");
193 0 : pp_wide_int (pp, m_size_in_bits, SIGNED);
194 0 : pp_string (pp, ", next: ");
195 0 : pp_wide_int (pp, get_next_bit_offset (), SIGNED);
196 : }
197 2021 : }
198 :
/* Dump this object to stderr.
   Debug-only helper: prints via dump_to_pp followed by a newline.  */

DEBUG_FUNCTION void
bit_range::dump () const
{
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp);
  pp_newline (&pp);
}
208 :
209 : /* Generate a JSON value for this bit_range.
210 : This is intended for debugging the analyzer rather
211 : than serialization. */
212 :
213 : std::unique_ptr<json::object>
214 4 : bit_range::to_json () const
215 : {
216 4 : auto obj = std::make_unique<json::object> ();
217 4 : obj->set ("start_bit_offset",
218 4 : bit_offset_to_json (m_start_bit_offset));
219 4 : obj->set ("size_in_bits",
220 4 : bit_offset_to_json (m_size_in_bits));
221 4 : return obj;
222 : }
223 :
224 : /* If OTHER is a subset of this, return true and, if OUT is
225 : non-null, write to *OUT the relative range of OTHER within this.
226 : Otherwise return false. */
227 :
228 : bool
229 23304 : bit_range::contains_p (const bit_range &other, bit_range *out) const
230 : {
231 23304 : if (contains_p (other.get_start_bit_offset ())
232 23304 : && contains_p (other.get_last_bit_offset ()))
233 : {
234 15830 : if (out)
235 : {
236 15830 : out->m_start_bit_offset = other.m_start_bit_offset - m_start_bit_offset;
237 15830 : out->m_size_in_bits = other.m_size_in_bits;
238 : }
239 15830 : return true;
240 : }
241 : else
242 7474 : return false;
243 : }
244 :
245 : /* If OTHER intersects this, return true and write
246 : the relative range of OTHER within THIS to *OUT_THIS,
247 : and the relative range of THIS within OTHER to *OUT_OTHER.
248 : Otherwise return false. */
249 :
250 : bool
251 84 : bit_range::intersects_p (const bit_range &other,
252 : bit_range *out_this,
253 : bit_range *out_other) const
254 : {
255 84 : if (get_start_bit_offset () < other.get_next_bit_offset ()
256 168 : && other.get_start_bit_offset () < get_next_bit_offset ())
257 : {
258 84 : bit_offset_t overlap_start
259 84 : = MAX (get_start_bit_offset (),
260 : other.get_start_bit_offset ());
261 84 : bit_offset_t overlap_next
262 84 : = MIN (get_next_bit_offset (),
263 : other.get_next_bit_offset ());
264 84 : if (overlap_next <= overlap_start)
265 : /* If this has happened, some kind of overflow has happened in
266 : our arithmetic. For now, reject such cases. */
267 : return false;
268 84 : bit_range abs_overlap_bits (overlap_start, overlap_next - overlap_start);
269 84 : *out_this = abs_overlap_bits - get_start_bit_offset ();
270 84 : *out_other = abs_overlap_bits - other.get_start_bit_offset ();
271 84 : return true;
272 : }
273 : else
274 0 : return false;
275 : }
276 :
277 : /* Return true if THIS and OTHER intersect and write the number
278 : of bits both buffers overlap to *OUT_NUM_OVERLAP_BITS.
279 :
280 : Otherwise return false. */
281 :
282 : bool
283 0 : bit_range::intersects_p (const bit_range &other,
284 : bit_size_t *out_num_overlap_bits) const
285 : {
286 0 : if (get_start_bit_offset () < other.get_next_bit_offset ()
287 0 : && other.get_start_bit_offset () < get_next_bit_offset ())
288 : {
289 0 : bit_offset_t overlap_start = MAX (get_start_bit_offset (),
290 : other.get_start_bit_offset ());
291 0 : bit_offset_t overlap_next = MIN (get_next_bit_offset (),
292 : other.get_next_bit_offset ());
293 0 : if (overlap_next <= overlap_start)
294 : /* If this has happened, some kind of overflow has happened in
295 : our arithmetic. For now, reject such cases. */
296 : return false;
297 0 : *out_num_overlap_bits = overlap_next - overlap_start;
298 0 : return true;
299 : }
300 : else
301 0 : return false;
302 : }
303 :
304 : /* Return true if THIS exceeds OTHER and write the overhanging
305 : bit range to OUT_OVERHANGING_BIT_RANGE. */
306 :
307 : bool
308 784618 : bit_range::exceeds_p (const bit_range &other,
309 : bit_range *out_overhanging_bit_range) const
310 : {
311 784618 : gcc_assert (!empty_p ());
312 :
313 784618 : if (other.get_next_bit_offset () < get_next_bit_offset ())
314 : {
315 : /* THIS definitely exceeds OTHER. */
316 449 : bit_offset_t start = MAX (get_start_bit_offset (),
317 : other.get_next_bit_offset ());
318 449 : bit_offset_t size = get_next_bit_offset () - start;
319 449 : if (size <= 0)
320 : /* If this has happened, some kind of overflow has happened in
321 : our arithmetic. For now, reject such cases. */
322 : return false;
323 429 : out_overhanging_bit_range->m_start_bit_offset = start;
324 429 : out_overhanging_bit_range->m_size_in_bits = size;
325 429 : return true;
326 : }
327 : else
328 : return false;
329 : }
330 :
331 : /* Return true if THIS falls short of OFFSET and write the
332 : bit range fallen short to OUT_FALL_SHORT_BITS. */
333 :
334 : bool
335 788165 : bit_range::falls_short_of_p (bit_offset_t offset,
336 : bit_range *out_fall_short_bits) const
337 : {
338 788165 : gcc_assert (!empty_p ());
339 :
340 788165 : if (get_start_bit_offset () < offset)
341 : {
342 : /* THIS falls short of OFFSET. */
343 212 : bit_offset_t start = get_start_bit_offset ();
344 212 : bit_offset_t size = MIN (offset, get_next_bit_offset ()) - start;
345 212 : if (size <= 0)
346 : /* If this has happened, some kind of overflow has happened in
347 : our arithmetic. For now, reject such cases. */
348 : return false;
349 192 : out_fall_short_bits->m_start_bit_offset = start;
350 192 : out_fall_short_bits->m_size_in_bits = size;
351 192 : return true;
352 : }
353 : else
354 : return false;
355 : }
356 :
357 : int
358 427 : bit_range::cmp (const bit_range &br1, const bit_range &br2)
359 : {
360 427 : if (int start_cmp = wi::cmps (br1.m_start_bit_offset,
361 427 : br2.m_start_bit_offset))
362 : return start_cmp;
363 :
364 20 : return wi::cmpu (br1.m_size_in_bits, br2.m_size_in_bits);
365 : }
366 :
367 : /* Offset this range by OFFSET. */
368 :
369 : bit_range
370 168 : bit_range::operator- (bit_offset_t offset) const
371 : {
372 168 : return bit_range (m_start_bit_offset - offset, m_size_in_bits);
373 : }
374 :
375 : /* If MASK is a contiguous range of set bits, write them
376 : to *OUT and return true.
377 : Otherwise return false. */
378 :
379 : bool
380 144 : bit_range::from_mask (unsigned HOST_WIDE_INT mask, bit_range *out)
381 : {
382 144 : unsigned iter_bit_idx = 0;
383 144 : unsigned HOST_WIDE_INT iter_bit_mask = 1;
384 :
385 : /* Find the first contiguous run of set bits in MASK. */
386 :
387 : /* Find first set bit in MASK. */
388 780 : while (iter_bit_idx < HOST_BITS_PER_WIDE_INT)
389 : {
390 776 : if (mask & iter_bit_mask)
391 : break;
392 636 : iter_bit_idx++;
393 636 : iter_bit_mask <<= 1;
394 : }
395 144 : if (iter_bit_idx == HOST_BITS_PER_WIDE_INT)
396 : /* MASK is zero. */
397 : return false;
398 :
399 140 : unsigned first_set_iter_bit_idx = iter_bit_idx;
400 140 : unsigned num_set_bits = 1;
401 140 : iter_bit_idx++;
402 140 : iter_bit_mask <<= 1;
403 :
404 : /* Find next unset bit in MASK. */
405 396 : while (iter_bit_idx < HOST_BITS_PER_WIDE_INT)
406 : {
407 396 : if (!(mask & iter_bit_mask))
408 : break;
409 256 : num_set_bits++;
410 256 : iter_bit_idx++;
411 256 : iter_bit_mask <<= 1;
412 : }
413 140 : if (iter_bit_idx == HOST_BITS_PER_WIDE_INT)
414 : {
415 0 : *out = bit_range (first_set_iter_bit_idx, num_set_bits);
416 0 : return true;
417 : }
418 :
419 : /* We now have the first contiguous run of set bits in MASK.
420 : Fail if any other bits are set. */
421 7892 : while (iter_bit_idx < HOST_BITS_PER_WIDE_INT)
422 : {
423 7760 : if (mask & iter_bit_mask)
424 : return false;
425 7752 : iter_bit_idx++;
426 7752 : iter_bit_mask <<= 1;
427 : }
428 :
429 132 : *out = bit_range (first_set_iter_bit_idx, num_set_bits);
430 132 : return true;
431 : }
432 :
433 : /* Attempt to convert this bit_range to a byte_range.
434 : Return true if it is possible, writing the result to *OUT.
435 : Otherwise return false. */
436 :
437 : bool
438 7662 : bit_range::as_byte_range (byte_range *out) const
439 : {
440 7662 : if (m_start_bit_offset % BITS_PER_UNIT == 0
441 7662 : && m_size_in_bits % BITS_PER_UNIT == 0)
442 : {
443 7649 : out->m_start_byte_offset = m_start_bit_offset / BITS_PER_UNIT;
444 7649 : out->m_size_in_bytes = m_size_in_bits / BITS_PER_UNIT;
445 7649 : return true;
446 : }
447 : return false;
448 : }
449 :
450 : /* Dump this object to PP. */
451 :
452 : void
453 2024 : byte_range::dump_to_pp (pretty_printer *pp) const
454 : {
455 2024 : if (m_size_in_bytes == 0)
456 : {
457 0 : pp_string (pp, "empty");
458 : }
459 2024 : else if (m_size_in_bytes == 1)
460 : {
461 1447 : pp_string (pp, "byte ");
462 1447 : pp_wide_int (pp, m_start_byte_offset, SIGNED);
463 : }
464 : else
465 : {
466 577 : pp_string (pp, "bytes ");
467 577 : pp_wide_int (pp, m_start_byte_offset, SIGNED);
468 577 : pp_string (pp, "-");
469 577 : pp_wide_int (pp, get_last_byte_offset (), SIGNED);
470 : }
471 2024 : }
472 :
/* Dump this object to stderr.
   Debug-only helper: prints via dump_to_pp followed by a newline.  */

DEBUG_FUNCTION void
byte_range::dump () const
{
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp);
  pp_newline (&pp);
}
482 :
483 : /* Generate a JSON value for this byte_range.
484 : This is intended for debugging the analyzer rather
485 : than serialization. */
486 :
487 : std::unique_ptr<json::object>
488 4 : byte_range::to_json () const
489 : {
490 4 : auto obj = std::make_unique<json::object> ();
491 4 : obj->set ("start_byte_offset",
492 4 : byte_offset_to_json (m_start_byte_offset));
493 4 : obj->set ("size_in_bytes",
494 4 : byte_offset_to_json (m_size_in_bytes));
495 4 : return obj;
496 : }
497 :
498 : /* If OTHER is a subset of this, return true and write
499 : to *OUT the relative range of OTHER within this.
500 : Otherwise return false. */
501 :
502 : bool
503 925 : byte_range::contains_p (const byte_range &other, byte_range *out) const
504 : {
505 925 : if (contains_p (other.get_start_byte_offset ())
506 925 : && contains_p (other.get_last_byte_offset ()))
507 : {
508 391 : out->m_start_byte_offset = other.m_start_byte_offset - m_start_byte_offset;
509 391 : out->m_size_in_bytes = other.m_size_in_bytes;
510 391 : return true;
511 : }
512 : else
513 534 : return false;
514 : }
515 :
516 : /* qsort comparator for byte ranges. */
517 :
518 : int
519 1810 : byte_range::cmp (const byte_range &br1, const byte_range &br2)
520 : {
521 : /* Order first by offset. */
522 1810 : if (int start_cmp = wi::cmps (br1.m_start_byte_offset,
523 1810 : br2.m_start_byte_offset))
524 : return start_cmp;
525 :
526 : /* ...then by size. */
527 0 : return wi::cmpu (br1.m_size_in_bytes, br2.m_size_in_bytes);
528 : }
529 :
/* class concrete_binding : public binding_key.  */

/* Implementation of binding_key::dump_to_pp vfunc for concrete_binding.
   The "simple" flag is unused: a concrete binding is just its bit range.  */

void
concrete_binding::dump_to_pp (pretty_printer *pp, bool) const
{
  m_bit_range.dump_to_pp (pp);
}
539 :
540 : /* Return true if this binding overlaps with OTHER. */
541 :
542 : bool
543 318100 : concrete_binding::overlaps_p (const concrete_binding &other) const
544 : {
545 318100 : if (get_start_bit_offset () < other.get_next_bit_offset ()
546 318100 : && get_next_bit_offset () > other.get_start_bit_offset ())
547 75350 : return true;
548 : return false;
549 : }
550 :
/* If this is expressible as a concrete byte range, return true
   and write it to *OUT.  Otherwise return false.
   Simply delegates to bit_range::as_byte_range, which requires
   byte-aligned start and size.  */

bool
concrete_binding::get_byte_range (byte_range *out) const
{
  return m_bit_range.as_byte_range (out);
}
559 :
560 : /* Comparator for use by vec<const concrete_binding *>::qsort. */
561 :
562 : int
563 41 : concrete_binding::cmp_ptr_ptr (const void *p1, const void *p2)
564 : {
565 41 : const concrete_binding *b1 = *(const concrete_binding * const *)p1;
566 41 : const concrete_binding *b2 = *(const concrete_binding * const *)p2;
567 :
568 41 : return bit_range::cmp (b1->m_bit_range, b2->m_bit_range);
569 : }
570 :
/* class symbolic_binding : public binding_key.  */

/* Implementation of binding_key::dump_to_pp vfunc for symbolic_binding:
   print the region whose offset/size is not statically known.  */

void
symbolic_binding::dump_to_pp (pretty_printer *pp, bool simple) const
{
  //binding_key::dump_to_pp (pp, simple);
  pp_string (pp, "region: ");
  m_region->dump_to_pp (pp, simple);
}
580 :
581 : /* Comparator for use by vec<const symbolic_binding *>::qsort. */
582 :
583 : int
584 19 : symbolic_binding::cmp_ptr_ptr (const void *p1, const void *p2)
585 : {
586 19 : const symbolic_binding *b1 = *(const symbolic_binding * const *)p1;
587 19 : const symbolic_binding *b2 = *(const symbolic_binding * const *)p2;
588 :
589 19 : return region::cmp_ids (b1->get_region (), b2->get_region ());
590 : }
591 :
592 : /* The store is oblivious to the types of the svalues bound within
593 : it: any type can get bound at any location.
594 : Simplify any casts before binding.
595 :
596 : For example, if we have:
597 : struct big { int ia[1024]; };
598 : struct big src, dst;
599 : memcpy (&dst, &src, sizeof (struct big));
600 : this reaches us in gimple form as:
601 : MEM <unsigned char[4096]> [(char * {ref-all})&dst]
602 : = MEM <unsigned char[4096]> [(char * {ref-all})&src];
603 : Using cast_region when handling the MEM_REF would give us:
604 : INIT_VAL(CAST_REG(unsigned char[4096], src))
605 : as rhs_sval, but we can fold that into a cast svalue:
606 : CAST(unsigned char[4096], INIT_VAL(src))
607 : We can discard that cast from the svalue when binding it in
608 : the store for "dst", and simply store:
609 : cluster for: dst
610 : key: {kind: direct, start: 0, size: 32768, next: 32768}
611 : value: ‘struct big’ {INIT_VAL(src)}. */
612 :
613 : static const svalue *
614 389302 : simplify_for_binding (const svalue *sval)
615 : {
616 389302 : if (const svalue *cast_sval = sval->maybe_undo_cast ())
617 0 : sval = cast_sval;
618 43711 : return sval;
619 : }
620 :
621 : /* class binding_map::const_iterator. */
622 :
623 : bool
624 25639939 : binding_map::const_iterator::operator== (const binding_map::const_iterator &other) const
625 : {
626 25639939 : if (m_concrete != other.m_concrete)
627 : return false;
628 11685417 : if (m_symbolic != other.m_symbolic)
629 516833 : return false;
630 : return true;
631 : }
632 :
633 : binding_map::const_iterator &
634 14463994 : binding_map::const_iterator::operator++ ()
635 : {
636 14463994 : if (m_concrete != m_map.m_concrete.end ())
637 13947161 : ++m_concrete;
638 : else
639 516833 : ++m_symbolic;
640 14463994 : return *this;
641 : }
642 :
643 : binding_map::binding_pair
644 3602870 : binding_map::const_iterator::operator* ()
645 : {
646 3602870 : if (m_concrete != m_map.m_concrete.end ())
647 : {
648 3500039 : const bit_range &bits = m_concrete->first;
649 3500039 : const svalue *sval = m_concrete->second;
650 3500039 : return binding_pair (m_map.m_store_mgr.get_concrete_binding (bits), sval);
651 : }
652 : else
653 : {
654 102831 : gcc_assert (m_symbolic != m_map.m_symbolic.end ());
655 102831 : const region *reg = m_symbolic->m_region;
656 102831 : const svalue *sval = m_symbolic->m_sval;
657 102831 : return binding_pair (m_map.m_store_mgr.get_symbolic_binding (reg), sval);
658 : }
659 : }
660 :
661 : const svalue *
662 10868485 : binding_map::const_iterator::get_svalue () const
663 : {
664 10868485 : if (m_concrete != m_map.m_concrete.end ())
665 10454483 : return m_concrete->second;
666 : else
667 : {
668 414002 : gcc_assert (m_symbolic != m_map.m_symbolic.end ());
669 414002 : return m_symbolic->m_sval;
670 : }
671 : }
672 :
673 : /* class binding_map::iterator. */
674 :
675 : bool
676 14772026 : binding_map::iterator::operator== (const binding_map::iterator &other) const
677 : {
678 14772026 : if (m_concrete != other.m_concrete)
679 : return false;
680 7038904 : if (m_symbolic != other.m_symbolic)
681 394165 : return false;
682 : return true;
683 : }
684 :
685 : binding_map::iterator &
686 8126504 : binding_map::iterator::operator++ ()
687 : {
688 8126504 : if (m_concrete != m_map.m_concrete.end ())
689 7732344 : ++m_concrete;
690 : else
691 394160 : ++m_symbolic;
692 8126504 : return *this;
693 : }
694 :
695 : binding_map::binding_pair
696 8218046 : binding_map::iterator::operator* ()
697 : {
698 8218046 : if (m_concrete != m_map.m_concrete.end ())
699 : {
700 7817705 : const bit_range &bits = m_concrete->first;
701 7817705 : const svalue *&sval = m_concrete->second;
702 7817705 : return binding_pair (m_map.m_store_mgr.get_concrete_binding (bits), sval);
703 : }
704 : else
705 : {
706 400341 : gcc_assert (m_symbolic != m_map.m_symbolic.end ());
707 400341 : const region *reg = m_symbolic->m_region;
708 400341 : const svalue *&sval = m_symbolic->m_sval;
709 400341 : return binding_pair (m_map.m_store_mgr.get_symbolic_binding (reg), sval);
710 : }
711 : }
712 :
/* class binding_map.  */

// Construct an empty binding_map.
// The store_manager reference is kept so that iterators can build
// binding_keys on demand; both binding containers start empty.

binding_map::binding_map (store_manager &store_mgr)
: m_store_mgr (store_mgr),
  m_concrete (),
  m_symbolic ()
{
}
723 :
/* binding_map's copy ctor.
   Shares OTHER's store_manager and copies both binding containers.  */

binding_map::binding_map (const binding_map &other)
: m_store_mgr (other.m_store_mgr),
  m_concrete (other.m_concrete),
  m_symbolic (other.m_symbolic)
{
}
732 :
/* binding_map's assignment operator.
   Both maps must share the same store_manager, and (for now)
   the destination must be empty; see the asserts below.  */

binding_map&
binding_map::operator= (const binding_map &other)
{
  gcc_assert (&m_store_mgr == &other.m_store_mgr);

  /* For now, assume we only ever copy to an empty cluster.  */
  gcc_assert (m_concrete.size () == 0);
  gcc_assert (m_symbolic.size () == 0);

  m_concrete = other.m_concrete;
  m_symbolic = other.m_symbolic;

  return *this;
}
749 :
750 : /* binding_map's equality operator. */
751 :
752 : bool
753 3320585 : binding_map::operator== (const binding_map &other) const
754 : {
755 3320585 : if (m_concrete != other.m_concrete)
756 : return false;
757 3250012 : if (m_symbolic != other.m_symbolic)
758 : return false;
759 :
760 : return true;
761 : }
762 :
/* Generate a hash value for this binding_map.
   Per-binding hashes are combined with XOR, so the result does not
   depend on iteration order of the underlying containers.  */

hashval_t
binding_map::hash () const
{
  hashval_t result = 0;
  for (auto iter : m_concrete)
    {
      /* Mix in the bit range's own hash and the pointer identity
	 of the bound svalue.  */
      result ^= iter.first.hash ();
      inchash::hash hstate;
      hstate.add_ptr (iter.second);
      result ^= hstate.end ();
    }
  for (auto iter : m_symbolic)
    {
      /* Use a new hasher for each key to avoid depending on the ordering
	 of keys when accumulating the result.  */
      inchash::hash hstate;
      hstate.add_ptr (iter.m_region);
      hstate.add_ptr (iter.m_sval);
      result ^= hstate.end ();
    }
  return result;
}
787 :
788 : const svalue *
789 5212529 : binding_map::get (const binding_key *key) const
790 : {
791 5212529 : if (key->symbolic_p ())
792 : {
793 286302 : const ana::symbolic_binding &sym_key
794 : = *static_cast <const ana::symbolic_binding *> (key);
795 286302 : const region *reg = sym_key.get_region ();
796 :
797 337060 : for (auto iter : m_symbolic)
798 : {
799 210515 : if (iter.m_region == reg)
800 5212529 : return iter.m_sval;
801 : }
802 : return nullptr;
803 : }
804 : else
805 : {
806 4926227 : const concrete_binding &conc_key
807 : = *static_cast <const concrete_binding *> (key);
808 4926227 : const bit_range &bits = conc_key.get_bit_range ();
809 :
810 4926227 : concrete_bindings_t::const_iterator iter (m_concrete.find (bits));
811 4926227 : if (iter != m_concrete.end ())
812 4493515 : return iter->second;
813 : else
814 : return nullptr;
815 : }
816 : }
817 :
/* Bind KEY to SVAL in this map.
   A binding_map holds at most one symbolic binding, so binding a
   symbolic key first discards any existing symbolic binding.
   Concrete keys update in place when the exact bit range is already
   bound, otherwise insert a new entry.  */

void
binding_map::put (const binding_key *key, const svalue *sval)
{
  if (key->symbolic_p ())
    {
      const ana::symbolic_binding &sym_key
	= *static_cast <const ana::symbolic_binding *> (key);
      const region *reg = sym_key.get_region ();

      /* Drop any previous symbolic binding: only one is kept.  */
      m_symbolic.clear ();

      m_symbolic.push_back ({reg, sval});
    }
  else
    {
      const concrete_binding &conc_key
	= *static_cast <const concrete_binding *> (key);
      const bit_range &bits = conc_key.get_bit_range ();

      concrete_bindings_t::iterator iter (m_concrete.find (bits));
      if (iter != m_concrete.end ())
	(*iter).second = sval;
      else
	m_concrete.insert ({bits, sval});
    }
}
844 :
/* Replace the svalue at iterator position POS with V, in place.
   POS must be an iterator into this map (asserted); the binding's
   key is left untouched.  */

void
binding_map::overwrite (iterator_t &pos, const svalue *v)
{
  gcc_assert (&pos.m_map == this);
  if (pos.m_symbolic != m_symbolic.end ())
    (*(pos.m_symbolic)).m_sval = v;
  else
    {
      /* Not in the symbolic part, so POS must point at a concrete
	 binding.  */
      gcc_assert (pos.m_concrete != m_concrete.end ());
      (*(pos.m_concrete)).second = v;
    }
}
857 :
858 : void
859 303894 : binding_map::remove (const binding_key *key)
860 : {
861 303894 : if (key->symbolic_p ())
862 9563 : m_symbolic.clear ();
863 : else
864 : {
865 294331 : const concrete_binding &conc_key
866 : = *static_cast <const concrete_binding *> (key);
867 294331 : const bit_range &bits = conc_key.get_bit_range ();
868 294331 : m_concrete.erase (bits);
869 : }
870 303894 : }
871 :
/* Get a const iterator at the start of this map: positioned at the
   beginning of both the concrete and symbolic containers.  */

binding_map::const_iterator_t
binding_map::begin () const
{
  return binding_map::const_iterator_t (*this,
					m_concrete.begin (),
					m_symbolic.begin ());
}
879 :
/* Get a const iterator one past the end of this map: positioned at
   the end of both the concrete and symbolic containers.  */

binding_map::const_iterator_t
binding_map::end () const
{
  return binding_map::const_iterator_t (*this,
					m_concrete.end (),
					m_symbolic.end ());
}
887 :
/* Get a mutable iterator at the start of this map.  */

binding_map::iterator_t
binding_map::begin ()
{
  return binding_map::iterator_t (*this,
				  m_concrete.begin (),
				  m_symbolic.begin ());
}
895 :
/* Get a mutable iterator one past the end of this map.  */

binding_map::iterator_t
binding_map::end ()
{
  return binding_map::iterator_t (*this,
				  m_concrete.end (),
				  m_symbolic.end ());
}
903 :
904 : size_t
905 599530 : binding_map::elements () const
906 : {
907 599530 : return m_concrete.size () + m_symbolic.size ();
908 : }
909 :
/* Dump a representation of this binding_map to PP.
   SIMPLE controls how values and regions are to be printed.
   If MULTILINE, then split the dump over multiple lines and
   use whitespace for readability, otherwise put all on one line.
   NOTE(review): the string literals below are as rendered in this
   view; internal runs of spaces may have been collapsed by the
   coverage tooling — confirm against the upstream source.  */

void
binding_map::dump_to_pp (pretty_printer *pp, bool simple,
			 bool multiline) const
{
  bool first = true;
  for (auto iter : *this)
    {
      const binding_key *key = iter.m_key;
      const svalue *value = iter.m_sval;
      if (multiline)
	{
	  pp_string (pp, " key: {");
	  key->dump_to_pp (pp, simple);
	  pp_string (pp, "}");
	  pp_newline (pp);
	  pp_string (pp, " value: ");
	  /* Show the value's type, if it has one, before the value.  */
	  if (tree t = value->get_type ())
	    dump_quoted_tree (pp, t);
	  pp_string (pp, " {");
	  value->dump_to_pp (pp, simple);
	  pp_string (pp, "}");
	  pp_newline (pp);
	}
      else
	{
	  /* Single-line form: comma-separate successive bindings.  */
	  if (first)
	    first = false;
	  else
	    pp_string (pp, ", ");
	  pp_string (pp, "binding key: {");
	  key->dump_to_pp (pp, simple);
	  pp_string (pp, "}, value: {");
	  value->dump_to_pp (pp, simple);
	  pp_string (pp, "}");
	}
    }
}
952 :
/* Dump a multiline representation of this binding_map to stderr.
   Debug-only helper: calls dump_to_pp with multiline == true.  */

DEBUG_FUNCTION void
binding_map::dump (bool simple) const
{
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
}
962 :
/* Assert that this object is valid.
   Invariants checked:
   - at most one symbolic binding;
   - concrete and symbolic bindings are mutually exclusive;
   - concrete bindings are sorted by start offset and do not overlap.  */

void
binding_map::validate () const
{
  const size_t num_concrete = m_concrete.size ();
  const size_t num_symbolic = m_symbolic.size ();

  /* We shouldn't have more than one symbolic key per cluster
     (or one would have clobbered the other).  */
  gcc_assert (num_symbolic < 2);
  /* We can't have both concrete and symbolic keys.  */
  gcc_assert (num_concrete == 0 || num_symbolic == 0);

  /* Check for overlapping concrete bindings.  */
  for (auto iter = m_concrete.begin (); iter != m_concrete.end (); ++iter)
    {
      /* Compare each binding against its successor in the
	 (ordered) container.  */
      auto next (iter);
      ++next;
      if (next != m_concrete.end ())
	{
	  /* Verify they are in order, and do not overlap.  */
	  const bit_range &iter_bits = iter->first;
	  const bit_range &next_bits = next->first;
	  gcc_assert (iter_bits.get_start_bit_offset ()
		      < next_bits.get_start_bit_offset ());
	  gcc_assert (iter_bits.get_last_bit_offset ()
		      < next_bits.get_start_bit_offset ());
	  gcc_assert (iter_bits.get_next_bit_offset ()
		      <= next_bits.get_start_bit_offset ());
	}
    }
}
996 :
/* Return a new json::object of the form
   {KEY_DESC : SVALUE_DESC,
    ...for the various key/value pairs in this binding_map}.
   Intended for debugging rather than serialization; key order
   follows this map's iteration order.  */

std::unique_ptr<json::object>
binding_map::to_json () const
{
  auto map_obj = std::make_unique<json::object> ();

  for (auto iter : *this)
    {
      const svalue *value = iter.m_sval;
      label_text key_desc = iter.m_key->get_desc ();
      map_obj->set (key_desc.get (), value->to_json ());
    }

  return map_obj;
}
1015 :
/* Add a child to PARENT_WIDGET expressing a binding between
   KEY and SVAL.
   The child's label has the form "KEY: 'TYPE' {SVAL}" (the type
   part is omitted when SVAL has no type), rendered with colors.  */

static void
add_binding_to_tree_widget (text_art::tree_widget &parent_widget,
			    const text_art::dump_widget_info &dwi,
			    const binding_key *key,
			    const svalue *sval)
{
  pretty_printer the_pp;
  pretty_printer * const pp = &the_pp;
  pp_format_decoder (pp) = default_tree_printer;
  pp_show_color (pp) = true;
  const bool simple = true;

  key->dump_to_pp (pp, simple);
  pp_string (pp, ": ");
  if (tree t = sval->get_type ())
    dump_quoted_tree (pp, t);
  pp_string (pp, " {");
  sval->dump_to_pp (pp, simple);
  pp_string (pp, "}");

  parent_widget.add_child (text_art::tree_widget::make (dwi, pp));
}
1041 :
1042 : void
1043 0 : binding_map::add_to_tree_widget (text_art::tree_widget &parent_widget,
1044 : const text_art::dump_widget_info &dwi) const
1045 : {
     : /* Emit one child widget per binding, in this map's iteration order.  */
1046 0 : for (auto iter : *this)
1047 : {
1048 0 : const binding_key *key = iter.m_key;
1049 0 : const svalue *sval = iter.m_sval;
1050 0 : add_binding_to_tree_widget (parent_widget, dwi,
1051 : key, sval);
1052 : }
1053 0 : }
1054 :
1055 :
1056 : /* Comparator for imposing an order on binding_maps. */
1057 :
1058 : int
1059 28 : binding_map::cmp (const binding_map &map1, const binding_map &map2)
1060 : {
     : /* Cheapest discriminator first: maps of different sizes can't be equal.  */
1061 28 : if (int count_cmp = map1.elements () - map2.elements ())
1062 : return count_cmp;
1063 :
     : /* Sort each map's keys so that the element-wise comparison below is
     :    deterministic regardless of hash-map iteration order.  */
1064 28 : auto_vec <const binding_key *> keys1 (map1.elements ())
1065 56 : for (auto iter : map1)
1066 28 : keys1.quick_push (iter.m_key);
1067 28 : keys1.qsort (binding_key::cmp_ptrs);
1068 :
1069 28 : auto_vec <const binding_key *> keys2 (map2.elements ());
1070 56 : for (auto iter : map2)
1071 28 : keys2.quick_push (iter.m_key);
1072 28 : keys2.qsort (binding_key::cmp_ptrs);
1073 :
1074 56 : for (size_t i = 0; i < keys1.length (); i++)
1075 : {
1076 28 : const binding_key *k1 = keys1[i];
1077 28 : const binding_key *k2 = keys2[i];
1078 28 : if (int key_cmp = binding_key::cmp (k1, k2))
1079 : return key_cmp;
     : /* Keys that compare equal are expected to be the very same object
     :    (presumably interned by the manager) — the assert checks this,
     :    which lets us look the key up in both maps below.  */
1080 28 : gcc_assert (k1 == k2);
1081 28 : if (int sval_cmp = svalue::cmp_ptr (map1.get (k1), map2.get (k2)))
1082 : return sval_cmp;
1083 : }
1084 :
1085 : return 0;
1086 28 : }
1087 :
1088 : /* Get the child region of PARENT_REG based upon INDEX within a
1089 : CONSTRUCTOR. */
1090 :
1091 : static const region *
1092 467 : get_subregion_within_ctor (const region *parent_reg, tree index,
1093 : region_model_manager *mgr)
1094 : {
     : /* INDEX within a CONSTRUCTOR is either an array index (INTEGER_CST)
     :    or a field (FIELD_DECL); anything else is unexpected here.  */
1095 467 : switch (TREE_CODE (index))
1096 : {
1097 0 : default:
1098 0 : gcc_unreachable ();
1099 403 : case INTEGER_CST:
1100 403 : {
     : /* Array case: model as an element region of the parent,
     :    using the parent's element type.  */
1101 403 : const svalue *index_sval
1102 403 : = mgr->get_or_create_constant_svalue (index);
1103 403 : return mgr->get_element_region (parent_reg,
1104 403 : TREE_TYPE (parent_reg->get_type ()),
1105 403 : index_sval);
1106 : }
1107 64 : break;
1108 64 : case FIELD_DECL:
     : /* Struct/union case: model as a field region.  */
1109 64 : return mgr->get_field_region (parent_reg, index);
1110 : }
1111 : }
1112 :
1113 : /* Get the child region of PARENT_REG based upon (INDEX, VALUE) within a
1114 : CONSTRUCTOR. */
1115 :
1116 : static const region *
1117 467 : get_subregion_within_ctor_for_ctor_pair (const region *parent_reg,
1118 : tree index,
1119 : tree value,
1120 : region_model_manager *mgr)
1121 : {
1122 467 : if (TREE_CODE (index) == INTEGER_CST
1123 403 : && TREE_CODE (value) == RAW_DATA_CST)
1124 : {
1125 : /* Special-case; see tree.def's description of CONSTRUCTOR.
1126 : We have RAW_DATA_LENGTH of bytes, starting at INDEX's start. */
1127 14 : const region *start_reg
1128 14 : = get_subregion_within_ctor (parent_reg, index, mgr);
1129 : /* Build a bit range, relative to PARENT_REG. */
1130 14 : region_offset start_offset = start_reg->get_offset (mgr);
1131 :
     : /* Bail out (nullptr) if the start isn't at a known bit offset;
     :    the caller treats this as "can't handle".  */
1132 14 : if (!start_offset.concrete_p ())
1133 : return nullptr;
1134 14 : bit_offset_t start_bit_offset = start_offset.get_bit_offset ();
1135 14 : int length = RAW_DATA_LENGTH (value);
1136 14 : bit_range bits (start_bit_offset, length * BITS_PER_UNIT);
1137 :
1138 14 : return mgr->get_bit_range (parent_reg, NULL_TREE, bits);
1139 : }
1140 :
     : /* Common case: a plain index/field lookup.  */
1141 453 : return get_subregion_within_ctor (parent_reg, index, mgr);
1142 : }
1143 :
1144 : /* Get the svalue for VAL, a non-CONSTRUCTOR value within a CONSTRUCTOR. */
1145 :
1146 : static const svalue *
1147 425 : get_svalue_for_ctor_val (tree val, region_model_manager *mgr)
1148 : {
1149 : /* Reuse the get_rvalue logic from region_model. */
     : /* The temporary model is discarded; only the resulting svalue
     :    (owned by MGR) is kept.  A null context is passed, so no
     :    diagnostics are emitted during this lookup.  */
1150 425 : region_model m (mgr);
1151 425 : return m.get_rvalue (path_var (val, 0), nullptr);
1152 425 : }
1153 :
1154 : /* Bind values from CONSTRUCTOR to this map, relative to
1155 : PARENT_REG's relationship to its base region.
1156 : Return true if successful, false if there was a problem (e.g. due
1157 : to hitting a complexity limit). */
1158 :
1159 : bool
1160 164 : binding_map::apply_ctor_to_region (const region *parent_reg, tree ctor,
1161 : region_model_manager *mgr)
1162 : {
1163 164 : gcc_assert (parent_reg);
1164 164 : gcc_assert (TREE_CODE (ctor) == CONSTRUCTOR);
1165 :
1166 164 : unsigned ix;
1167 164 : tree index;
1168 164 : tree val;
1169 164 : tree parent_type = parent_reg->get_type ();
1170 164 : tree field;
     : /* For a RECORD_TYPE, track the "next" field so that entries with a
     :    NULL index can be matched up with fields positionally.  */
1171 164 : if (TREE_CODE (parent_type) == RECORD_TYPE)
1172 55 : field = TYPE_FIELDS (parent_type);
1173 : else
1174 : field = NULL_TREE;
1175 630 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), ix, index, val)
1176 : {
1177 467 : if (!index)
1178 : {
1179 : /* If index is NULL, then iterate through the fields for
1180 : a RECORD_TYPE, or use an INTEGER_CST otherwise.
1181 : Compare with similar logic in output_constructor. */
1182 189 : if (field)
1183 : {
1184 0 : index = field;
1185 0 : field = DECL_CHAIN (field);
1186 : }
1187 : else
1188 189 : index = build_int_cst (integer_type_node, ix);
1189 : }
1190 278 : else if (TREE_CODE (index) == RANGE_EXPR)
1191 : {
     : /* A RANGE_EXPR index binds VAL to every element in
     :    [MIN_INDEX, MAX_INDEX]; a degenerate range is treated as a
     :    single-element binding.  */
1192 0 : tree min_index = TREE_OPERAND (index, 0);
1193 0 : tree max_index = TREE_OPERAND (index, 1);
1194 0 : if (min_index == max_index)
1195 : {
1196 0 : if (!apply_ctor_pair_to_child_region (parent_reg, mgr,
1197 : min_index, val))
1198 : return false;
1199 : }
1200 : else
1201 : {
1202 0 : if (!apply_ctor_val_to_range (parent_reg, mgr,
1203 : min_index, max_index, val))
1204 : return false;
1205 : }
     : /* The RANGE_EXPR pair has been fully handled; skip the
     :    common-case call below.  */
1206 0 : continue;
1207 0 : }
1208 467 : if (!apply_ctor_pair_to_child_region (parent_reg, mgr, index, val))
1209 : return false;
1210 : }
1211 : return true;
1212 : }
1213 :
1214 : /* Bind the value VAL into the range of elements within PARENT_REF
1215 : from MIN_INDEX to MAX_INDEX (including endpoints).
1216 : For use in handling RANGE_EXPR within a CONSTRUCTOR.
1217 : Return true if successful, false if there was a problem (e.g. due
1218 : to hitting a complexity limit). */
1219 :
1220 : bool
1221 0 : binding_map::apply_ctor_val_to_range (const region *parent_reg,
1222 : region_model_manager *mgr,
1223 : tree min_index, tree max_index,
1224 : tree val)
1225 : {
1226 0 : gcc_assert (TREE_CODE (min_index) == INTEGER_CST);
1227 0 : gcc_assert (TREE_CODE (max_index) == INTEGER_CST);
1228 :
1229 : /* Generate a binding key for the range. */
1230 0 : const region *min_element
1231 0 : = get_subregion_within_ctor (parent_reg, min_index, mgr);
1232 0 : const region *max_element
1233 0 : = get_subregion_within_ctor (parent_reg, max_index, mgr);
1234 0 : region_offset min_offset = min_element->get_offset (mgr);
     : /* All the "return false" exits below report "can't handle" to the
     :    caller (e.g. symbolic offsets or empty regions).  */
1235 0 : if (min_offset.symbolic_p ())
1236 : return false;
1237 0 : bit_offset_t start_bit_offset = min_offset.get_bit_offset ();
1238 0 : store_manager *smgr = mgr->get_store_manager ();
1239 0 : if (max_element->empty_p ())
1240 : return false;
1241 0 : const binding_key *max_element_key = binding_key::make (smgr, max_element);
1242 0 : if (max_element_key->symbolic_p ())
1243 : return false;
1244 0 : const concrete_binding *max_element_ckey
1245 0 : = max_element_key->dyn_cast_concrete_binding ();
     : /* The range spans from the start of the MIN element to one past the
     :    end of the MAX element.  */
1246 0 : bit_size_t range_size_in_bits
1247 0 : = max_element_ckey->get_next_bit_offset () - start_bit_offset;
1248 0 : const concrete_binding *range_key
1249 0 : = smgr->get_concrete_binding (start_bit_offset, range_size_in_bits);
1250 0 : if (range_key->symbolic_p ())
1251 : return false;
1252 :
1253 : /* Get the value. */
1254 0 : if (TREE_CODE (val) == CONSTRUCTOR)
1255 : return false;
1256 0 : const svalue *sval = get_svalue_for_ctor_val (val, mgr);
1257 :
1258 : /* Bind the value to the range. */
1259 0 : put (range_key, sval);
1260 0 : return true;
1261 : }
1262 :
1263 : /* Bind the value VAL into INDEX within PARENT_REF.
1264 : For use in handling a pair of entries within a CONSTRUCTOR.
1265 : Return true if successful, false if there was a problem (e.g. due
1266 : to hitting a complexity limit). */
1267 :
1268 : bool
1269 467 : binding_map::apply_ctor_pair_to_child_region (const region *parent_reg,
1270 : region_model_manager *mgr,
1271 : tree index, tree val)
1272 : {
1273 467 : const region *child_reg
1274 467 : = get_subregion_within_ctor_for_ctor_pair (parent_reg, index, val, mgr);
1275 467 : if (!child_reg)
1276 : return false;
     : /* Nested CONSTRUCTORs recurse; leaf values get bound directly.  */
1277 467 : if (TREE_CODE (val) == CONSTRUCTOR)
1278 42 : return apply_ctor_to_region (child_reg, val, mgr);
1279 : else
1280 : {
1281 425 : const svalue *sval = get_svalue_for_ctor_val (val, mgr);
1282 425 : if (child_reg->empty_p ())
1283 : return false;
1284 425 : const binding_key *k
1285 425 : = binding_key::make (mgr->get_store_manager (), child_reg);
1286 : /* Handle the case where we have an unknown size for child_reg
1287 : (e.g. due to it being a trailing field with incomplete array
1288 : type. */
1289 425 : if (!k->concrete_p ())
1290 : {
1291 : /* Assume that sval has a well-defined size for this case. */
1292 5 : tree sval_type = sval->get_type ();
1293 5 : gcc_assert (sval_type);
1294 5 : HOST_WIDE_INT sval_byte_size = int_size_in_bytes (sval_type);
1295 5 : gcc_assert (sval_byte_size != -1);
1296 5 : bit_size_t sval_bit_size = sval_byte_size * BITS_PER_UNIT;
1297 : /* Get offset of child relative to base region. */
1298 5 : region_offset child_base_offset = child_reg->get_offset (mgr);
1299 5 : if (child_base_offset.symbolic_p ())
1300 1 : return false;
1301 : /* Convert to an offset relative to the parent region. */
1302 4 : region_offset parent_base_offset = parent_reg->get_offset (mgr);
1303 4 : gcc_assert (!parent_base_offset.symbolic_p ());
1304 4 : bit_offset_t child_parent_offset
1305 4 : = (child_base_offset.get_bit_offset ()
1306 4 : - parent_base_offset.get_bit_offset ());
1307 : /* Create a concrete key for the child within the parent. */
1308 4 : k = mgr->get_store_manager ()->get_concrete_binding
1309 4 : (child_parent_offset, sval_bit_size);
1310 : }
1311 424 : gcc_assert (k->concrete_p ());
1312 424 : put (k, sval);
1313 424 : return true;
1314 : }
1315 : }
1316 :
1317 : /* Populate OUT with all bindings within this map that overlap KEY. */
1318 :
1319 : void
1320 41554 : binding_map::get_overlapping_bindings (const binding_key *key,
1321 : auto_vec<const binding_key *> *out)
1322 : {
1323 324721 : for (auto iter : *this)
1324 : {
1325 283167 : const binding_key *iter_key = iter.m_key;
     : /* Only a pair of concrete bindings can be tested precisely for
     :    overlap; if either side is symbolic we conservatively treat
     :    the pair as overlapping.  */
1326 566334 : if (const concrete_binding *ckey
1327 283167 : = key->dyn_cast_concrete_binding ())
1328 : {
1329 561540 : if (const concrete_binding *iter_ckey
1330 280770 : = iter_key->dyn_cast_concrete_binding ())
1331 : {
1332 280329 : if (ckey->overlaps_p (*iter_ckey))
1333 37619 : out->safe_push (iter_key);
1334 : }
1335 : else
1336 : {
1337 : /* Assume overlap. */
1338 441 : out->safe_push (iter_key);
1339 : }
1340 : }
1341 : else
1342 : {
1343 : /* Assume overlap. */
1344 2397 : out->safe_push (iter_key);
1345 : }
1346 : }
1347 41554 : }
1348 :
1349 : /* Remove, truncate, and/or split any bindings within this map that
1350 : overlap DROP_KEY.
1351 :
1352 : For example, if we have:
1353 :
1354 : +------------------------------------+
1355 : | old binding |
1356 : +------------------------------------+
1357 :
1358 : which is to be overwritten with:
1359 :
1360 : .......+----------------------+.......
1361 : .......| new binding |.......
1362 : .......+----------------------+.......
1363 :
1364 : this function "cuts a hole" out of the old binding:
1365 :
1366 : +------+......................+------+
1367 : |prefix| hole for new binding |suffix|
1368 : +------+......................+------+
1369 :
1370 : into which the new binding can be added without
1371 : overlapping the prefix or suffix.
1372 :
1373 : The prefix and suffix (if added) will be bound to the pertinent
1374 : parts of the value of the old binding.
1375 :
1376 : For example, given:
1377 : struct s5
1378 : {
1379 : char arr[8];
1380 : };
1381 : void test_5 (struct s5 *p)
1382 : {
1383 : struct s5 f = *p;
1384 : f.arr[3] = 42;
1385 : }
1386 : then after the "f = *p;" we have:
1387 : cluster for: f: INIT_VAL((*INIT_VAL(p_33(D))))
1388 : and at the "f.arr[3] = 42;" we remove the bindings overlapping
1389 : "f.arr[3]", replacing it with a prefix (bytes 0-2) and suffix (bytes 4-7)
1390 : giving:
1391 : cluster for: f
1392 : key: {bytes 0-2}
1393 : value: {BITS_WITHIN(bytes 0-2, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1394 : key: {bytes 4-7}
1395 : value: {BITS_WITHIN(bytes 4-7, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1396 : punching a hole into which the new value can be written at byte 3:
1397 : cluster for: f
1398 : key: {bytes 0-2}
1399 : value: {BITS_WITHIN(bytes 0-2, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1400 : key: {byte 3}
1401 : value: 'char' {(char)42}
1402 : key: {bytes 4-7}
1403 : value: {BITS_WITHIN(bytes 4-7, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1404 :
1405 : If UNCERTAINTY is non-NULL, use it to record any svalues that
1406 : were removed, as being maybe-bound.
1407 :
1408 : If MAYBE_LIVE_VALUES is non-NULL, then use it to record any svalues that
1409 : were removed as being maybe-live.
1410 :
1411 : If ALWAYS_OVERLAP, then assume that DROP_KEY can overlap anything
1412 : in the map, due to one or both of the underlying clusters being
1413 : symbolic (but not the same symbolic region). Hence even if DROP_KEY is a
1414 : concrete binding it could actually be referring to the same memory as
1415 : distinct concrete bindings in the map. Remove all bindings, but
1416 : register any svalues with *UNCERTAINTY. */
1417 :
1418 : void
1419 87581 : binding_map::remove_overlapping_bindings (store_manager *mgr,
1420 : const binding_key *drop_key,
1421 : uncertainty_t *uncertainty,
1422 : svalue_set *maybe_live_values,
1423 : bool always_overlap)
1424 : {
1425 : /* Get the bindings of interest within this map. */
1426 87581 : auto_vec<const binding_key *> bindings;
1427 87581 : if (always_overlap)
1428 92885 : for (auto iter : *this)
1429 46858 : bindings.safe_push (iter.m_key); /* Add all bindings. */
1430 : else
1431 : /* Just add overlapping bindings. */
1432 41554 : get_overlapping_bindings (drop_key, &bindings);
1433 :
1434 : unsigned i;
1435 : const binding_key *iter_binding;
     : /* NB: the affected keys were snapshotted into BINDINGS above, so the
     :    remove/put calls below don't mutate a map we're iterating over.  */
1436 245788 : FOR_EACH_VEC_ELT (bindings, i, iter_binding)
1437 : {
1438 : /* Record any svalues that were removed to *UNCERTAINTY as being
1439 : maybe-bound, provided at least some part of the binding is symbolic.
1440 :
1441 : Specifically, if at least one of the bindings is symbolic, or we
1442 : have ALWAYS_OVERLAP for the case where we have possibly aliasing
1443 : regions, then we don't know that the svalue has been overwritten,
1444 : and should record that to *UNCERTAINTY.
1445 :
1446 : However, if we have concrete keys accessing within the same symbolic
1447 : region, then we *know* that the symbolic region has been overwritten,
1448 : so we don't record it to *UNCERTAINTY, as this could be a genuine
1449 : leak. */
1450 87315 : const svalue *old_sval = get (iter_binding);
1451 87315 : if (uncertainty
1452 109867 : && (drop_key->symbolic_p ()
1453 20439 : || iter_binding->symbolic_p ()
1454 18078 : || always_overlap))
1455 18838 : uncertainty->on_maybe_bound_sval (old_sval);
1456 :
1457 : /* Record any svalues that were removed to *MAYBE_LIVE_VALUES as being
1458 : maybe-live. */
1459 87315 : if (maybe_live_values)
1460 46858 : maybe_live_values->add (old_sval);
1461 :
1462 : /* Begin by removing the old binding. */
1463 87315 : remove (iter_binding);
1464 :
1465 : /* Don't attempt to handle prefixes/suffixes for the
1466 : "always_overlap" case; everything's being removed. */
1467 87315 : if (always_overlap)
1468 46858 : continue;
1469 :
1470 : /* Now potentially add the prefix and suffix. */
     : /* Prefix/suffix splitting only applies when both keys are concrete
     :    bit ranges; symbolic keys were simply dropped above.  */
1471 80914 : if (const concrete_binding *drop_ckey
1472 40457 : = drop_key->dyn_cast_concrete_binding ())
1473 76120 : if (const concrete_binding *iter_ckey
1474 38060 : = iter_binding->dyn_cast_concrete_binding ())
1475 : {
1476 37619 : gcc_assert (drop_ckey->overlaps_p (*iter_ckey));
1477 :
1478 37619 : const bit_range &drop_bits = drop_ckey->get_bit_range ();
1479 37619 : const bit_range &iter_bits = iter_ckey->get_bit_range ();
1480 :
1481 37619 : if (iter_bits.get_start_bit_offset ()
1482 37619 : < drop_bits.get_start_bit_offset ())
1483 : {
1484 : /* We have a truncated prefix. */
1485 1384 : bit_range prefix_bits (iter_bits.get_start_bit_offset (),
1486 2768 : (drop_bits.get_start_bit_offset ()
1487 1384 : - iter_bits.get_start_bit_offset ()));
1488 1384 : const concrete_binding *prefix_key
1489 1384 : = mgr->get_concrete_binding (prefix_bits);
1490 1384 : bit_range rel_prefix (0, prefix_bits.m_size_in_bits);
1491 1384 : const svalue *prefix_sval
1492 1384 : = old_sval->extract_bit_range (NULL_TREE,
1493 : rel_prefix,
1494 : mgr->get_svalue_manager ());
1495 1384 : put (prefix_key, prefix_sval);
1496 : }
1497 :
1498 75238 : if (iter_bits.get_next_bit_offset ()
1499 75238 : > drop_bits.get_next_bit_offset ())
1500 : {
1501 : /* We have a truncated suffix. */
1502 10334 : bit_range suffix_bits (drop_bits.get_next_bit_offset (),
1503 10334 : (iter_bits.get_next_bit_offset ()
1504 20668 : - drop_bits.get_next_bit_offset ()));
1505 10334 : const concrete_binding *suffix_key
1506 10334 : = mgr->get_concrete_binding (suffix_bits);
1507 10334 : bit_range rel_suffix (drop_bits.get_next_bit_offset ()
1508 10334 : - iter_bits.get_start_bit_offset (),
1509 10334 : suffix_bits.m_size_in_bits);
1510 10334 : const svalue *suffix_sval
1511 10334 : = old_sval->extract_bit_range (NULL_TREE,
1512 : rel_suffix,
1513 : mgr->get_svalue_manager ());
1514 10334 : put (suffix_key, suffix_sval);
1515 : }
1516 : }
1517 : }
1518 87581 : }
1519 :
1520 : /* class binding_cluster. */
1521 :
1522 1593235 : binding_cluster::binding_cluster (store_manager &store_mgr,
1523 1593235 : const region *base_region)
     : /* A fresh cluster starts empty and is neither escaped nor touched.  */
1524 1593235 : : m_base_region (base_region), m_map (store_mgr),
1525 1593235 : m_escaped (false), m_touched (false)
1526 : {
1527 1593235 : }
1528 :
1529 : /* binding_cluster's copy ctor. */
1530 :
     : /* Member-wise copy; the map copy is the only non-trivial part.  */
1531 28601221 : binding_cluster::binding_cluster (const binding_cluster &other)
1532 28601221 : : m_base_region (other.m_base_region), m_map (other.m_map),
1533 28601221 : m_escaped (other.m_escaped), m_touched (other.m_touched)
1534 : {
1535 28601221 : }
1536 :
1537 : /* binding_cluster's assignment operator. */
1538 :
1539 : binding_cluster&
1540 0 : binding_cluster::operator= (const binding_cluster &other)
1541 : {
     : /* Assignment is only meaningful between clusters for the same
     :    base region; the base region itself is never reassigned.  */
1542 0 : gcc_assert (m_base_region == other.m_base_region);
1543 0 : m_map = other.m_map;
1544 0 : m_escaped = other.m_escaped;
1545 0 : m_touched = other.m_touched;
1546 0 : return *this;
1547 : }
1548 :
1549 : /* binding_cluster's equality operator. */
1550 :
1551 : bool
1552 3306235 : binding_cluster::operator== (const binding_cluster &other) const
1553 : {
1554 3306235 : if (m_map != other.m_map)
1555 : return false;
1556 :
1557 3244268 : if (m_base_region != other.m_base_region)
1558 : return false;
1559 :
1560 3244268 : if (m_escaped != other.m_escaped)
1561 : return false;
1562 :
1563 3243614 : if (m_touched != other.m_touched)
1564 : return false;
1565 :
1566 3240968 : gcc_checking_assert (hash () == other.hash ());
1567 :
1568 : return true;
1569 : }
1570 :
1571 : /* Generate a hash value for this binding_cluster. */
1572 :
1573 : hashval_t
1574 21300929 : binding_cluster::hash () const
1575 : {
     : /* Hash is based purely on the binding map; the escaped/touched
     :    flags and base region are not folded in.  */
1576 21300929 : return m_map.hash ();
1577 : }
1578 :
1579 : /* Return true if this binding_cluster is symbolic
1580 : i.e. its base region is symbolic. */
1581 :
1582 : bool
1583 8710903 : binding_cluster::symbolic_p () const
1584 : {
     : /* A cluster is "symbolic" iff its base region is a symbolic region.  */
1585 8710903 : return m_base_region->get_kind () == RK_SYMBOLIC;
1586 : }
1587 :
1588 : /* Dump a representation of this binding_cluster to PP.
1589 : SIMPLE controls how values and regions are to be printed.
1590 : If MULTILINE, then split the dump over multiple lines and
1591 : use whitespace for readability, otherwise put all on one line. */
1592 :
1593 : void
1594 1731 : binding_cluster::dump_to_pp (pretty_printer *pp, bool simple,
1595 : bool multiline) const
1596 : {
     : /* Flags first (ESCAPED/TOUCHED), then the binding map itself.
     :    Multiline mode uses one line per item; single-line mode uses
     :    parenthesized markers.  */
1597 1731 : if (m_escaped)
1598 : {
1599 1092 : if (multiline)
1600 : {
1601 904 : pp_string (pp, " ESCAPED");
1602 904 : pp_newline (pp);
1603 : }
1604 : else
1605 188 : pp_string (pp, "(ESCAPED)");
1606 : }
1607 1731 : if (m_touched)
1608 : {
1609 272 : if (multiline)
1610 : {
1611 84 : pp_string (pp, " TOUCHED");
1612 84 : pp_newline (pp);
1613 : }
1614 : else
1615 188 : pp_string (pp, "(TOUCHED)");
1616 : }
1617 :
1618 1731 : m_map.dump_to_pp (pp, simple, multiline);
1619 1731 : }
1620 :
1621 : /* Dump a multiline representation of this binding_cluster to stderr. */
1622 :
1623 : DEBUG_FUNCTION void
1624 0 : binding_cluster::dump (bool simple) const
1625 : {
     : /* Debug-only helper: print this cluster (with its base region)
     :    to stderr, for use from a debugger.  */
1626 0 : tree_dump_pretty_printer pp (stderr);
1627 0 : pp_string (&pp, " cluster for: ");
1628 0 : m_base_region->dump_to_pp (&pp, simple);
1629 0 : pp_string (&pp, ": ");
1630 0 : pp_newline (&pp);
1631 0 : dump_to_pp (&pp, simple, true);
1632 0 : pp_newline (&pp);
1633 0 : }
1634 :
1635 : /* Assert that this object is valid. */
1636 :
1637 : void
1638 12920777 : binding_cluster::validate () const
1639 : {
     : /* Delegate to the binding map's own invariant checks.  */
1640 12920777 : m_map.validate ();
1641 12920777 : }
1642 :
1643 : /* Return a new json::object of the form
1644 : {"escaped": true/false,
1645 : "touched": true/false,
1646 : "map" : object for the binding_map. */
1647 :
1648 : std::unique_ptr<json::object>
1649 0 : binding_cluster::to_json () const
1650 : {
1651 0 : auto cluster_obj = std::make_unique<json::object> ();
1652 :
     : /* Serialize the two state flags plus the full binding map.  */
1653 0 : cluster_obj->set_bool ("escaped", m_escaped);
1654 0 : cluster_obj->set_bool ("touched", m_touched);
1655 0 : cluster_obj->set ("map", m_map.to_json ());
1656 :
1657 0 : return cluster_obj;
1658 : }
1659 :
     : /* Build a tree widget describing this cluster for text-art dumps.
     :    If the whole cluster reduces to a single value, show it inline;
     :    otherwise emit one child widget per binding.  */
1660 : std::unique_ptr<text_art::tree_widget>
1661 0 : binding_cluster::make_dump_widget (const text_art::dump_widget_info &dwi,
1662 : store_manager *mgr) const
1663 : {
1664 0 : pretty_printer the_pp;
1665 0 : pretty_printer * const pp = &the_pp;
1666 0 : pp_format_decoder (pp) = default_tree_printer;
1667 0 : pp_show_color (pp) = true;
1668 0 : const bool simple = true;
1669 :
1670 0 : m_base_region->dump_to_pp (pp, simple);
1671 0 : pp_string (pp, ": ");
1672 :
1673 0 : if (const svalue *sval = maybe_get_simple_value (mgr))
1674 : {
1675 : /* Special-case to simplify dumps for the common case where
1676 : we just have one value directly bound to the whole of a
1677 : region. */
1678 0 : sval->dump_to_pp (pp, simple);
1679 0 : if (escaped_p ())
1680 0 : pp_string (pp, " (ESCAPED)");
1681 0 : if (touched_p ())
1682 0 : pp_string (pp, " (TOUCHED)");
1683 :
1684 0 : return text_art::tree_widget::make (dwi, pp);
1685 : }
1686 : else
1687 : {
1688 0 : if (escaped_p ())
1689 0 : pp_string (pp, " (ESCAPED)");
1690 0 : if (touched_p ())
1691 0 : pp_string (pp, " (TOUCHED)");
1692 :
1693 0 : std::unique_ptr<text_art::tree_widget> cluster_widget
1694 0 : (text_art::tree_widget::make (dwi, pp));
1695 :
1696 0 : m_map.add_to_tree_widget (*cluster_widget, dwi);
1697 :
1698 0 : return cluster_widget;
1699 0 : }
1700 0 : }
1701 :
1702 : /* Add a binding of SVAL of kind KIND to REG, unpacking SVAL if it is a
1703 : compound_sval. */
1704 :
1705 : void
1706 423340 : binding_cluster::bind (store_manager *mgr,
1707 : const region *reg, const svalue *sval)
1708 : {
     : /* Compound svalues are decomposed into per-element bindings rather
     :    than being stored wholesale.  */
1709 846680 : if (const compound_svalue *compound_sval
1710 423340 : = sval->dyn_cast_compound_svalue ())
1711 : {
1712 757 : bind_compound_sval (mgr, reg, compound_sval);
1713 757 : return;
1714 : }
1715 :
     : /* A zero-sized region has nothing to bind.  */
1716 422583 : if (reg->empty_p ())
1717 : return;
1718 422583 : const binding_key *binding = binding_key::make (mgr, reg);
1719 422583 : bind_key (binding, sval);
1720 : }
1722 : /* Bind SVAL to KEY.
1723 : Unpacking of compound_svalues should already have been done by the
1724 : time this is called. */
1725 :
1726 : void
1727 425111 : binding_cluster::bind_key (const binding_key *key, const svalue *sval)
1728 : {
1729 425111 : gcc_assert (sval->get_kind () != SK_COMPOUND);
1730 :
1731 425111 : m_map.put (key, sval);
     : /* Writing through a symbolic key means we can no longer be sure
     :    of the concrete contents of this cluster.  */
1732 425111 : if (key->symbolic_p ())
1733 23977 : m_touched = true;
1734 425111 : }
1735 :
1736 : /* Subroutine of binding_cluster::bind.
1737 : Unpack compound_svals when binding them, so that we bind them
1738 : element-wise. */
1739 :
1740 : void
1741 757 : binding_cluster::bind_compound_sval (store_manager *mgr,
1742 : const region *reg,
1743 : const compound_svalue *compound_sval)
1744 : {
1745 757 : region_offset reg_offset
1746 757 : = reg->get_offset (mgr->get_svalue_manager ());
     : /* If the destination's offset is symbolic we can't rebase the
     :    element keys; conservatively clobber the region and mark the
     :    cluster as touched instead.  */
1747 757 : if (reg_offset.symbolic_p ())
1748 : {
1749 4 : m_touched = true;
1750 4 : clobber_region (mgr, reg);
1751 4 : return;
1752 : }
1753 :
1754 2881 : for (auto iter : *compound_sval)
1755 : {
1756 2128 : const binding_key *iter_key = iter.m_key;
1757 2128 : const svalue *iter_sval = iter.m_sval;
1758 :
1759 4256 : if (const concrete_binding *concrete_key
1760 2128 : = iter_key->dyn_cast_concrete_binding ()
1761 : {
     : /* Shift each element's key from being relative to the compound
     :    value to being relative to this cluster's base region.  */
1762 2128 : bit_offset_t effective_start
1763 2128 : = (concrete_key->get_start_bit_offset ()
1764 2128 : + reg_offset.get_bit_offset ());
1765 2128 : const concrete_binding *effective_concrete_key
1766 2128 : = mgr->get_concrete_binding (effective_start,
1767 : concrete_key->get_size_in_bits ());
1768 2128 : bind_key (effective_concrete_key, iter_sval);
1769 : }
1770 : else
1771 0 : gcc_unreachable ();
1772 : }
1773 : }
1774 :
1775 : /* Remove all bindings overlapping REG within this cluster. */
1776 :
1777 : void
1778 5778 : binding_cluster::clobber_region (store_manager *mgr, const region *reg)
1779 : {
     : /* No uncertainty/maybe-live tracking is wanted for a plain clobber.  */
1780 5778 : remove_overlapping_bindings (mgr, reg, nullptr, nullptr);
1781 5778 : }
1782 :
1783 : /* Remove any bindings for REG within this cluster. */
1784 :
1785 : void
1786 216579 : binding_cluster::purge_region (store_manager *mgr, const region *reg)
1787 : {
     : /* Only decls are purged this way (e.g. when a local goes out of
     :    scope); exactly the binding for REG itself is removed, not
     :    overlapping ones.  */
1788 216579 : gcc_assert (reg->get_kind () == RK_DECL);
1789 216579 : if (reg->empty_p ())
1790 : return;
1791 216579 : const binding_key *binding
1792 216579 : = binding_key::make (mgr, const_cast<region *> (reg));
1793 216579 : m_map.remove (binding);
1794 : }
1795 :
1796 : /* Clobber REG and fill it with repeated copies of SVAL. */
1797 :
1798 : void
1799 1344 : binding_cluster::fill_region (store_manager *mgr,
1800 : const region *reg,
1801 : const svalue *sval)
1802 : {
     : /* Drop anything overlapping REG first, then bind a single
     :    "SVAL repeated for REG's byte size" value over it.  */
1803 1344 : clobber_region (mgr, reg);
1804 :
1805 1344 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
1806 1344 : const svalue *byte_size_sval = reg->get_byte_size_sval (sval_mgr);
1807 1344 : const svalue *fill_sval
1808 1344 : = sval_mgr->get_or_create_repeated_svalue (reg->get_type (),
1809 : byte_size_sval, sval);
1810 1344 : bind (mgr, reg, fill_sval);
1811 1344 : }
1813 : /* Clobber REG within this cluster and fill it with zeroes. */
1814 :
1815 : void
1816 33 : binding_cluster::zero_fill_region (store_manager *mgr, const region *reg)
1817 : {
1818 33 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
1819 33 : const svalue *zero_sval = sval_mgr->get_or_create_int_cst (char_type_node, 0);
1820 33 : fill_region (mgr, reg, zero_sval);
1821 33 : }
1822 :
1823 : /* Mark REG_TO_BIND within this cluster as being unknown.
1824 :
1825 : Remove any bindings overlapping REG_FOR_OVERLAP.
1826 : If UNCERTAINTY is non-NULL, use it to record any svalues that
1827 : had bindings to them removed, as being maybe-bound.
1828 : If MAYBE_LIVE_VALUES is non-NULL, use it to record any svalues that
1829 : had bindings to them removed, as being maybe-live.
1830 :
1831 : REG_TO_BIND and REG_FOR_OVERLAP are the same for
1832 : store::mark_region_as_unknown, but are different in
1833 : store::set_value's alias handling, for handling the case where
1834 : we have a write to a symbolic REG_FOR_OVERLAP. */
1835 :
1836 : void
1837 46280 : binding_cluster::mark_region_as_unknown (store_manager *mgr,
1838 : const region *reg_to_bind,
1839 : const region *reg_for_overlap,
1840 : uncertainty_t *uncertainty,
1841 : svalue_set *maybe_live_values)
1842 : {
1843 46280 : if (reg_to_bind->empty_p ())
1844 : return;
1845 :
     : /* Note the asymmetry: bindings overlapping REG_FOR_OVERLAP are
     :    removed, but the fresh "unknown" is bound to REG_TO_BIND (they
     :    differ in the symbolic-write aliasing case; see header comment).  */
1846 46280 : remove_overlapping_bindings (mgr, reg_for_overlap, uncertainty,
1847 : maybe_live_values);
1848 :
1849 : /* Add a default binding to "unknown". */
1850 46280 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
1851 46280 : const svalue *sval
1852 46280 : = sval_mgr->get_or_create_unknown_svalue (reg_to_bind->get_type ());
1853 46280 : bind (mgr, reg_to_bind, sval);
1854 : }
1855 :
1856 : /* Purge state involving SVAL. */
1857 :
1858 : void
1859 370330 : binding_cluster::purge_state_involving (const svalue *sval,
1860 : region_model_manager *sval_mgr)
1861 : {
     : /* Two-phase update: first collect the affected entries, then apply
     :    the changes, so we never mutate m_map while iterating over it.
     :    Keys (symbolic regions) involving SVAL are removed outright;
     :    values involving SVAL are replaced with "unknown" of the same
     :    type.  */
1862 370330 : auto_vec<const binding_key *> to_remove;
1863 370330 : auto_vec<std::pair<const binding_key *, tree> > to_make_unknown;
1864 745665 : for (auto iter : m_map)
1865 : {
1866 375335 : const binding_key *iter_key = iter.m_key;
1867 750670 : if (const symbolic_binding *symbolic_key
1868 375335 : = iter_key->dyn_cast_symbolic_binding ())
1869 : {
1870 29831 : const region *reg = symbolic_key->get_region ();
1871 29831 : if (reg->involves_p (sval))
1872 0 : to_remove.safe_push (iter_key);
1873 : }
1874 375335 : const svalue *iter_sval = iter.m_sval;
1875 375335 : if (iter_sval->involves_p (sval))
1876 3092 : to_make_unknown.safe_push (std::make_pair(iter_key,
1877 3092 : iter_sval->get_type ()));
1878 : }
1879 370330 : for (auto iter : to_remove)
1880 : {
1881 0 : m_map.remove (iter);
     : /* Dropping a symbolic key loses information about this cluster.  */
1882 0 : m_touched = true;
1883 : }
1884 379606 : for (auto iter : to_make_unknown)
1885 : {
1886 3092 : const svalue *new_sval
1887 3092 : = sval_mgr->get_or_create_unknown_svalue (iter.second);
1888 3092 : m_map.put (iter.first, new_sval);
1889 : }
1890 370330 : }
1891 :
1892 : /* Get any SVAL bound to REG within this cluster via kind KIND,
1893 : without checking parent regions of REG. */
1894 :
1895 : const svalue *
1896 1403105 : binding_cluster::get_binding (store_manager *mgr,
1897 : const region *reg) const
1898 : {
1899 1403105 : if (reg->empty_p ())
1900 : return nullptr;
1901 1403105 : const binding_key *reg_binding = binding_key::make (mgr, reg);
1902 1403105 : const svalue *sval = m_map.get (reg_binding);
1903 1403105 : if (sval)
1904 : {
1905 : /* If we have a struct with a single field, then the binding of
1906 : the field will equal that of the struct, and looking up e.g.
1907 : PARENT_REG.field within:
1908 : cluster for PARENT_REG: INIT_VAL(OTHER_REG)
1909 : will erroneously return INIT_VAL(OTHER_REG), rather than
1910 : SUB_VALUE(INIT_VAL(OTHER_REG), FIELD) == INIT_VAL(OTHER_REG.FIELD).
1911 : Fix this issue by iterating upwards whilst the bindings are equal,
1912 : expressing the lookups as subvalues.
1913 : We have to gather a list of subregion accesses, then walk it
1914 : in reverse to get the subvalues. */
1915 1139186 : auto_vec<const region *> regions;
     : /* Climb while the parent's key is the same key and the bound
     :    value's type doesn't match REG's type.  */
1916 1148000 : while (const region *parent_reg = reg->get_parent_region ())
1917 : {
1918 1148000 : const binding_key *parent_reg_binding
1919 1148000 : = binding_key::make (mgr, parent_reg)
1920 1148000 : if (parent_reg_binding == reg_binding
1921 11008 : && sval->get_type ()
1922 10996 : && reg->get_type ()
1923 1158996 : && sval->get_type () != reg->get_type ())
1924 : {
1925 8814 : regions.safe_push (reg);
1926 8814 : reg = parent_reg;
1927 : }
1928 : else
1929 : break;
1930 8814 : }
     : /* Only re-express as subvalues if we ended on a type match;
     :    otherwise return the raw binding unchanged.  */
1931 1139186 : if (sval->get_type ()
1932 1129972 : && reg->get_type ()
1933 2268284 : && sval->get_type () == reg->get_type ())
1934 : {
1935 930909 : unsigned i;
1936 930909 : const region *iter_reg;
1937 1162816 : FOR_EACH_VEC_ELT_REVERSE (regions, i, iter_reg)
1938 : {
1939 8016 : region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
1940 8016 : sval = rmm_mgr->get_or_create_sub_svalue (iter_reg->get_type (),
1941 : sval, iter_reg);
1942 : }
1943 : }
1944 1139186 : }
1945 : return sval;
1946 : }
1947 :
1948 : /* Get any SVAL bound to REG within this cluster,
1949 : either directly for REG, or recursively checking for bindings within
1950 : parent regions and extracting subvalues if need be. */
1951 :
1952 : const svalue *
1953 1403105 : binding_cluster::get_binding_recursive (store_manager *mgr,
1954 : const region *reg) const
1955 : {
1956 1403105 : if (const svalue *sval = get_binding (mgr, reg))
1957 : return sval;
1958 263919 : if (reg != m_base_region)
1959 161249 : if (const region *parent_reg = reg->get_parent_region ())
1960 322498 : if (const svalue *parent_sval
1961 161249 : = get_binding_recursive (mgr, parent_reg))
1962 : {
1963 : /* Extract child svalue from parent svalue. */
1964 54402 : region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
1965 54402 : return rmm_mgr->get_or_create_sub_svalue (reg->get_type (),
1966 54402 : parent_sval, reg);
1967 : }
1968 : return nullptr;
1969 : }
1970 :
1971 : /* Get any value bound for REG within this cluster. */
1972 :
const svalue *
binding_cluster::get_any_binding (store_manager *mgr,
				  const region *reg) const
{
  /* Look for a direct binding (on REG itself, or extracted from a
     binding on one of its parent regions).  */
  if (const svalue *direct_sval
      = get_binding_recursive (mgr, reg))
    return direct_sval;

  /* If we had a write to a cluster of unknown size, we might
     have a self-binding of the whole base region with an svalue,
     where the base region is symbolic.
     Handle such cases by returning sub_svalue instances.  */
  if (const svalue *cluster_sval = maybe_get_simple_value (mgr))
    {
      /* Extract child svalue from parent svalue.  */
      region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
      return rmm_mgr->get_or_create_sub_svalue (reg->get_type (),
						cluster_sval, reg);
    }

  /* If this cluster has been touched by a symbolic write, then the content
     of any subregion not currently specifically bound is "UNKNOWN".  */
  if (m_touched)
    {
      region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
      return rmm_mgr->get_or_create_unknown_svalue (reg->get_type ());
    }

  /* Alternatively, if this is a symbolic read and the cluster has any
     bindings, then we don't know if we're reading those values or not,
     so the result is also "UNKNOWN".  */
  if (reg->get_offset (mgr->get_svalue_manager ()).symbolic_p ()
      && m_map.elements () > 0)
    {
      region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
      return rmm_mgr->get_or_create_unknown_svalue (reg->get_type ());
    }

  /* Try to synthesize a compound value from the bindings that
     overlap REG.  */
  if (const svalue *compound_sval = maybe_get_compound_binding (mgr, reg))
    return compound_sval;

  /* Otherwise, the initial value, or uninitialized.  */
  return nullptr;
}
2018 :
2019 : /* Attempt to get a compound_svalue for the bindings within the cluster
2020 : affecting REG (which could be the base region itself).
2021 :
2022 : Create a compound_svalue with the subset of bindings the affect REG,
2023 : offsetting them so that the offsets are relative to the start of REG
2024 : within the cluster.
2025 :
2026 : For example, REG could be one element within an array of structs.
2027 :
2028 : Return the resulting compound_svalue, or nullptr if there's a problem. */
2029 :
const svalue *
binding_cluster::maybe_get_compound_binding (store_manager *mgr,
					     const region *reg) const
{
  /* Bail out if either the cluster's or REG's offset is symbolic;
     the bit-range arithmetic below requires concrete offsets.  */
  region_offset cluster_offset
    = m_base_region->get_offset (mgr->get_svalue_manager ());
  if (cluster_offset.symbolic_p ())
    return nullptr;
  region_offset reg_offset = reg->get_offset (mgr->get_svalue_manager ());
  if (reg_offset.symbolic_p ())
    return nullptr;

  if (reg->empty_p ())
    return nullptr;

  region_model_manager *sval_mgr = mgr->get_svalue_manager ();

  /* We will build the result map in two parts:
     (a) result_map, holding the concrete keys from this cluster,

     (b) default_map, holding the initial values for the region
     (e.g. uninitialized, initializer values, or zero), unless this
     cluster has been touched.

     We will populate (a), and as we do, clobber (b), trimming and
     splitting its bindings as necessary.
     Finally, we will merge (b) into (a), giving a concrete map
     that merges both the initial values and the bound values from
     the binding_cluster.
     Doing it this way reduces N for the O(N^2) intersection-finding,
     perhaps we should have a spatial-organized data structure for
     concrete keys, though.  */

  binding_map result_map (*mgr);
  binding_map default_map (*mgr);

  /* Set up default values in default_map.  */
  const svalue *default_sval;
  if (m_touched)
    default_sval = sval_mgr->get_or_create_unknown_svalue (reg->get_type ());
  else
    default_sval = sval_mgr->get_or_create_initial_value (reg);
  const binding_key *default_key = binding_key::make (mgr, reg);

  /* Express the bit-range of the default key for REG relative to REG,
     rather than to the base region.  */
  const concrete_binding *concrete_default_key
    = default_key->dyn_cast_concrete_binding ();
  if (!concrete_default_key)
    return nullptr;
  const concrete_binding *default_key_relative_to_reg
    = mgr->get_concrete_binding (0, concrete_default_key->get_size_in_bits ());

  default_map.put (default_key_relative_to_reg, default_sval);

  for (auto iter : m_map)
    {
      const binding_key *key = iter.m_key;
      const svalue *sval = iter.m_sval;

      if (const concrete_binding *concrete_key
	  = key->dyn_cast_concrete_binding ())
	{
	  const bit_range &bound_range = concrete_key->get_bit_range ();

	  bit_size_t reg_bit_size;
	  if (!reg->get_bit_size (&reg_bit_size))
	    return nullptr;

	  bit_range reg_range (reg_offset.get_bit_offset (),
			       reg_bit_size);

	  /* Skip bindings that are outside the bit range of REG.  */
	  if (!bound_range.intersects_p (reg_range))
	    continue;

	  /* We shouldn't have an exact match; that should have been
	     handled already.  */
	  gcc_assert (!(reg_range == bound_range));

	  bit_range subrange (0, 0);
	  if (reg_range.contains_p (bound_range, &subrange))
	    {
	      /* We have a bound range fully within REG.
		 Add it to map, offsetting accordingly.  */

	      /* Get offset of KEY relative to REG, rather than to
		 the cluster.  */
	      const concrete_binding *offset_concrete_key
		= mgr->get_concrete_binding (subrange);
	      result_map.put (offset_concrete_key, sval);

	      /* Clobber default_map, removing/trimming/splitting where
		 it overlaps with offset_concrete_key.  */
	      default_map.remove_overlapping_bindings (mgr,
						       offset_concrete_key,
						       nullptr, nullptr, false);
	    }
	  else if (bound_range.contains_p (reg_range, &subrange))
	    {
	      /* REG is fully within the bound range, but
		 is not equal to it; we're extracting a subvalue.  */
	      return sval->extract_bit_range (reg->get_type (),
					      subrange,
					      mgr->get_svalue_manager ());
	    }
	  else
	    {
	      /* REG and the bound range partially overlap.  */
	      bit_range reg_subrange (0, 0);
	      bit_range bound_subrange (0, 0);
	      reg_range.intersects_p (bound_range,
				      &reg_subrange, &bound_subrange);

	      /* Get the bits from the bound value for the bits at the
		 intersection (relative to the bound value).  */
	      const svalue *overlap_sval
		= sval->extract_bit_range (NULL_TREE,
					   bound_subrange,
					   mgr->get_svalue_manager ());

	      /* Get key for overlap, relative to the REG.  */
	      const concrete_binding *overlap_concrete_key
		= mgr->get_concrete_binding (reg_subrange);
	      result_map.put (overlap_concrete_key, overlap_sval);

	      /* Clobber default_map, removing/trimming/splitting where
		 it overlaps with overlap_concrete_key.  */
	      default_map.remove_overlapping_bindings (mgr,
						       overlap_concrete_key,
						       nullptr, nullptr, false);
	    }
	}
      else
	/* Can't handle symbolic bindings.  */
	return nullptr;
    }

  if (result_map.elements () == 0)
    return nullptr;

  /* Merge any bindings from default_map into result_map.  */
  for (auto iter : default_map)
    {
      const binding_key *key = iter.m_key;
      const svalue *sval = iter.m_sval;
      result_map.put (key, sval);
    }

  return sval_mgr->get_or_create_compound_svalue (reg->get_type (), result_map);
}
2181 :
2182 : /* Remove, truncate, and/or split any bindings within this map that
2183 : could overlap REG.
2184 :
2185 : If REG's base region or this cluster is symbolic and they're different
2186 : base regions, then remove everything in this cluster's map, on the
2187 : grounds that REG could be referring to the same memory as anything
2188 : in the map.
2189 :
2190 : If UNCERTAINTY is non-NULL, use it to record any svalues that
2191 : were removed, as being maybe-bound.
2192 :
2193 : If MAYBE_LIVE_VALUES is non-NULL, use it to record any svalues that
2194 : were removed, as being maybe-live. */
2195 :
2196 : void
2197 78997 : binding_cluster::remove_overlapping_bindings (store_manager *mgr,
2198 : const region *reg,
2199 : uncertainty_t *uncertainty,
2200 : svalue_set *maybe_live_values)
2201 : {
2202 78997 : if (reg->empty_p ())
2203 : return;
2204 78997 : const binding_key *reg_binding = binding_key::make (mgr, reg);
2205 :
2206 78997 : const region *cluster_base_reg = get_base_region ();
2207 78997 : const region *other_base_reg = reg->get_base_region ();
2208 : /* If at least one of the base regions involved is symbolic, and they're
2209 : not the same base region, then consider everything in the map as
2210 : potentially overlapping with reg_binding (even if it's a concrete
2211 : binding and things in the map are concrete - they could be referring
2212 : to the same memory when the symbolic base regions are taken into
2213 : account). */
2214 78997 : bool always_overlap = (cluster_base_reg != other_base_reg
2215 78997 : && (cluster_base_reg->get_kind () == RK_SYMBOLIC
2216 17981 : || other_base_reg->get_kind () == RK_SYMBOLIC));
2217 78997 : m_map.remove_overlapping_bindings (mgr, reg_binding, uncertainty,
2218 : maybe_live_values,
2219 : always_overlap);
2220 : }
2221 :
2222 : /* Attempt to merge CLUSTER_A and CLUSTER_B into OUT_CLUSTER, using
2223 : MGR and MERGER.
2224 : Return true if they can be merged, false otherwise. */
2225 :
bool
binding_cluster::can_merge_p (const binding_cluster *cluster_a,
			      const binding_cluster *cluster_b,
			      binding_cluster *out_cluster,
			      store *out_store,
			      store_manager *mgr,
			      model_merger *merger)
{
  gcc_assert (out_cluster);

  /* Merge flags ("ESCAPED" and "TOUCHED") by setting the merged flag to
     true if either of the inputs is true.  */
  if ((cluster_a && cluster_a->m_escaped)
      || (cluster_b && cluster_b->m_escaped))
    out_cluster->m_escaped = true;
  if ((cluster_a && cluster_a->m_touched)
      || (cluster_b && cluster_b->m_touched))
    out_cluster->m_touched = true;

  /* At least one of CLUSTER_A and CLUSTER_B are non-NULL, but either
     could be NULL.  Handle these cases: a missing cluster is treated
     as holding the initial values of its regions, so the merger binds
     the other cluster's keys to "unknown".  */
  if (cluster_a == nullptr)
    {
      gcc_assert (cluster_b != nullptr);
      gcc_assert (cluster_b->m_base_region == out_cluster->m_base_region);
      out_cluster->make_unknown_relative_to (cluster_b, out_store, mgr);
      return true;
    }
  if (cluster_b == nullptr)
    {
      gcc_assert (cluster_a != nullptr);
      gcc_assert (cluster_a->m_base_region == out_cluster->m_base_region);
      out_cluster->make_unknown_relative_to (cluster_a, out_store, mgr);
      return true;
    }

  /* The "both inputs are non-NULL" case.  */
  gcc_assert (cluster_a != nullptr && cluster_b != nullptr);
  gcc_assert (cluster_a->m_base_region == out_cluster->m_base_region);
  gcc_assert (cluster_b->m_base_region == out_cluster->m_base_region);

  /* Gather the union of the keys bound in either input cluster.  */
  hash_set<const binding_key *> keys;
  for (auto iter_a : cluster_a->m_map)
    {
      const binding_key *key_a = iter_a.m_key;
      keys.add (key_a);
    }
  for (auto iter_b : cluster_b->m_map)
    {
      const binding_key *key_b = iter_b.m_key;
      keys.add (key_b);
    }
  int num_symbolic_keys = 0;
  int num_concrete_keys = 0;
  for (hash_set<const binding_key *>::iterator iter = keys.begin ();
       iter != keys.end (); ++iter)
    {
      region_model_manager *sval_mgr = mgr->get_svalue_manager ();
      const binding_key *key = *iter;
      const svalue *sval_a = cluster_a->get_any_value (key);
      const svalue *sval_b = cluster_b->get_any_value (key);

      if (key->symbolic_p ())
	num_symbolic_keys++;
      else
	num_concrete_keys++;

      if (sval_a == sval_b)
	{
	  /* Identical values merge to themselves.  */
	  gcc_assert (sval_a);
	  out_cluster->m_map.put (key, sval_a);
	  continue;
	}
      else if (sval_a && sval_b)
	{
	  /* Both clusters bind this key, to different values; ask the
	     svalues whether they can merge.  */
	  if (const svalue *merged_sval
	      = sval_a->can_merge_p (sval_b, sval_mgr, merger))
	    {
	      out_cluster->m_map.put (key, merged_sval);
	      continue;
	    }
	  /* Merger of the svalues failed.  Reject merger of the cluster.   */
	  return false;
	}

      /* If we get here, then one cluster binds this key and the other
	 doesn't; merge them as "UNKNOWN".  */
      gcc_assert (sval_a || sval_b);

      const svalue *bound_sval = sval_a ? sval_a : sval_b;
      tree type = bound_sval->get_type ();
      const svalue *unknown_sval
	= mgr->get_svalue_manager ()->get_or_create_unknown_svalue (type);

      /* ...but reject the merger if this sval shouldn't be mergeable
	 (e.g. reject merging svalues that have non-purgable sm-state,
	 to avoid falsely reporting memory leaks by merging them
	 with something else).  */
      if (!bound_sval->can_merge_p (unknown_sval, sval_mgr, merger))
	return false;

      out_cluster->m_map.put (key, unknown_sval);
    }

  /* We might now have overlapping concrete clusters in OUT_CLUSTER.
     Reject such cases.  */
  if (num_concrete_keys > 0)
    {
      /* Check for overlapping concrete bindings by comparing adjacent
	 entries (assumes get_concrete_bindings () yields them ordered
	 by bit range — TODO confirm against binding_map).  */
      const auto &concrete_bindings
	= out_cluster->get_map ().get_concrete_bindings ();
      for (auto iter = concrete_bindings.begin ();
	   iter != concrete_bindings.end (); ++iter)
	{
	  auto next (iter);
	  ++next;
	  if (next != concrete_bindings.end ())
	    {
	      const bit_range &iter_bits = iter->first;
	      const bit_range &next_bits = next->first;
	      if (iter_bits.intersects_p (next_bits))
		return false;
	    }
	}
    }

  /* We can only have at most one symbolic key per cluster,
     and if we do, we can't have any concrete keys.
     If this happens, mark the cluster as touched, with no keys.  */
  if (num_symbolic_keys >= 2
      || (num_concrete_keys > 0 && num_symbolic_keys > 0))
    {
      out_cluster->m_touched = true;
      out_cluster->m_map.clear ();
    }

  /* We don't handle other kinds of overlaps yet.  */

  return true;
}
2366 :
2367 : /* Update this cluster to reflect an attempt to merge OTHER where there
2368 : is no other cluster to merge with, and so we're notionally merging the
2369 : bound values in OTHER with the initial value of the relevant regions.
2370 :
2371 : Any bound keys in OTHER should be bound to unknown in this. */
2372 :
2373 : void
2374 32042 : binding_cluster::make_unknown_relative_to (const binding_cluster *other,
2375 : store *out_store,
2376 : store_manager *mgr)
2377 : {
2378 64257 : for (auto iter : *other)
2379 : {
2380 32215 : const binding_key *iter_key = iter.m_key;
2381 32215 : const svalue *iter_sval = iter.m_sval;
2382 32215 : const svalue *unknown_sval
2383 : = mgr->get_svalue_manager ()->get_or_create_unknown_svalue
2384 32215 : (iter_sval->get_type ());
2385 32215 : m_map.put (iter_key, unknown_sval);
2386 :
2387 : /* For any pointers in OTHER, the merger means that the
2388 : concrete pointer becomes an unknown value, which could
2389 : show up as a false report of a leak when considering what
2390 : pointers are live before vs after.
2391 : Avoid this by marking the base regions they point to as having
2392 : escaped. */
2393 64430 : if (const region_svalue *region_sval
2394 32215 : = iter_sval->dyn_cast_region_svalue ())
2395 : {
2396 2116 : const region *base_reg
2397 2116 : = region_sval->get_pointee ()->get_base_region ();
2398 2116 : if (base_reg->tracked_p ()
2399 2116 : && !base_reg->symbolic_for_unknown_ptr_p ())
2400 : {
2401 2114 : binding_cluster *c
2402 2114 : = out_store->get_or_create_cluster (*mgr, base_reg);
2403 2114 : c->mark_as_escaped ();
2404 : }
2405 : }
2406 : }
2407 32042 : }
2408 :
2409 : /* Mark this cluster as having escaped. */
2410 :
void
binding_cluster::mark_as_escaped ()
{
  /* Escaped clusters are clobbered with a conjured value on
     subsequent unknown function calls (see on_unknown_fncall).  */
  m_escaped = true;
}
2416 :
2417 : /* If this cluster has escaped (by this call, or by an earlier one, or
2418 : by being an external param), then unbind all values and mark it
2419 : as "touched", so that it has a conjured value, rather than an
2420 : initial_svalue.
2421 : Use P to purge state involving conjured_svalues. */
2422 :
2423 : void
2424 116513 : binding_cluster::on_unknown_fncall (const gcall &call,
2425 : store_manager *mgr,
2426 : const conjured_purge &p)
2427 : {
2428 116513 : if (m_escaped)
2429 : {
2430 19721 : m_map.clear ();
2431 :
2432 19721 : if (!m_base_region->empty_p ())
2433 : {
2434 : /* Bind it to a new "conjured" value using CALL. */
2435 19721 : const svalue *sval
2436 : = mgr->get_svalue_manager ()->get_or_create_conjured_svalue
2437 19721 : (m_base_region->get_type (), &call, m_base_region, p);
2438 19721 : bind (mgr, m_base_region, sval);
2439 : }
2440 :
2441 19721 : m_touched = true;
2442 : }
2443 116513 : }
2444 :
2445 : /* Mark this cluster as having been clobbered by STMT.
2446 : Use P to purge state involving conjured_svalues. */
2447 :
2448 : void
2449 510 : binding_cluster::on_asm (const gasm *stmt,
2450 : store_manager *mgr,
2451 : const conjured_purge &p)
2452 : {
2453 510 : m_map.clear ();
2454 :
2455 : /* Bind it to a new "conjured" value using CALL. */
2456 510 : const svalue *sval
2457 : = mgr->get_svalue_manager ()->get_or_create_conjured_svalue
2458 510 : (m_base_region->get_type (), stmt, m_base_region, p);
2459 510 : bind (mgr, m_base_region, sval);
2460 :
2461 510 : m_touched = true;
2462 510 : }
2463 :
2464 : /* Return true if this cluster has escaped. */
2465 :
2466 : bool
2467 9270317 : binding_cluster::escaped_p () const
2468 : {
2469 : /* Consider the "errno" region to always have escaped. */
2470 9270317 : if (m_base_region->get_kind () == RK_ERRNO)
2471 : return true;
2472 9246912 : return m_escaped;
2473 : }
2474 :
2475 : /* Return true if this binding_cluster has no information
2476 : i.e. if there are no bindings, and it hasn't been marked as having
2477 : escaped, or touched symbolically. */
2478 :
2479 : bool
2480 221009 : binding_cluster::redundant_p () const
2481 : {
2482 221009 : return (m_map.elements () == 0
2483 217111 : && !m_escaped
2484 437064 : && !m_touched);
2485 : }
2486 :
2487 : /* Add PV to OUT_PVS, casting it to TYPE if it is not already of that type. */
2488 :
2489 : static void
2490 6950 : append_pathvar_with_type (path_var pv,
2491 : tree type,
2492 : auto_vec<path_var> *out_pvs)
2493 : {
2494 6950 : gcc_assert (pv.m_tree);
2495 :
2496 6950 : if (TREE_TYPE (pv.m_tree) != type)
2497 537 : pv.m_tree = build1 (NOP_EXPR, type, pv.m_tree);
2498 :
2499 6950 : out_pvs->safe_push (pv);
2500 6950 : }
2501 :
2502 : /* Find representative path_vars for SVAL within this binding of BASE_REG,
2503 : appending the results to OUT_PVS. */
2504 :
void
binding_cluster::get_representative_path_vars (const region_model *model,
					       svalue_set *visited,
					       const region *base_reg,
					       const svalue *sval,
					       logger *logger,
					       auto_vec<path_var> *out_pvs)
  const
{
  sval = simplify_for_binding (sval);

  /* Scan all bindings in this cluster for ones bound to SVAL.  */
  for (auto iter : m_map)
    {
      const binding_key *key = iter.m_key;
      const svalue *bound_sval = iter.m_sval;
      if (bound_sval == sval)
	{
	  if (const concrete_binding *ckey
	      = key->dyn_cast_concrete_binding ())
	    {
	      /* Concrete key: find the subregions of BASE_REG that
		 correspond to this bit range, and report a path_var
		 for each one we can represent.  */
	      auto_vec <const region *> subregions;
	      base_reg->get_subregions_for_binding
		(model->get_manager (),
		 ckey->get_start_bit_offset (),
		 ckey->get_size_in_bits (),
		 sval->get_type (),
		 &subregions);
	      unsigned i;
	      const region *subregion;
	      FOR_EACH_VEC_ELT (subregions, i, subregion)
		{
		  if (path_var pv
		      = model->get_representative_path_var (subregion,
							    visited,
							    logger))
		    append_pathvar_with_type (pv, sval->get_type (), out_pvs);
		}
	    }
	  else
	    {
	      /* Symbolic key: use the key's region directly.  */
	      const symbolic_binding *skey = (const symbolic_binding *)key;
	      if (path_var pv
		  = model->get_representative_path_var (skey->get_region (),
							visited,
							logger))
		append_pathvar_with_type (pv, sval->get_type (), out_pvs);
	    }
	}
    }
}
2555 :
2556 : /* Get any svalue bound to KEY, or nullptr. */
2557 :
const svalue *
binding_cluster::get_any_value (const binding_key *key) const
{
  /* Direct map lookup; yields nullptr if KEY is unbound.  */
  return m_map.get (key);
}
2563 :
2564 : /* If this cluster has a single direct binding for the whole of the region,
2565 : return it.
2566 : For use in simplifying dumps. */
2567 :
2568 : const svalue *
2569 290896 : binding_cluster::maybe_get_simple_value (store_manager *mgr) const
2570 : {
2571 : /* Fail gracefully if MGR is nullptr to make it easier to dump store
2572 : instances in the debugger. */
2573 290896 : if (mgr == nullptr)
2574 : return nullptr;
2575 :
2576 290896 : if (m_map.elements () != 1)
2577 : return nullptr;
2578 :
2579 147821 : if (m_base_region->empty_p ())
2580 : return nullptr;
2581 :
2582 147821 : const binding_key *key = binding_key::make (mgr, m_base_region);
2583 147821 : return get_any_value (key);
2584 : }
2585 :
2586 : /* class store_manager. */
2587 :
/* Get the logger (if any) for this store_manager.  */

logger *
store_manager::get_logger () const
{
  /* Delegate to the underlying manager.  */
  return m_mgr->get_logger ();
}
2593 :
2594 : /* binding consolidation. */
2595 :
2596 : const concrete_binding *
2597 13598436 : store_manager::get_concrete_binding (bit_offset_t start_bit_offset,
2598 : bit_offset_t size_in_bits)
2599 : {
2600 13598436 : concrete_binding b (start_bit_offset, size_in_bits);
2601 27184208 : if (concrete_binding *existing = m_concrete_binding_key_mgr.get (b))
2602 : return existing;
2603 :
2604 12664 : concrete_binding *to_save = new concrete_binding (b);
2605 12664 : m_concrete_binding_key_mgr.put (b, to_save);
2606 12664 : return to_save;
2607 13598436 : }
2608 :
2609 : const symbolic_binding *
2610 1820249 : store_manager::get_symbolic_binding (const region *reg)
2611 : {
2612 1820249 : symbolic_binding b (reg);
2613 3620394 : if (symbolic_binding *existing = m_symbolic_binding_key_mgr.get (b))
2614 : return existing;
2615 :
2616 20104 : symbolic_binding *to_save = new symbolic_binding (b);
2617 20104 : m_symbolic_binding_key_mgr.put (b, to_save);
2618 20104 : return to_save;
2619 1820249 : }
2620 :
2621 : /* class store. */
2622 :
2623 : /* store's default ctor. */
2624 :
2625 370385 : store::store ()
2626 370385 : : m_called_unknown_fn (false)
2627 : {
2628 370385 : }
2629 :
2630 : /* store's copy ctor. */
2631 :
2632 3186718 : store::store (const store &other)
2633 3186718 : : m_cluster_map (other.m_cluster_map.elements ()),
2634 3186718 : m_called_unknown_fn (other.m_called_unknown_fn)
2635 : {
2636 3186718 : for (cluster_map_t::iterator iter = other.m_cluster_map.begin ();
2637 31128429 : iter != other.m_cluster_map.end ();
2638 27941711 : ++iter)
2639 : {
2640 27941711 : const region *reg = (*iter).first;
2641 0 : gcc_assert (reg);
2642 27941711 : binding_cluster *c = (*iter).second;
2643 27941711 : gcc_assert (c);
2644 27941711 : m_cluster_map.put (reg, new binding_cluster (*c));
2645 : }
2646 3186718 : }
2647 :
2648 : /* store's dtor. */
2649 :
2650 3557103 : store::~store ()
2651 : {
2652 32806318 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2653 32806318 : iter != m_cluster_map.end ();
2654 29249215 : ++iter)
2655 29249215 : delete (*iter).second;
2656 3557103 : }
2657 :
2658 : /* store's assignment operator. */
2659 :
2660 : store &
2661 58676 : store::operator= (const store &other)
2662 : {
2663 : /* Delete existing cluster map. */
2664 708313 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2665 708313 : iter != m_cluster_map.end ();
2666 649637 : ++iter)
2667 1299274 : delete (*iter).second;
2668 58676 : m_cluster_map.empty ();
2669 :
2670 58676 : m_called_unknown_fn = other.m_called_unknown_fn;
2671 :
2672 58676 : for (cluster_map_t::iterator iter = other.m_cluster_map.begin ();
2673 718186 : iter != other.m_cluster_map.end ();
2674 659510 : ++iter)
2675 : {
2676 659510 : const region *reg = (*iter).first;
2677 659510 : gcc_assert (reg);
2678 659510 : binding_cluster *c = (*iter).second;
2679 659510 : gcc_assert (c);
2680 659510 : m_cluster_map.put (reg, new binding_cluster (*c));
2681 : }
2682 58676 : return *this;
2683 : }
2684 :
2685 : /* store's equality operator. */
2686 :
bool
store::operator== (const store &other) const
{
  if (m_called_unknown_fn != other.m_called_unknown_fn)
    return false;

  if (m_cluster_map.elements () != other.m_cluster_map.elements ())
    return false;

  /* The maps have equal sizes, so it suffices to check that every
     cluster in this store has an equal counterpart in OTHER.  */
  for (cluster_map_t::iterator iter = m_cluster_map.begin ();
       iter != m_cluster_map.end ();
       ++iter)
    {
      const region *reg = (*iter).first;
      binding_cluster *c = (*iter).second;
      /* const_cast since hash_map::get is non-const; the lookup
	 doesn't modify OTHER.  */
      binding_cluster **other_slot
	= const_cast <cluster_map_t &> (other.m_cluster_map).get (reg);
      if (other_slot == nullptr)
	return false;
      if (*c != **other_slot)
	return false;
    }

  /* Equal stores must hash equally (checking builds only).  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
2714 :
2715 : /* Get a hash value for this store. */
2716 :
2717 : hashval_t
2718 2051490 : store::hash () const
2719 : {
2720 2051490 : hashval_t result = 0;
2721 2051490 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2722 16870483 : iter != m_cluster_map.end ();
2723 14818993 : ++iter)
2724 14818993 : result ^= (*iter).second->hash ();
2725 2051490 : return result;
2726 : }
2727 :
2728 : /* Populate OUT with a sorted list of parent regions for the regions in IN,
2729 : removing duplicate parents. */
2730 :
2731 : static void
2732 2134 : get_sorted_parent_regions (auto_vec<const region *> *out,
2733 : auto_vec<const region *> &in)
2734 : {
2735 : /* Get the set of parent regions. */
2736 2134 : hash_set<const region *> parent_regions;
2737 2134 : const region *iter_reg;
2738 2134 : unsigned i;
2739 10887 : FOR_EACH_VEC_ELT (in, i, iter_reg)
2740 : {
2741 8753 : const region *parent_reg = iter_reg->get_parent_region ();
2742 8753 : gcc_assert (parent_reg);
2743 8753 : parent_regions.add (parent_reg);
2744 : }
2745 :
2746 : /* Write to OUT. */
2747 2134 : for (hash_set<const region *>::iterator iter = parent_regions.begin();
2748 11368 : iter != parent_regions.end(); ++iter)
2749 4617 : out->safe_push (*iter);
2750 :
2751 : /* Sort OUT. */
2752 4066 : out->qsort (region::cmp_ptr_ptr);
2753 2134 : }
2754 :
2755 : /* Dump a representation of this store to PP, using SIMPLE to control how
2756 : svalues and regions are printed.
2757 : MGR is used for simplifying dumps if non-NULL, but can also be nullptr
2758 : (to make it easier to use from the debugger). */
2759 :
void
store::dump_to_pp (pretty_printer *pp, bool simple, bool multiline,
		   store_manager *mgr) const
{
  /* Sort into some deterministic order.  */
  auto_vec<const region *> base_regions;
  for (cluster_map_t::iterator iter = m_cluster_map.begin ();
       iter != m_cluster_map.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      base_regions.safe_push (base_reg);
    }
  base_regions.qsort (region::cmp_ptr_ptr);

  /* Gather clusters, organize by parent region, so that we can group
     together locals, globals, etc.  */
  auto_vec<const region *> parent_regions;
  get_sorted_parent_regions (&parent_regions, base_regions);

  const region *parent_reg;
  unsigned i;
  FOR_EACH_VEC_ELT (parent_regions, i, parent_reg)
    {
      gcc_assert (parent_reg);
      pp_string (pp, "clusters within ");
      parent_reg->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
      else
	pp_string (pp, " {");

      const region *base_reg;
      unsigned j;
      FOR_EACH_VEC_ELT (base_regions, j, base_reg)
	{
	  /* This is O(N * M), but N ought to be small.  */
	  if (base_reg->get_parent_region () != parent_reg)
	    continue;
	  binding_cluster *cluster
	    = *const_cast<cluster_map_t &> (m_cluster_map).get (base_reg);
	  if (!multiline)
	    {
	      /* NOTE(review): J indexes all of BASE_REGIONS, including
		 entries skipped by the "continue" above, so this can
		 emit a separator before the first cluster printed for
		 this parent group — confirm whether that's intended.  */
	      if (j > 0)
		pp_string (pp, ", ");
	    }
	  if (const svalue *sval = cluster->maybe_get_simple_value (mgr))
	    {
	      /* Special-case to simplify dumps for the common case where
		 we just have one value directly bound to the whole of a
		 region.  */
	      if (multiline)
		{
		  pp_string (pp, "  cluster for: ");
		  base_reg->dump_to_pp (pp, simple);
		  pp_string (pp, ": ");
		  sval->dump_to_pp (pp, simple);
		  if (cluster->escaped_p ())
		    pp_string (pp, " (ESCAPED)");
		  if (cluster->touched_p ())
		    pp_string (pp, " (TOUCHED)");
		  pp_newline (pp);
		}
	      else
		{
		  pp_string (pp, "region: {");
		  base_reg->dump_to_pp (pp, simple);
		  pp_string (pp, ", value: ");
		  sval->dump_to_pp (pp, simple);
		  if (cluster->escaped_p ())
		    pp_string (pp, " (ESCAPED)");
		  if (cluster->touched_p ())
		    pp_string (pp, " (TOUCHED)");
		  pp_string (pp, "}");
		}
	    }
	  else if (multiline)
	    {
	      pp_string (pp, "  cluster for: ");
	      base_reg->dump_to_pp (pp, simple);
	      pp_newline (pp);
	      cluster->dump_to_pp (pp, simple, multiline);
	    }
	  else
	    {
	      pp_string (pp, "base region: {");
	      base_reg->dump_to_pp (pp, simple);
	      pp_string (pp, "} has cluster: {");
	      cluster->dump_to_pp (pp, simple, multiline);
	      pp_string (pp, "}");
	    }
	}
      if (!multiline)
	pp_string (pp, "}");
    }
  pp_printf (pp, "m_called_unknown_fn: %s",
	     m_called_unknown_fn ? "TRUE" : "FALSE");
  if (multiline)
    pp_newline (pp);
}
2859 :
 2860 : /* Dump a multiline representation of this store to stderr. */
 2861 :
 2862 : DEBUG_FUNCTION void
 2863 0 : store::dump (bool simple) const
 2864 : {
 2865 0 : tree_dump_pretty_printer pp (stderr);
 : /* Pass nullptr for the store_manager: dump_to_pp accepts this;
 : it is forwarded to maybe_get_simple_value.  */
 2866 0 : dump_to_pp (&pp, simple, true, nullptr);
 2867 0 : pp_newline (&pp);
 2868 0 : }
2869 :
 2870 : /* Assert that this object is valid. */
 2871 :
 2872 : void
 2873 1718070 : store::validate () const
 2874 : {
 : /* Delegate to each binding_cluster; any violation is expected to
 : trigger an assertion failure inside binding_cluster::validate.  */
 2875 14638847 : for (auto iter : m_cluster_map)
 2876 12920777 : iter.second->validate ();
 2877 1718070 : }
2878 :
 2879 : /* Return a new json::object of the form
 2880 : {PARENT_REGION_DESC: {BASE_REGION_DESC: object for binding_map,
 2881 : ... for each cluster within parent region},
 2882 : ...for each parent region,
 2883 : "called_unknown_fn": true/false}. */
 2884 :
 2885 : std::unique_ptr<json::object>
 2886 4 : store::to_json () const
 2887 : {
 2888 4 : auto store_obj = std::make_unique<json::object> ();
 2889 :
 2890 : /* Sort into some deterministic order. */
 2891 4 : auto_vec<const region *> base_regions;
 2892 4 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
 2893 4 : iter != m_cluster_map.end (); ++iter)
 2894 : {
 2895 0 : const region *base_reg = (*iter).first;
 2896 0 : base_regions.safe_push (base_reg);
 2897 : }
 2898 4 : base_regions.qsort (region::cmp_ptr_ptr);
 2899 :
 2900 : /* Gather clusters, organize by parent region, so that we can group
 2901 : together locals, globals, etc. */
 2902 4 : auto_vec<const region *> parent_regions;
 2903 4 : get_sorted_parent_regions (&parent_regions, base_regions);
 2904 :
 2905 4 : const region *parent_reg;
 2906 4 : unsigned i;
 2907 4 : FOR_EACH_VEC_ELT (parent_regions, i, parent_reg)
 2908 : {
 2909 0 : gcc_assert (parent_reg);
 2910 :
 : /* One JSON object per parent region, keyed by the descriptions
 : of the base regions it contains.  */
 2911 0 : auto clusters_in_parent_reg_obj = std::make_unique<json::object> ();
 2912 :
 2913 0 : const region *base_reg;
 2914 0 : unsigned j;
 2915 0 : FOR_EACH_VEC_ELT (base_regions, j, base_reg)
 2916 : {
 2917 : /* This is O(N * M), but N ought to be small. */
 2918 0 : if (base_reg->get_parent_region () != parent_reg)
 2919 0 : continue;
 2920 0 : binding_cluster *cluster
 2921 0 : = *const_cast<cluster_map_t &> (m_cluster_map).get (base_reg);
 2922 0 : label_text base_reg_desc = base_reg->get_desc ();
 2923 0 : clusters_in_parent_reg_obj->set (base_reg_desc.get (),
 2924 0 : cluster->to_json ());
 2925 0 : }
 2926 0 : label_text parent_reg_desc = parent_reg->get_desc ();
 2927 0 : store_obj->set (parent_reg_desc.get (),
 2928 : std::move (clusters_in_parent_reg_obj));
 2929 0 : }
 2930 :
 2931 4 : store_obj->set_bool ("called_unknown_fn", m_called_unknown_fn);
 2932 :
 2933 4 : return store_obj;
 2934 4 : }
2935 :
 : /* Build a text-art tree widget describing this store for use in
 : ASCII-art dumps: a "Store" root with a child per parent region,
 : each holding the dump widgets of its clusters.  */
 2936 : std::unique_ptr<text_art::tree_widget>
 2937 4 : store::make_dump_widget (const text_art::dump_widget_info &dwi,
 2938 : store_manager *mgr) const
 2939 : {
 2940 4 : std::unique_ptr<text_art::tree_widget> store_widget
 2941 4 : (text_art::tree_widget::make (dwi, "Store"));
 2942 :
 2943 4 : store_widget->add_child
 2944 8 : (text_art::tree_widget::from_fmt (dwi, nullptr,
 2945 : "m_called_unknown_fn: %s",
 2946 4 : m_called_unknown_fn ? "true" : "false"));
 2947 :
 2948 : /* Sort into some deterministic order. */
 2949 4 : auto_vec<const region *> base_regions;
 2950 4 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
 2951 4 : iter != m_cluster_map.end (); ++iter)
 2952 : {
 2953 0 : const region *base_reg = (*iter).first;
 2954 0 : base_regions.safe_push (base_reg);
 2955 : }
 2956 4 : base_regions.qsort (region::cmp_ptr_ptr);
 2957 :
 2958 : /* Gather clusters, organize by parent region, so that we can group
 2959 : together locals, globals, etc. */
 2960 4 : auto_vec<const region *> parent_regions;
 2961 4 : get_sorted_parent_regions (&parent_regions, base_regions);
 2962 :
 2963 4 : const region *parent_reg;
 2964 4 : unsigned i;
 2965 4 : FOR_EACH_VEC_ELT (parent_regions, i, parent_reg)
 2966 : {
 2967 0 : gcc_assert (parent_reg);
 2968 :
 : /* Use a local pretty_printer to render the parent region's
 : description as this subtree's label.  */
 2969 0 : pretty_printer the_pp;
 2970 0 : pretty_printer * const pp = &the_pp;
 2971 0 : pp_format_decoder (pp) = default_tree_printer;
 2972 0 : pp_show_color (pp) = true;
 2973 0 : const bool simple = true;
 2974 :
 2975 0 : parent_reg->dump_to_pp (pp, simple);
 2976 :
 2977 0 : std::unique_ptr<text_art::tree_widget> parent_reg_widget
 2978 0 : (text_art::tree_widget::make (dwi, pp));
 2979 :
 2980 0 : const region *base_reg;
 2981 0 : unsigned j;
 2982 0 : FOR_EACH_VEC_ELT (base_regions, j, base_reg)
 2983 : {
 2984 : /* This is O(N * M), but N ought to be small. */
 2985 0 : if (base_reg->get_parent_region () != parent_reg)
 2986 0 : continue;
 2987 0 : binding_cluster *cluster
 2988 0 : = *const_cast<cluster_map_t &> (m_cluster_map).get (base_reg);
 2989 0 : parent_reg_widget->add_child
 2990 0 : (cluster->make_dump_widget (dwi, mgr));
 2991 : }
 2992 0 : store_widget->add_child (std::move (parent_reg_widget));
 2993 0 : }
 2994 :
 2995 4 : return store_widget;
 2996 4 : }
2997 :
 2998 : /* Get any svalue bound to REG, or nullptr. */
 2999 :
 3000 : const svalue *
 3001 4106556 : store::get_any_binding (store_manager *mgr, const region *reg) const
 3002 : {
 : /* Bindings are stored per base region; look up the cluster for
 : REG's base region, then ask it about REG itself.  */
 3003 4106556 : const region *base_reg = reg->get_base_region ();
 3004 4106556 : binding_cluster **cluster_slot
 3005 4106556 : = const_cast <cluster_map_t &> (m_cluster_map).get (base_reg);
 3006 4106556 : if (!cluster_slot)
 3007 : return nullptr;
 3008 1241688 : return (*cluster_slot)->get_any_binding (mgr, reg);
 3009 : }
3010 :
 3011 : /* Set the value of LHS_REG to RHS_SVAL. */
 3012 :
 3013 : void
 3014 357919 : store::set_value (store_manager *mgr, const region *lhs_reg,
 3015 : const svalue *rhs_sval,
 3016 : uncertainty_t *uncertainty)
 3017 : {
 3018 357919 : logger *logger = mgr->get_logger ();
 3019 357919 : LOG_SCOPE (logger);
 3020 :
 : /* First drop any existing bindings that overlap LHS_REG.  */
 3021 357919 : remove_overlapping_bindings (mgr, lhs_reg, uncertainty);
 3022 :
 3023 357919 : if (lhs_reg->get_type ())
 3024 345591 : rhs_sval = simplify_for_binding (rhs_sval);
 3025 : /* ...but if we have no type for the region, retain any cast. */
 3026 :
 3027 357919 : const region *lhs_base_reg = lhs_reg->get_base_region ();
 3028 357919 : binding_cluster *lhs_cluster;
 3029 357919 : if (lhs_base_reg->symbolic_for_unknown_ptr_p ())
 3030 : {
 3031 : /* Reject attempting to bind values into a symbolic region
 3032 : for an unknown ptr; merely invalidate values below. */
 3033 2528 : lhs_cluster = nullptr;
 3034 :
 3035 : /* The LHS of the write is *UNKNOWN. If the RHS is a pointer,
 3036 : then treat the region being pointed to as having escaped. */
 3037 2528 : if (const region_svalue *ptr_sval = rhs_sval->dyn_cast_region_svalue ())
 3038 : {
 3039 83 : const region *ptr_dst = ptr_sval->get_pointee ();
 3040 83 : const region *ptr_base_reg = ptr_dst->get_base_region ();
 3041 83 : mark_as_escaped (*mgr, ptr_base_reg);
 3042 : }
 3043 2528 : if (uncertainty)
 3044 1110 : uncertainty->on_maybe_bound_sval (rhs_sval);
 3045 : }
 3046 355391 : else if (lhs_base_reg->tracked_p ())
 3047 : {
 3048 355317 : lhs_cluster = get_or_create_cluster (*mgr, lhs_base_reg);
 3049 355317 : lhs_cluster->bind (mgr, lhs_reg, rhs_sval);
 3050 : }
 3051 : else
 3052 : {
 3053 : /* Reject attempting to bind values into an untracked region;
 3054 : merely invalidate values below. */
 3055 : lhs_cluster = nullptr;
 3056 : }
 3057 :
 3058 : /* Bindings to a cluster can affect other clusters if a symbolic
 3059 : base region is involved.
 3060 : Writes to concrete clusters can't affect other concrete clusters,
 3061 : but can affect symbolic clusters.
 3062 : Writes to symbolic clusters can affect both concrete and symbolic
 3063 : clusters.
 3064 : Invalidate our knowledge of other clusters that might have been
 3065 : affected by the write.
 3066 : Gather the set of all svalues that might still be live even if
 3067 : the store doesn't refer to them. */
 3068 357919 : svalue_set maybe_live_values;
 3069 5133307 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
 3070 9908695 : iter != m_cluster_map.end (); ++iter)
 3071 : {
 3072 4775388 : const region *iter_base_reg = (*iter).first;
 3073 4775388 : binding_cluster *iter_cluster = (*iter).second;
 : /* Only consider other clusters, and only when symbolic-ness
 : is involved on either side (lhs_cluster == nullptr covers the
 : unknown-ptr and untracked cases above).  */
 3074 4775388 : if (iter_base_reg != lhs_base_reg
 3075 4775388 : && (lhs_cluster == nullptr
 3076 4381525 : || lhs_cluster->symbolic_p ()
 3077 4329378 : || iter_cluster->symbolic_p ()))
 3078 : {
 3079 655001 : tristate t_alias = eval_alias (lhs_base_reg, iter_base_reg);
 3080 655001 : switch (t_alias.get_value ())
 3081 : {
 3082 0 : default:
 3083 0 : gcc_unreachable ();
 3084 :
 3085 46027 : case tristate::TS_UNKNOWN:
 3086 46027 : if (logger)
 3087 : {
 3088 0 : pretty_printer *pp = logger->get_printer ();
 3089 0 : logger->start_log_line ();
 3090 0 : logger->log_partial ("possible aliasing of ");
 3091 0 : iter_base_reg->dump_to_pp (pp, true);
 3092 0 : logger->log_partial (" when writing SVAL: ");
 3093 0 : rhs_sval->dump_to_pp (pp, true);
 3094 0 : logger->log_partial (" to LHS_REG: ");
 3095 0 : lhs_reg->dump_to_pp (pp, true);
 3096 0 : logger->end_log_line ();
 3097 : }
 3098 : /* Mark all of iter_cluster's iter_base_reg as unknown,
 3099 : using LHS_REG when considering overlaps, to handle
 3100 : symbolic vs concrete issues. */
 3101 46027 : iter_cluster->mark_region_as_unknown
 3102 46027 : (mgr,
 3103 : iter_base_reg, /* reg_to_bind */
 3104 : lhs_reg, /* reg_for_overlap */
 3105 : uncertainty,
 3106 : &maybe_live_values);
 3107 46027 : break;
 3108 :
 3109 0 : case tristate::TS_TRUE:
 3110 0 : gcc_unreachable ();
 3111 : break;
 3112 :
 3113 : case tristate::TS_FALSE:
 3114 : /* If they can't be aliases, then don't invalidate this
 3115 : cluster. */
 3116 : break;
 3117 : }
 3118 : }
 3119 : }
 3120 : /* Given the set of svalues that might still be live, process them
 3121 : (e.g. marking regions as escaped).
 3122 : We do this after the iteration to avoid potentially changing
 3123 : m_cluster_map whilst iterating over it. */
 3124 357919 : on_maybe_live_values (*mgr, maybe_live_values);
 3125 357919 : }
3126 :
 3127 : /* Determine if BASE_REG_A could be an alias of BASE_REG_B. */
 3128 :
 3129 : tristate
 3130 655210 : store::eval_alias (const region *base_reg_a,
 3131 : const region *base_reg_b) const
 3132 : {
 3133 : /* SSA names can't alias. */
 3134 655210 : tree decl_a = base_reg_a->maybe_get_decl ();
 3135 655210 : if (decl_a && TREE_CODE (decl_a) == SSA_NAME)
 3136 472407 : return tristate::TS_FALSE;
 3137 182803 : tree decl_b = base_reg_b->maybe_get_decl ();
 3138 182803 : if (decl_b && TREE_CODE (decl_b) == SSA_NAME)
 3139 38565 : return tristate::TS_FALSE;
 3140 :
 3141 : /* Different decls don't alias. */
 3142 144238 : if (decl_a && decl_b && decl_a != decl_b)
 3143 53 : return tristate::TS_FALSE;
 3144 :
 3145 : /* Try both ways, for symmetry. */
 : /* Either direction proving "can't alias" is sufficient; otherwise
 : we conservatively answer UNKNOWN (never TRUE).  */
 3146 144185 : tristate ts_ab = eval_alias_1 (base_reg_a, base_reg_b);
 3147 144185 : if (ts_ab.is_false ())
 3148 18402 : return tristate::TS_FALSE;
 3149 125783 : tristate ts_ba = eval_alias_1 (base_reg_b, base_reg_a);
 3150 125783 : if (ts_ba.is_false ())
 3151 79627 : return tristate::TS_FALSE;
 3152 46156 : return tristate::TS_UNKNOWN;
 3153 : }
3154 :
 3155 : /* Half of store::eval_alias; called twice for symmetry. */
 3156 :
 3157 : tristate
 3158 269968 : store::eval_alias_1 (const region *base_reg_a,
 3159 : const region *base_reg_b) const
 3160 : {
 3161 : /* If they're in different memory spaces, they can't alias. */
 3162 269968 : {
 3163 269968 : enum memory_space memspace_a = base_reg_a->get_memory_space ();
 3164 269968 : if (memspace_a != MEMSPACE_UNKNOWN)
 3165 : {
 3166 105160 : enum memory_space memspace_b = base_reg_b->get_memory_space ();
 3167 105160 : if (memspace_b != MEMSPACE_UNKNOWN
 3168 105160 : && memspace_a != memspace_b)
 3169 48 : return tristate::TS_FALSE;
 3170 : }
 3171 : }
 3172 :
 : /* The remaining checks only apply when A is a symbolic region
 : (i.e. a dereference of some pointer value).  */
 3173 539840 : if (const symbolic_region *sym_reg_a
 3174 269920 : = base_reg_a->dyn_cast_symbolic_region ())
 3175 : {
 3176 159724 : const svalue *sval_a = sym_reg_a->get_pointer ();
 3177 159724 : if (tree decl_b = base_reg_b->maybe_get_decl ())
 3178 : {
 3179 102753 : if (!may_be_aliased (decl_b))
 3180 38657 : return tristate::TS_FALSE;
 3181 64096 : if (sval_a->get_kind () == SK_INITIAL)
 3182 54572 : if (!is_global_var (decl_b))
 3183 : {
 3184 : /* The initial value of a pointer can't point to a local. */
 3185 48716 : return tristate::TS_FALSE;
 3186 : }
 3187 : }
 3188 72351 : if (sval_a->get_kind () == SK_INITIAL
 3189 72351 : && base_reg_b->get_kind () == RK_HEAP_ALLOCATED)
 3190 : {
 3191 : /* The initial value of a pointer can't point to a
 3192 : region that was allocated on the heap after the beginning of the
 3193 : path. */
 3194 10528 : return tristate::TS_FALSE;
 3195 : }
 3196 123646 : if (const widening_svalue *widening_sval_a
 3197 61823 : = sval_a->dyn_cast_widening_svalue ())
 3198 : {
 3199 2266 : const svalue *base = widening_sval_a->get_base_svalue ();
 3200 4532 : if (const region_svalue *region_sval
 3201 2266 : = base->dyn_cast_region_svalue ())
 3202 : {
 3203 209 : const region *pointee = region_sval->get_pointee ();
 3204 : /* If we have sval_a is WIDENING(&REGION, OP), and
 3205 : B can't alias REGION, then B can't alias A either.
 3206 : For example, A might arise from
 3207 : for (ptr = &REGION; ...; ptr++)
 3208 : where sval_a is ptr in the 2nd iteration of the loop.
 3209 : We want to ensure that "*ptr" can only clobber things
 3210 : within REGION's base region. */
 3211 209 : tristate ts = eval_alias (pointee->get_base_region (),
 3212 : base_reg_b);
 3213 209 : if (ts.is_false ())
 3214 80 : return tristate::TS_FALSE;
 3215 : }
 3216 : }
 3217 : }
 3218 171939 : return tristate::TS_UNKNOWN;
 3219 : }
3220 :
 3221 : /* Record all of the values in MAYBE_LIVE_VALUES as being possibly live. */
 3222 :
 3223 : void
 3224 358172 : store::on_maybe_live_values (store_manager &mgr,
 3225 : const svalue_set &maybe_live_values)
 3226 : {
 : /* For any pointer value in the set, mark its pointee's base region
 : as escaped; non-pointer values need no action here.  */
 3227 424422 : for (auto sval : maybe_live_values)
 3228 : {
 3229 33125 : if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
 3230 : {
 3231 173 : const region *base_reg = ptr_sval->get_pointee ()->get_base_region ();
 3232 173 : mark_as_escaped (mgr, base_reg);
 3233 : }
 3234 : }
 3235 358172 : }
3236 :
 3237 : /* Remove all bindings overlapping REG within this store. */
 3238 :
 3239 : void
 3240 6130 : store::clobber_region (store_manager *mgr, const region *reg)
 3241 : {
 3242 6130 : const region *base_reg = reg->get_base_region ();
 3243 6130 : binding_cluster **slot = m_cluster_map.get (base_reg);
 3244 6130 : if (!slot)
 3245 1700 : return;
 3246 4430 : binding_cluster *cluster = *slot;
 3247 4430 : cluster->clobber_region (mgr, reg);
 : /* Drop the cluster entirely if clobbering left it carrying no
 : useful information.  */
 3248 4430 : if (cluster->redundant_p ())
 3249 : {
 3250 3033 : delete cluster;
 3251 3033 : m_cluster_map.remove (base_reg);
 3252 : }
 3253 : }
3254 :
 3255 : /* Remove any bindings for REG within this store. */
 3256 :
 3257 : void
 3258 216579 : store::purge_region (store_manager *mgr, const region *reg)
 3259 : {
 3260 216579 : const region *base_reg = reg->get_base_region ();
 3261 216579 : binding_cluster **slot = m_cluster_map.get (base_reg);
 3262 216579 : if (!slot)
 3263 0 : return;
 3264 216579 : binding_cluster *cluster = *slot;
 3265 216579 : cluster->purge_region (mgr, reg);
 : /* As with clobber_region, discard the cluster if purging made it
 : redundant.  */
 3266 216579 : if (cluster->redundant_p ())
 3267 : {
 3268 212527 : delete cluster;
 3269 212527 : m_cluster_map.remove (base_reg);
 3270 : }
 3271 : }
3272 :
 3273 : /* Fill REG with SVAL. */
 3274 :
 3275 : void
 3276 1345 : store::fill_region (store_manager *mgr, const region *reg, const svalue *sval)
 3277 : {
 3278 : /* Filling an empty region is a no-op. */
 3279 1345 : if (reg->empty_p ())
 3280 : return;
 3281 :
 : /* Silently do nothing for regions we can't track (unknown-ptr
 : dereferences and untracked base regions).  */
 3282 1317 : const region *base_reg = reg->get_base_region ();
 3283 1317 : if (base_reg->symbolic_for_unknown_ptr_p ()
 3284 1317 : || !base_reg->tracked_p ())
 3285 6 : return;
 3286 1311 : binding_cluster *cluster = get_or_create_cluster (*mgr, base_reg);
 3287 1311 : cluster->fill_region (mgr, reg, sval);
 3288 : }
3289 :
 3290 : /* Zero-fill REG. */
 3291 :
 3292 : void
 3293 705 : store::zero_fill_region (store_manager *mgr, const region *reg)
 3294 : {
 : /* Implemented as a fill with the char-typed constant 0.  */
 3295 705 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
 3296 705 : const svalue *zero_sval = sval_mgr->get_or_create_int_cst (char_type_node, 0);
 3297 705 : fill_region (mgr, reg, zero_sval);
 3298 705 : }
3299 :
 3300 : /* Mark REG as having unknown content. */
 3301 :
 3302 : void
 3303 253 : store::mark_region_as_unknown (store_manager *mgr, const region *reg,
 3304 : uncertainty_t *uncertainty,
 3305 : svalue_set *maybe_live_values)
 3306 : {
 3307 253 : const region *base_reg = reg->get_base_region ();
 3308 253 : if (base_reg->symbolic_for_unknown_ptr_p ()
 3309 253 : || !base_reg->tracked_p ())
 3310 0 : return;
 3311 253 : binding_cluster *cluster = get_or_create_cluster (*mgr, base_reg);
 : /* REG serves as both reg_to_bind and reg_for_overlap here
 : (cf. the call in store::set_value, where they differ).  */
 3312 253 : cluster->mark_region_as_unknown (mgr, reg, reg, uncertainty,
 3313 : maybe_live_values);
 3314 : }
3315 :
 3316 : /* Purge state involving SVAL. */
 3317 :
 3318 : void
 3319 26833 : store::purge_state_involving (const svalue *sval,
 3320 : region_model_manager *sval_mgr)
 3321 : {
 : /* Two phases: collect the base regions to purge first, then purge
 : them, so that m_cluster_map isn't mutated mid-iteration.  */
 3322 26833 : auto_vec <const region *> base_regs_to_purge;
 3323 767735 : for (auto iter : m_cluster_map)
 3324 : {
 3325 370451 : const region *base_reg = iter.first;
 3326 370451 : if (base_reg->involves_p (sval))
 3327 121 : base_regs_to_purge.safe_push (base_reg);
 3328 : else
 3329 : {
 3330 370330 : binding_cluster *cluster = iter.second;
 3331 370330 : cluster->purge_state_involving (sval, sval_mgr);
 3332 : }
 3333 : }
 3334 :
 3335 27196 : for (auto iter : base_regs_to_purge)
 3336 121 : purge_cluster (iter);
 3337 26833 : }
3338 :
 3339 : /* Get the cluster for BASE_REG, or nullptr (const version). */
 3340 :
 3341 : const binding_cluster *
 3342 2779372 : store::get_cluster (const region *base_reg) const
 3343 : {
 3344 2779372 : gcc_assert (base_reg);
 : /* BASE_REG must itself be a base region.  */
 3345 2779372 : gcc_assert (base_reg->get_base_region () == base_reg);
 3346 5558744 : if (binding_cluster **slot
 3347 2779372 : = const_cast <cluster_map_t &> (m_cluster_map).get (base_reg))
 3348 2725986 : return *slot;
 3349 : else
 3350 : return nullptr;
 3351 : }
3352 :
 3353 : /* Get the cluster for BASE_REG, or nullptr (non-const version). */
 3354 :
 3355 : binding_cluster *
 3356 10499948 : store::get_cluster (const region *base_reg)
 3357 : {
 3358 10499948 : gcc_assert (base_reg);
 : /* BASE_REG must itself be a base region.  */
 3359 10499948 : gcc_assert (base_reg->get_base_region () == base_reg);
 3360 10499948 : if (binding_cluster **slot = m_cluster_map.get (base_reg))
 3361 8715485 : return *slot;
 3362 : else
 3363 : return nullptr;
 3364 : }
3365 :
 3366 : /* Get the cluster for BASE_REG, creating it if doesn't already exist. */
 3367 :
 3368 : binding_cluster *
 3369 1642392 : store::get_or_create_cluster (store_manager &store_mgr,
 3370 : const region *base_reg)
 3371 : {
 3372 1642392 : gcc_assert (base_reg);
 3373 1642392 : gcc_assert (base_reg->get_base_region () == base_reg);
 3374 :
 3375 : /* We shouldn't create clusters for dereferencing an UNKNOWN ptr. */
 3376 1642392 : gcc_assert (!base_reg->symbolic_for_unknown_ptr_p ());
 3377 :
 3378 : /* We shouldn't create clusters for base regions that aren't trackable. */
 3379 1642392 : gcc_assert (base_reg->tracked_p ());
 3380 :
 3381 1642392 : if (binding_cluster **slot = m_cluster_map.get (base_reg))
 3382 49358 : return *slot;
 3383 :
 : /* The store owns its clusters; they are deleted when removed from
 : m_cluster_map (see e.g. purge_cluster).  */
 3384 1593034 : binding_cluster *cluster = new binding_cluster (store_mgr, base_reg);
 3385 1593034 : m_cluster_map.put (base_reg, cluster);
 3386 :
 3387 1593034 : return cluster;
 3388 : }
3389 :
 3390 : /* Remove any cluster for BASE_REG, for use by
 3391 : region_model::unbind_region_and_descendents
 3392 : when popping stack frames and handling deleted heap regions. */
 3393 :
 3394 : void
 3395 35222 : store::purge_cluster (const region *base_reg)
 3396 : {
 3397 35222 : gcc_assert (base_reg->get_base_region () == base_reg);
 3398 35222 : binding_cluster **slot = m_cluster_map.get (base_reg);
 3399 35222 : if (!slot)
 3400 : return;
 : /* The store owns the cluster, so delete it when unmapping.  */
 3401 35222 : binding_cluster *cluster = *slot;
 3402 70444 : delete cluster;
 3403 35222 : m_cluster_map.remove (base_reg);
 3404 : }
3405 :
 3406 : /* Attempt to merge STORE_A and STORE_B into OUT_STORE.
 3407 : Return true if successful, or false if the stores can't be merged. */
 3408 :
 3409 : bool
 3410 148012 : store::can_merge_p (const store *store_a, const store *store_b,
 3411 : store *out_store, store_manager *mgr,
 3412 : model_merger *merger)
 3413 : {
 : /* The merged store called an unknown fn if either input did.  */
 3414 148012 : if (store_a->m_called_unknown_fn || store_b->m_called_unknown_fn)
 3415 45666 : out_store->m_called_unknown_fn = true;
 3416 :
 3417 : /* Get the union of all base regions for STORE_A and STORE_B. */
 3418 148012 : hash_set<const region *> base_regions;
 3419 1943446 : for (cluster_map_t::iterator iter_a = store_a->m_cluster_map.begin ();
 3420 3738880 : iter_a != store_a->m_cluster_map.end (); ++iter_a)
 3421 : {
 3422 1795434 : const region *base_reg_a = (*iter_a).first;
 3423 1795434 : base_regions.add (base_reg_a);
 3424 : }
 3425 1921073 : for (cluster_map_t::iterator iter_b = store_b->m_cluster_map.begin ();
 3426 3694134 : iter_b != store_b->m_cluster_map.end (); ++iter_b)
 3427 : {
 3428 1773061 : const region *base_reg_b = (*iter_b).first;
 3429 1773061 : base_regions.add (base_reg_b);
 3430 : }
 3431 :
 3432 : /* Sort the base regions before considering them. This ought not to
 3433 : affect the results, but can affect which types UNKNOWN_REGIONs are
 3434 : created for in a run; sorting them thus avoids minor differences
 3435 : in logfiles. */
 3436 148012 : auto_vec<const region *> vec_base_regions (base_regions.elements ());
 3437 148012 : for (hash_set<const region *>::iterator iter = base_regions.begin ();
 3438 3777642 : iter != base_regions.end (); ++iter)
 3439 1814815 : vec_base_regions.quick_push (*iter);
 3440 148012 : vec_base_regions.qsort (region::cmp_ptr_ptr);
 3441 : unsigned i;
 3442 : const region *base_reg;
 3443 1440178 : FOR_EACH_VEC_ELT (vec_base_regions, i, base_reg)
 3444 : {
 3445 1250814 : const binding_cluster *cluster_a = store_a->get_cluster (base_reg);
 3446 1250814 : const binding_cluster *cluster_b = store_b->get_cluster (base_reg);
 3447 : /* At least one of cluster_a and cluster_b must be non-NULL. */
 3448 1250814 : binding_cluster *out_cluster
 3449 1250814 : = out_store->get_or_create_cluster (*mgr, base_reg);
 : /* Bail out as soon as any pair of clusters can't be merged;
 : OUT_STORE may be left partially populated in that case.  */
 3450 1250814 : if (!binding_cluster::can_merge_p (cluster_a, cluster_b,
 3451 : out_cluster, out_store, mgr, merger))
 3452 : return false;
 3453 : }
 3454 : return true;
 3455 148012 : }
3456 :
 3457 : /* Mark the cluster for BASE_REG as having escaped.
 3458 : For use when handling an unrecognized function call, and
 3459 : for params to "top-level" calls.
 3460 : Further unknown function calls could touch it, even if the cluster
 3461 : isn't reachable from args of those calls. */
 3462 :
 3463 : void
 3464 33677 : store::mark_as_escaped (store_manager &mgr, const region *base_reg)
 3465 : {
 3466 33677 : gcc_assert (base_reg);
 3467 33677 : gcc_assert (base_reg->get_base_region () == base_reg);
 3468 :
 : /* Nothing to record for unknown-ptr dereferences or untracked
 : base regions.  */
 3469 33677 : if (base_reg->symbolic_for_unknown_ptr_p ()
 3470 33677 : || !base_reg->tracked_p ())
 3471 4928 : return;
 3472 :
 3473 28749 : binding_cluster *cluster = get_or_create_cluster (mgr, base_reg);
 3474 28749 : cluster->mark_as_escaped ();
 3475 28749 : }
 3477 : /* Handle an unknown fncall by updating any clusters that have escaped
 3478 : (either in this fncall, or in a prior one). */
 3479 :
 3480 : void
 3481 16888 : store::on_unknown_fncall (const gcall &call, store_manager *mgr,
 3482 : const conjured_purge &p)
 3483 : {
 : /* Record that this store's state can be affected by external code;
 : read by e.g. store::can_merge_p and store::replay_call_summary.  */
 3484 16888 : m_called_unknown_fn = true;
 3485 :
 3486 133401 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
 3487 249914 : iter != m_cluster_map.end (); ++iter)
 3488 116513 : (*iter).second->on_unknown_fncall (call, mgr, p);
 3489 16888 : }
3490 :
 3491 : /* Return true if a non-const pointer to BASE_REG (or something within it)
 3492 : has escaped to code outside of the TU being analyzed. */
 3493 :
 3494 : bool
 3495 8785559 : store::escaped_p (const region *base_reg) const
 3496 : {
 3497 8785559 : gcc_assert (base_reg);
 3498 8785559 : gcc_assert (base_reg->get_base_region () == base_reg);
 3499 :
 3500 : /* "errno" can always be modified by external code. */
 3501 8785559 : if (base_reg->get_kind () == RK_ERRNO)
 3502 : return true;
 3503 :
 : /* A base region with no cluster is treated as not having escaped.  */
 3504 17424814 : if (binding_cluster **cluster_slot
 3505 8712407 : = const_cast <cluster_map_t &>(m_cluster_map).get (base_reg))
 3506 8712407 : return (*cluster_slot)->escaped_p ();
 3507 : return false;
 3508 : }
3509 :
 3510 : /* Populate OUT_PVS with a list of path_vars for describing SVAL based on
 3511 : this store, using VISITED to ensure the traversal terminates. */
 3512 :
 3513 : void
 3514 12369 : store::get_representative_path_vars (const region_model *model,
 3515 : svalue_set *visited,
 3516 : const svalue *sval,
 3517 : logger *logger,
 3518 : auto_vec<path_var> *out_pvs) const
 3519 : {
 3520 12369 : gcc_assert (sval);
 3521 :
 3522 : /* Find all bindings that reference SVAL. */
 3523 56080 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
 3524 99791 : iter != m_cluster_map.end (); ++iter)
 3525 : {
 3526 43711 : const region *base_reg = (*iter).first;
 3527 43711 : binding_cluster *cluster = (*iter).second;
 3528 43711 : cluster->get_representative_path_vars (model, visited, base_reg, sval,
 3529 : logger,
 3530 : out_pvs);
 3531 : }
 3532 :
 : /* Additionally, an INITIAL_SVALUE can be described via a path_var
 : for its region, even with no binding referencing it.  */
 3533 12369 : if (const initial_svalue *init_sval = sval->dyn_cast_initial_svalue ())
 3534 : {
 3535 2304 : const region *reg = init_sval->get_region ();
 3536 2304 : if (path_var pv = model->get_representative_path_var (reg,
 3537 : visited,
 3538 2304 : logger))
 3539 2220 : out_pvs->safe_push (pv);
 3540 : }
 3541 12369 : }
3542 :
 3543 : /* Remove all bindings overlapping REG within this store, removing
 3544 : any clusters that become redundant.
 3545 :
 3546 : If UNCERTAINTY is non-NULL, use it to record any svalues that
 3547 : were removed, as being maybe-bound. */
 3548 :
 3549 : void
 3550 357919 : store::remove_overlapping_bindings (store_manager *mgr, const region *reg,
 3551 : uncertainty_t *uncertainty)
 3552 : {
 3553 357919 : const region *base_reg = reg->get_base_region ();
 3554 357919 : if (binding_cluster **cluster_slot = m_cluster_map.get (base_reg))
 3555 : {
 3556 71560 : binding_cluster *cluster = *cluster_slot;
 : /* Fast path: writing the whole base region of a non-escaped
 : cluster wipes out everything we know about it.  Escaped
 : clusters are kept so the ESCAPED flag survives.  */
 3557 71560 : if (reg == base_reg && !escaped_p (base_reg))
 3558 : {
 3559 : /* Remove whole cluster. */
 3560 44621 : m_cluster_map.remove (base_reg);
 3561 89242 : delete cluster;
 3562 44621 : return;
 3563 : }
 3564 : /* Pass nullptr for the maybe_live_values here, as we don't want to
 3565 : record the old svalues as being maybe-bound. */
 3566 26939 : cluster->remove_overlapping_bindings (mgr, reg, uncertainty, nullptr);
 3567 : }
 3568 : }
3569 :
 3570 : /* Subclass of visitor that accumulates a hash_set of the regions that
 3571 : were visited. */
 3572 :
 3573 1647838 : struct region_finder : public visitor
 3574 : {
 : /* Record each visited region; duplicates are collapsed by the set.  */
 3575 46835709 : void visit_region (const region *reg) final override
 3576 : {
 3577 46835709 : m_regs.add (reg);
 3578 46835709 : }
 3579 :
 3580 : hash_set<const region *> m_regs;
 3581 : };
3583 : /* Canonicalize this store, to maximize the chance of equality between
3584 : instances. */
3585 :
3586 : void
3587 823919 : store::canonicalize (store_manager *mgr)
3588 : {
3589 : /* If we have e.g.:
3590 : cluster for: HEAP_ALLOCATED_REGION(543)
3591 : ESCAPED
3592 : TOUCHED
3593 : where the heap region is empty and unreferenced, then purge that
3594 : cluster, to avoid unbounded state chains involving these. */
3595 :
3596 : /* Find regions that are referenced by bound values in the store. */
3597 823919 : region_finder s;
3598 6839921 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3599 12855923 : iter != m_cluster_map.end (); ++iter)
3600 : {
3601 6016002 : binding_cluster *cluster = (*iter).second;
3602 6016002 : for (binding_cluster::iterator_t bind_iter = cluster->m_map.begin ();
3603 13202978 : bind_iter != cluster->m_map.end (); ++bind_iter)
3604 7186976 : (*bind_iter).m_sval->accept (&s);
3605 : }
3606 :
3607 : /* Locate heap-allocated regions that have empty bindings that weren't
3608 : found above. */
3609 823919 : hash_set<const region *> purgeable_regions;
3610 6839921 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3611 12855923 : iter != m_cluster_map.end (); ++iter)
3612 : {
3613 6016002 : const region *base_reg = (*iter).first;
3614 6016002 : binding_cluster *cluster = (*iter).second;
3615 6016002 : if (base_reg->get_kind () == RK_HEAP_ALLOCATED)
3616 : {
3617 : /* Don't purge a heap-allocated region that's been marked as
3618 : escaping, since this could be recording that a ptr to it
3619 : was written to an unknown symbolic region along this
3620 : path, and so we don't know whether it's referenced or
3621 : not, and hence should report it as leaking
3622 : (PR analyzer/106473). */
3623 293242 : if (cluster->escaped_p ())
3624 113769 : continue;
3625 :
3626 179473 : if (cluster->empty_p ())
3627 450 : if (!s.m_regs.contains (base_reg))
3628 0 : purgeable_regions.add (base_reg);
3629 :
3630 : /* Also cover the UNKNOWN case. */
3631 179473 : if (const svalue *sval = cluster->maybe_get_simple_value (mgr))
3632 54186 : if (sval->get_kind () == SK_UNKNOWN)
3633 28707 : if (!s.m_regs.contains (base_reg))
3634 116 : purgeable_regions.add (base_reg);
3635 : }
3636 : }
3637 :
3638 : /* Purge them. */
3639 824035 : for (hash_set<const region *>::iterator iter = purgeable_regions.begin ();
3640 1648070 : iter != purgeable_regions.end (); ++iter)
3641 : {
3642 116 : const region *base_reg = *iter;
3643 116 : purge_cluster (base_reg);
3644 : }
3645 823919 : }
3646 :
3647 : static bool
3648 90759 : needs_loop_replay_fixup_p (const svalue &sval)
3649 : {
3650 90759 : struct my_visitor : public visitor
3651 : {
3652 90759 : my_visitor () : m_found_widening (false) {}
3653 :
3654 : void
3655 400 : visit_widening_svalue (const widening_svalue *) final override
3656 : {
3657 400 : m_found_widening = true;
3658 400 : }
3659 :
3660 : bool m_found_widening;
3661 90759 : } v;
3662 :
3663 90759 : sval.accept (&v);
3664 90759 : return v.m_found_widening;
3665 : }
3666 :
3667 : /* Subroutine for use by exploded_path::feasible_p.
3668 :
3669 : We need to deal with state differences between:
3670 : (a) when the exploded_graph is being initially constructed and
3671 : (b) when replaying the state changes along a specific path in
3672 : in exploded_path::feasible_p.
3673 :
3674 : In (a), state merging happens, so when exploring a loop
3675 : for (i = 0; i < 1024; i++)
3676 : on successive iterations we have i == 0, then i == WIDENING.
3677 :
3678 : In (b), no state merging happens, so naively replaying the path
3679 : that goes twice through the loop then exits it
3680 : would lead to i == 0, then i == 1, and then a (i >= 1024) eedge
3681 : that exits the loop, which would be found to be infeasible as i == 1,
3682 : and the path would be rejected.
3683 :
3684 : We need to fix up state during replay. This subroutine is
3685 : called whenever we enter a supernode that we've already
3686 : visited along this exploded_path, passing in OTHER_STORE
3687 : from the destination enode's state.
3688 :
3689 : Find bindings to widening values in OTHER_STORE.
3690 : For all that are found, update the binding in this store to UNKNOWN. */
3691 :
3692 : void
3693 4661 : store::loop_replay_fixup (const store *other_store,
3694 : region_model_manager *mgr)
3695 : {
3696 4661 : gcc_assert (other_store);
3697 34866 : for (cluster_map_t::iterator iter = other_store->m_cluster_map.begin ();
3698 65071 : iter != other_store->m_cluster_map.end (); ++iter)
3699 : {
3700 30205 : const region *base_reg = (*iter).first;
3701 30205 : binding_cluster *cluster = (*iter).second;
3702 120964 : for (binding_cluster::iterator_t bind_iter = cluster->m_map.begin ();
3703 120964 : bind_iter != cluster->m_map.end (); ++bind_iter)
3704 : {
3705 90759 : const binding_key *key = (*bind_iter).m_key;
3706 90759 : const svalue *sval = (*bind_iter).m_sval;
3707 90759 : if (needs_loop_replay_fixup_p (*sval))
3708 : {
3709 400 : binding_cluster *this_cluster
3710 400 : = get_or_create_cluster (*mgr->get_store_manager (),
3711 : base_reg);
3712 400 : const svalue *unknown
3713 400 : = mgr->get_or_create_unknown_svalue (sval->get_type ());
3714 400 : this_cluster->bind_key (key, unknown);
3715 : }
3716 : }
3717 : }
3718 4661 : }
3719 :
3720 : /* Use R to replay the bindings from SUMMARY into this object. */
3721 :
3722 : void
3723 1629 : store::replay_call_summary (call_summary_replay &r,
3724 : const store &summary)
3725 : {
3726 1629 : if (summary.m_called_unknown_fn)
3727 : {
3728 : /* A call to an external function occurred in the summary.
3729 : Hence we need to invalidate our knownledge of globals,
3730 : escaped regions, etc. */
3731 40 : on_unknown_fncall (r.get_call_stmt (),
3732 : r.get_store_manager (),
3733 40 : conjured_purge (r.get_caller_model (),
3734 40 : r.get_ctxt ()));
3735 : }
3736 :
3737 1629 : auto_vec<const region *> keys (summary.m_cluster_map.elements ());
3738 13997 : for (auto kv : summary.m_cluster_map)
3739 12368 : keys.quick_push (kv.first);
3740 1629 : keys.qsort (region::cmp_ptr_ptr);
3741 17247 : for (auto base_reg : keys)
3742 12368 : replay_call_summary_cluster (r, summary, base_reg);
3743 1629 : }
3744 :
/* Use R and SUMMARY to replay the bindings in SUMMARY_CLUSTER
   into this object.

   SUMMARY_BASE_REG is the base region of the cluster within SUMMARY;
   its bindings (and escaped/touched flags) are converted into the
   caller's context and written into this store.  If any part of the
   conversion fails, that part of the cluster is silently dropped.  */

void
store::replay_call_summary_cluster (call_summary_replay &r,
				    const store &summary,
				    const region *summary_base_reg)
{
  const call_details &cd = r.get_call_details ();
  region_model_manager *reg_mgr = r.get_manager ();
  store_manager *mgr = reg_mgr->get_store_manager ();
  const binding_cluster *summary_cluster
    = summary.get_cluster (summary_base_reg);

  /* Handle "ESCAPED" and "TOUCHED" flags.
     Propagate them to the corresponding caller cluster, but only if the
     region converts to something we can track in the caller.  */
  if (summary_cluster->escaped_p () || summary_cluster->touched_p ())
    if (const region *caller_reg
	= r.convert_region_from_summary (summary_base_reg))
      {
	const region *caller_base_reg = caller_reg->get_base_region ();
	if (caller_base_reg->tracked_p ()
	    && !caller_base_reg->symbolic_for_unknown_ptr_p ())
	  {
	    binding_cluster *caller_cluster
	      = get_or_create_cluster (*mgr, caller_base_reg);
	    if (summary_cluster->escaped_p ())
	      caller_cluster->mark_as_escaped ();
	    if (summary_cluster->touched_p ())
	      caller_cluster->m_touched = true;
	  }
      }

  /* How the bindings themselves are replayed depends on the kind of
     the base region.  */
  switch (summary_base_reg->get_kind ())
    {
    /* Top-level regions.  */
    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_STACK:
    case RK_HEAP:
    case RK_THREAD_LOCAL:
    case RK_ROOT:
    /* Child regions.  */
    case RK_FIELD:
    case RK_ELEMENT:
    case RK_OFFSET:
    case RK_SIZED:
    case RK_CAST:
    case RK_BIT_RANGE:
    /* Other regions.  */
    case RK_VAR_ARG:
    case RK_UNKNOWN:
      /* These should never be the base region of a binding cluster.  */
      gcc_unreachable ();
      break;

    case RK_FUNCTION:
    case RK_LABEL:
    case RK_STRING:
      /* These can be marked as escaping.  */
      break;

    case RK_SYMBOLIC:
      {
	/* The cluster is the target of a pointer dereference in the
	   summary.  Convert the pointer into the caller's terms, then
	   write the (converted) bound value through it.  */
	const symbolic_region *summary_symbolic_reg
	  = as_a <const symbolic_region *> (summary_base_reg);
	const svalue *summary_ptr_sval = summary_symbolic_reg->get_pointer ();
	const svalue *caller_ptr_sval
	  = r.convert_svalue_from_summary (summary_ptr_sval);
	if (!caller_ptr_sval)
	  return; /* Pointer doesn't convert; drop the binding.  */
	const region *caller_dest_reg
	  = cd.get_model ()->deref_rvalue (caller_ptr_sval,
					   NULL_TREE,
					   cd.get_ctxt ());
	const svalue *summary_sval
	  = summary.get_any_binding (mgr, summary_base_reg);
	if (!summary_sval)
	  return; /* Nothing bound; nothing to replay.  */
	const svalue *caller_sval
	  = r.convert_svalue_from_summary (summary_sval);
	if (!caller_sval)
	  /* The value doesn't convert; fall back to "unknown" of the
	     same type rather than losing the write entirely.  */
	  caller_sval =
	    reg_mgr->get_or_create_unknown_svalue (summary_sval->get_type ());
	set_value (mgr, caller_dest_reg,
		   caller_sval, nullptr /* uncertainty_t * */);
      }
      break;

    case RK_HEAP_ALLOCATED:
    case RK_DECL:
    case RK_ERRNO:
    case RK_PRIVATE:
      {
	/* Directly-addressable regions: convert the region itself and
	   copy the cluster's value across.  */
	const region *caller_dest_reg
	  = r.convert_region_from_summary (summary_base_reg);
	if (!caller_dest_reg)
	  return; /* Region doesn't convert; drop the cluster.  */
	const svalue *summary_sval
	  = summary.get_any_binding (mgr, summary_base_reg);
	if (!summary_sval)
	  /* No single binding covers the region; replay the cluster's
	     map as a compound value instead.  */
	  summary_sval = reg_mgr->get_or_create_compound_svalue
	    (summary_base_reg->get_type (),
	     summary_cluster->get_map ());
	const svalue *caller_sval
	  = r.convert_svalue_from_summary (summary_sval);
	if (!caller_sval)
	  /* As above: degrade unconvertible values to "unknown".  */
	  caller_sval =
	    reg_mgr->get_or_create_unknown_svalue (summary_sval->get_type ());
	set_value (mgr, caller_dest_reg,
		   caller_sval, nullptr /* uncertainty_t * */);
      }
      break;

    case RK_ALLOCA:
      /* Ignore bindings of alloca regions in the summary.  */
      break;
    }
}
3864 :
3865 : #if CHECKING_P
3866 :
3867 : namespace selftest {
3868 :
3869 : /* Verify that bit_range::intersects_p works as expected. */
3870 :
3871 : static void
3872 4 : test_bit_range_intersects_p ()
3873 : {
3874 4 : bit_range b0 (0, 1);
3875 4 : bit_range b1 (1, 1);
3876 4 : bit_range b2 (2, 1);
3877 4 : bit_range b3 (3, 1);
3878 4 : bit_range b4 (4, 1);
3879 4 : bit_range b5 (5, 1);
3880 4 : bit_range b6 (6, 1);
3881 4 : bit_range b7 (7, 1);
3882 4 : bit_range b1_to_6 (1, 6);
3883 4 : bit_range b0_to_7 (0, 8);
3884 4 : bit_range b3_to_5 (3, 3);
3885 4 : bit_range b6_to_7 (6, 2);
3886 :
3887 : /* self-intersection is true. */
3888 4 : ASSERT_TRUE (b0.intersects_p (b0));
3889 4 : ASSERT_TRUE (b7.intersects_p (b7));
3890 4 : ASSERT_TRUE (b1_to_6.intersects_p (b1_to_6));
3891 4 : ASSERT_TRUE (b0_to_7.intersects_p (b0_to_7));
3892 :
3893 4 : ASSERT_FALSE (b0.intersects_p (b1));
3894 4 : ASSERT_FALSE (b1.intersects_p (b0));
3895 4 : ASSERT_FALSE (b0.intersects_p (b7));
3896 4 : ASSERT_FALSE (b7.intersects_p (b0));
3897 :
3898 4 : ASSERT_TRUE (b0_to_7.intersects_p (b0));
3899 4 : ASSERT_TRUE (b0_to_7.intersects_p (b7));
3900 4 : ASSERT_TRUE (b0.intersects_p (b0_to_7));
3901 4 : ASSERT_TRUE (b7.intersects_p (b0_to_7));
3902 :
3903 4 : ASSERT_FALSE (b0.intersects_p (b1_to_6));
3904 4 : ASSERT_FALSE (b1_to_6.intersects_p (b0));
3905 4 : ASSERT_TRUE (b1.intersects_p (b1_to_6));
3906 4 : ASSERT_TRUE (b1_to_6.intersects_p (b1));
3907 4 : ASSERT_TRUE (b1_to_6.intersects_p (b6));
3908 4 : ASSERT_FALSE (b1_to_6.intersects_p (b7));
3909 :
3910 4 : ASSERT_TRUE (b1_to_6.intersects_p (b0_to_7));
3911 4 : ASSERT_TRUE (b0_to_7.intersects_p (b1_to_6));
3912 :
3913 4 : ASSERT_FALSE (b3_to_5.intersects_p (b6_to_7));
3914 4 : ASSERT_FALSE (b6_to_7.intersects_p (b3_to_5));
3915 :
3916 4 : bit_range r1 (0,0);
3917 4 : bit_range r2 (0,0);
3918 4 : ASSERT_TRUE (b1_to_6.intersects_p (b0_to_7, &r1, &r2));
3919 4 : ASSERT_EQ (r1.get_start_bit_offset (), 0);
3920 4 : ASSERT_EQ (r1.m_size_in_bits, 6);
3921 4 : ASSERT_EQ (r2.get_start_bit_offset (), 1);
3922 4 : ASSERT_EQ (r2.m_size_in_bits, 6);
3923 :
3924 4 : ASSERT_TRUE (b0_to_7.intersects_p (b1_to_6, &r1, &r2));
3925 4 : ASSERT_EQ (r1.get_start_bit_offset (), 1);
3926 4 : ASSERT_EQ (r1.m_size_in_bits, 6);
3927 4 : ASSERT_EQ (r2.get_start_bit_offset (), 0);
3928 4 : ASSERT_EQ (r2.m_size_in_bits, 6);
3929 4 : }
3930 :
3931 : /* Implementation detail of ASSERT_BIT_RANGE_FROM_MASK_EQ. */
3932 :
3933 : static void
3934 76 : assert_bit_range_from_mask_eq (const location &loc,
3935 : unsigned HOST_WIDE_INT mask,
3936 : const bit_range &expected)
3937 : {
3938 76 : bit_range actual (0, 0);
3939 76 : bool ok = bit_range::from_mask (mask, &actual);
3940 76 : ASSERT_TRUE_AT (loc, ok);
3941 76 : ASSERT_EQ_AT (loc, actual, expected);
3942 76 : }
3943 :
3944 : /* Assert that bit_range::from_mask (MASK) returns true, and writes
3945 : out EXPECTED_BIT_RANGE. */
3946 :
3947 : #define ASSERT_BIT_RANGE_FROM_MASK_EQ(MASK, EXPECTED_BIT_RANGE) \
3948 : SELFTEST_BEGIN_STMT \
3949 : assert_bit_range_from_mask_eq (SELFTEST_LOCATION, MASK, \
3950 : EXPECTED_BIT_RANGE); \
3951 : SELFTEST_END_STMT
3952 :
3953 : /* Implementation detail of ASSERT_NO_BIT_RANGE_FROM_MASK. */
3954 :
3955 : static void
3956 12 : assert_no_bit_range_from_mask_eq (const location &loc,
3957 : unsigned HOST_WIDE_INT mask)
3958 : {
3959 12 : bit_range actual (0, 0);
3960 12 : bool ok = bit_range::from_mask (mask, &actual);
3961 12 : ASSERT_FALSE_AT (loc, ok);
3962 12 : }
3963 :
3964 : /* Assert that bit_range::from_mask (MASK) returns false. */
3965 :
3966 : #define ASSERT_NO_BIT_RANGE_FROM_MASK(MASK) \
3967 : SELFTEST_BEGIN_STMT \
3968 : assert_no_bit_range_from_mask_eq (SELFTEST_LOCATION, MASK); \
3969 : SELFTEST_END_STMT
3970 :
3971 : /* Verify that bit_range::from_mask works as expected. */
3972 :
3973 : static void
3974 4 : test_bit_range_from_mask ()
3975 : {
3976 : /* Should fail on zero. */
3977 4 : ASSERT_NO_BIT_RANGE_FROM_MASK (0);
3978 :
3979 : /* Verify 1-bit masks. */
3980 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (1, bit_range (0, 1));
3981 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (2, bit_range (1, 1));
3982 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (4, bit_range (2, 1));
3983 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (8, bit_range (3, 1));
3984 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (16, bit_range (4, 1));
3985 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (32, bit_range (5, 1));
3986 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (64, bit_range (6, 1));
3987 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (128, bit_range (7, 1));
3988 :
3989 : /* Verify N-bit masks starting at bit 0. */
3990 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (3, bit_range (0, 2));
3991 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (7, bit_range (0, 3));
3992 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (15, bit_range (0, 4));
3993 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (31, bit_range (0, 5));
3994 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (63, bit_range (0, 6));
3995 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (127, bit_range (0, 7));
3996 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (255, bit_range (0, 8));
3997 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0xffff, bit_range (0, 16));
3998 :
3999 : /* Various other tests. */
4000 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0x30, bit_range (4, 2));
4001 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0x700, bit_range (8, 3));
4002 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0x600, bit_range (9, 2));
4003 :
4004 : /* Multiple ranges of set bits should fail. */
4005 4 : ASSERT_NO_BIT_RANGE_FROM_MASK (0x101);
4006 4 : ASSERT_NO_BIT_RANGE_FROM_MASK (0xf0f0f0f0);
4007 4 : }
4008 :
4009 : /* Implementation detail of ASSERT_OVERLAP. */
4010 :
4011 : static void
4012 56 : assert_overlap (const location &loc,
4013 : const concrete_binding *b1,
4014 : const concrete_binding *b2)
4015 : {
4016 56 : ASSERT_TRUE_AT (loc, b1->overlaps_p (*b2));
4017 56 : ASSERT_TRUE_AT (loc, b2->overlaps_p (*b1));
4018 56 : }
4019 :
4020 : /* Implementation detail of ASSERT_DISJOINT. */
4021 :
4022 : static void
4023 20 : assert_disjoint (const location &loc,
4024 : const concrete_binding *b1,
4025 : const concrete_binding *b2)
4026 : {
4027 20 : ASSERT_FALSE_AT (loc, b1->overlaps_p (*b2));
4028 20 : ASSERT_FALSE_AT (loc, b2->overlaps_p (*b1));
4029 20 : }
4030 :
/* Assert that B1 and B2 overlap, checking both ways.
   Failures are reported against the invocation site.  */

#define ASSERT_OVERLAP(B1, B2) \
  SELFTEST_BEGIN_STMT \
  assert_overlap (SELFTEST_LOCATION, B1, B2); \
  SELFTEST_END_STMT

/* Assert that B1 and B2 do not overlap, checking both ways.
   Failures are reported against the invocation site.  */

#define ASSERT_DISJOINT(B1, B2) \
  SELFTEST_BEGIN_STMT \
  assert_disjoint (SELFTEST_LOCATION, B1, B2); \
  SELFTEST_END_STMT
4044 :
4045 : /* Verify that concrete_binding::overlaps_p works as expected. */
4046 :
4047 : static void
4048 4 : test_binding_key_overlap ()
4049 : {
4050 4 : store_manager mgr (nullptr);
4051 :
4052 : /* Various 8-bit bindings. */
4053 4 : const concrete_binding *cb_0_7 = mgr.get_concrete_binding (0, 8);
4054 4 : const concrete_binding *cb_8_15 = mgr.get_concrete_binding (8, 8);
4055 4 : const concrete_binding *cb_16_23 = mgr.get_concrete_binding (16, 8);
4056 4 : const concrete_binding *cb_24_31 = mgr.get_concrete_binding (24, 8);
4057 :
4058 : /* 16-bit bindings. */
4059 4 : const concrete_binding *cb_0_15 = mgr.get_concrete_binding (0, 16);
4060 4 : const concrete_binding *cb_8_23 = mgr.get_concrete_binding (8, 16);
4061 4 : const concrete_binding *cb_16_31 = mgr.get_concrete_binding (16, 16);
4062 :
4063 : /* 32-bit binding. */
4064 4 : const concrete_binding *cb_0_31 = mgr.get_concrete_binding (0, 32);
4065 :
4066 : /* Everything should self-overlap. */
4067 4 : ASSERT_OVERLAP (cb_0_7, cb_0_7);
4068 4 : ASSERT_OVERLAP (cb_8_15, cb_8_15);
4069 4 : ASSERT_OVERLAP (cb_16_23, cb_16_23);
4070 4 : ASSERT_OVERLAP (cb_24_31, cb_24_31);
4071 4 : ASSERT_OVERLAP (cb_0_15, cb_0_15);
4072 4 : ASSERT_OVERLAP (cb_8_23, cb_8_23);
4073 4 : ASSERT_OVERLAP (cb_16_31, cb_16_31);
4074 4 : ASSERT_OVERLAP (cb_0_31, cb_0_31);
4075 :
4076 : /* Verify the 8-bit bindings that don't overlap each other. */
4077 4 : ASSERT_DISJOINT (cb_0_7, cb_8_15);
4078 4 : ASSERT_DISJOINT (cb_8_15, cb_16_23);
4079 :
4080 : /* Check for overlap of differently-sized bindings. */
4081 4 : ASSERT_OVERLAP (cb_0_7, cb_0_31);
4082 : /* ...and with differing start points. */
4083 4 : ASSERT_OVERLAP (cb_8_15, cb_0_31);
4084 4 : ASSERT_DISJOINT (cb_8_15, cb_16_31);
4085 4 : ASSERT_OVERLAP (cb_16_23, cb_0_31);
4086 4 : ASSERT_OVERLAP (cb_16_31, cb_0_31);
4087 :
4088 4 : ASSERT_DISJOINT (cb_0_7, cb_8_23);
4089 4 : ASSERT_OVERLAP (cb_8_23, cb_16_23);
4090 4 : ASSERT_OVERLAP (cb_8_23, cb_16_31);
4091 4 : ASSERT_DISJOINT (cb_8_23, cb_24_31);
4092 4 : }
4093 :
4094 : static void
4095 4 : test_binding_map_ops ()
4096 : {
4097 4 : region_model_manager region_mgr;
4098 4 : store_manager store_mgr (®ion_mgr);
4099 :
4100 : /* Assignment of empty. */
4101 4 : {
4102 4 : binding_map src (store_mgr);
4103 4 : binding_map dst (store_mgr);
4104 4 : dst = src;
4105 :
4106 4 : ASSERT_EQ (src, dst);
4107 4 : }
4108 4 : }
4109 :
/* Run all of the selftests within this file.  */

void
analyzer_store_cc_tests ()
{
  /* bit_range helpers.  */
  test_bit_range_intersects_p ();
  test_bit_range_from_mask ();
  /* Binding keys and maps.  */
  test_binding_key_overlap ();
  test_binding_map_ops ();
}
4120 :
4121 : } // namespace selftest
4122 :
4123 : #endif /* CHECKING_P */
4124 :
4125 : } // namespace ana
4126 :
4127 : #endif /* #if ENABLE_ANALYZER */
|