Line data Source code
1 : /* Classes for modeling the state of memory.
2 : Copyright (C) 2020-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "analyzer/common.h"
22 :
23 : #include "ordered-hash-map.h"
24 : #include "cfg.h"
25 : #include "sbitmap.h"
26 : #include "stor-layout.h"
27 :
28 : #include "text-art/tree-widget.h"
29 :
30 : #include "analyzer/analyzer-logging.h"
31 : #include "analyzer/supergraph.h"
32 : #include "analyzer/call-string.h"
33 : #include "analyzer/program-point.h"
34 : #include "analyzer/store.h"
35 : #include "analyzer/region-model.h"
36 : #include "analyzer/call-summary.h"
37 : #include "analyzer/analyzer-selftests.h"
38 :
39 : #if ENABLE_ANALYZER
40 :
41 : namespace ana {
42 :
43 : /* Dump SVALS to PP, sorting them to ensure determinism. */
44 :
45 : static void
46 482 : dump_svalue_set (const hash_set <const svalue *> &svals,
47 : pretty_printer *pp, bool simple)
48 : {
49 482 : auto_vec <const svalue *> v;
50 482 : for (hash_set<const svalue *>::iterator iter = svals.begin ();
51 552 : iter != svals.end (); ++iter)
52 : {
53 35 : v.safe_push (*iter);
54 : }
55 482 : v.qsort (svalue::cmp_ptr_ptr);
56 :
57 482 : pp_character (pp, '{');
58 482 : const svalue *sval;
59 482 : unsigned i;
60 999 : FOR_EACH_VEC_ELT (v, i, sval)
61 : {
62 35 : if (i > 0)
63 20 : pp_string (pp, ", ");
64 35 : sval->dump_to_pp (pp, simple);
65 : }
66 482 : pp_character (pp, '}');
67 482 : }
68 :
69 : /* class uncertainty_t. */
70 :
71 : /* Dump this object to PP. */
72 :
73 : void
74 241 : uncertainty_t::dump_to_pp (pretty_printer *pp, bool simple) const
75 : {
76 241 : pp_string (pp, "{m_maybe_bound_svals: ");
77 241 : dump_svalue_set (m_maybe_bound_svals, pp, simple);
78 :
79 241 : pp_string (pp, ", m_mutable_at_unknown_call_svals: ");
80 241 : dump_svalue_set (m_mutable_at_unknown_call_svals, pp, simple);
81 241 : pp_string (pp, "}");
82 241 : }
83 :
84 : /* Dump this object to stderr. */
85 :
86 : DEBUG_FUNCTION void
87 0 : uncertainty_t::dump (bool simple) const
88 : {
89 0 : tree_dump_pretty_printer pp (stderr);
90 0 : dump_to_pp (&pp, simple);
91 0 : pp_newline (&pp);
92 0 : }
93 :
94 : /* class binding_key. */
95 :
96 : const binding_key *
97 3587640 : binding_key::make (store_manager *mgr, const region *r)
98 : {
99 3587640 : region_offset offset = r->get_offset (mgr->get_svalue_manager ());
100 3587640 : if (offset.symbolic_p ())
101 34802 : return mgr->get_symbolic_binding (r);
102 : else
103 : {
104 3552838 : bit_size_t bit_size;
105 3552838 : if (r->get_bit_size (&bit_size))
106 : {
107 : /* Must be non-empty. */
108 2241795 : gcc_assert (bit_size > 0);
109 2241795 : return mgr->get_concrete_binding (offset.get_bit_offset (),
110 2241795 : bit_size);
111 : }
112 : else
113 1311043 : return mgr->get_symbolic_binding (r);
114 : }
115 : }
116 :
117 : /* Dump this binding_key to stderr. */
118 :
119 : DEBUG_FUNCTION void
120 0 : binding_key::dump (bool simple) const
121 : {
122 0 : tree_dump_pretty_printer pp (stderr);
123 0 : dump_to_pp (&pp, simple);
124 0 : pp_newline (&pp);
125 0 : }
126 :
127 : /* Get a description of this binding_key. */
128 :
129 : label_text
130 0 : binding_key::get_desc (bool simple) const
131 : {
132 0 : pretty_printer pp;
133 0 : pp_format_decoder (&pp) = default_tree_printer;
134 0 : dump_to_pp (&pp, simple);
135 0 : return label_text::take (xstrdup (pp_formatted_text (&pp)));
136 0 : }
137 :
138 : /* qsort callback. */
139 :
140 : int
141 0 : binding_key::cmp_ptrs (const void *p1, const void *p2)
142 : {
143 0 : const binding_key * const *pk1 = (const binding_key * const *)p1;
144 0 : const binding_key * const *pk2 = (const binding_key * const *)p2;
145 0 : return cmp (*pk1, *pk2);
146 : }
147 :
148 : /* Comparator for binding_keys. */
149 :
150 : int
151 0 : binding_key::cmp (const binding_key *k1, const binding_key *k2)
152 : {
153 0 : int concrete1 = k1->concrete_p ();
154 0 : int concrete2 = k2->concrete_p ();
155 0 : if (int concrete_cmp = concrete1 - concrete2)
156 : return concrete_cmp;
157 0 : if (concrete1)
158 : {
159 0 : const concrete_binding *b1 = (const concrete_binding *)k1;
160 0 : const concrete_binding *b2 = (const concrete_binding *)k2;
161 0 : if (int start_cmp = wi::cmp (b1->get_start_bit_offset (),
162 0 : b2->get_start_bit_offset (),
163 : SIGNED))
164 : return start_cmp;
165 0 : return wi::cmp (b1->get_next_bit_offset (), b2->get_next_bit_offset (),
166 : SIGNED);
167 : }
168 : else
169 : {
170 0 : const symbolic_binding *s1 = (const symbolic_binding *)k1;
171 0 : const symbolic_binding *s2 = (const symbolic_binding *)k2;
172 0 : if (s1 > s2)
173 : return 1;
174 0 : if (s1 < s2)
175 : return -1;
176 0 : return 0;
177 : }
178 : }
179 :
180 : /* struct bit_range. */
181 :
182 : void
183 2021 : bit_range::dump_to_pp (pretty_printer *pp) const
184 : {
185 2021 : byte_range bytes (0, 0);
186 2021 : if (as_byte_range (&bytes))
187 2021 : bytes.dump_to_pp (pp);
188 : else
189 : {
190 0 : pp_string (pp, "start: ");
191 0 : pp_wide_int (pp, m_start_bit_offset, SIGNED);
192 0 : pp_string (pp, ", size: ");
193 0 : pp_wide_int (pp, m_size_in_bits, SIGNED);
194 0 : pp_string (pp, ", next: ");
195 0 : pp_wide_int (pp, get_next_bit_offset (), SIGNED);
196 : }
197 2021 : }
198 :
199 : /* Dump this object to stderr. */
200 :
201 : DEBUG_FUNCTION void
202 0 : bit_range::dump () const
203 : {
204 0 : tree_dump_pretty_printer pp (stderr);
205 0 : dump_to_pp (&pp);
206 0 : pp_newline (&pp);
207 0 : }
208 :
209 : /* Generate a JSON value for this bit_range.
210 : This is intended for debugging the analyzer rather
211 : than serialization. */
212 :
213 : std::unique_ptr<json::object>
214 4 : bit_range::to_json () const
215 : {
216 4 : auto obj = std::make_unique<json::object> ();
217 4 : obj->set ("start_bit_offset",
218 4 : bit_offset_to_json (m_start_bit_offset));
219 4 : obj->set ("size_in_bits",
220 4 : bit_offset_to_json (m_size_in_bits));
221 4 : return obj;
222 : }
223 :
224 : /* If OTHER is a subset of this, return true and, if OUT is
225 : non-null, write to *OUT the relative range of OTHER within this.
226 : Otherwise return false. */
227 :
228 : bool
229 23460 : bit_range::contains_p (const bit_range &other, bit_range *out) const
230 : {
231 23460 : if (contains_p (other.get_start_bit_offset ())
232 23460 : && contains_p (other.get_last_bit_offset ()))
233 : {
234 15915 : if (out)
235 : {
236 15915 : out->m_start_bit_offset = other.m_start_bit_offset - m_start_bit_offset;
237 15915 : out->m_size_in_bits = other.m_size_in_bits;
238 : }
239 15915 : return true;
240 : }
241 : else
242 7545 : return false;
243 : }
244 :
245 : /* If OTHER intersects this, return true and write
246 : the relative range of OTHER within THIS to *OUT_THIS,
247 : and the relative range of THIS within OTHER to *OUT_OTHER.
248 : Otherwise return false. */
249 :
250 : bool
251 84 : bit_range::intersects_p (const bit_range &other,
252 : bit_range *out_this,
253 : bit_range *out_other) const
254 : {
255 84 : if (get_start_bit_offset () < other.get_next_bit_offset ()
256 168 : && other.get_start_bit_offset () < get_next_bit_offset ())
257 : {
258 84 : bit_offset_t overlap_start
259 84 : = MAX (get_start_bit_offset (),
260 : other.get_start_bit_offset ());
261 84 : bit_offset_t overlap_next
262 84 : = MIN (get_next_bit_offset (),
263 : other.get_next_bit_offset ());
264 84 : if (overlap_next <= overlap_start)
265 : /* If this has happened, some kind of overflow has happened in
266 : our arithmetic. For now, reject such cases. */
267 : return false;
268 84 : bit_range abs_overlap_bits (overlap_start, overlap_next - overlap_start);
269 84 : *out_this = abs_overlap_bits - get_start_bit_offset ();
270 84 : *out_other = abs_overlap_bits - other.get_start_bit_offset ();
271 84 : return true;
272 : }
273 : else
274 0 : return false;
275 : }
276 :
277 : /* Return true if THIS and OTHER intersect and write the number
278 : of bits both buffers overlap to *OUT_NUM_OVERLAP_BITS.
279 :
280 : Otherwise return false. */
281 :
282 : bool
283 0 : bit_range::intersects_p (const bit_range &other,
284 : bit_size_t *out_num_overlap_bits) const
285 : {
286 0 : if (get_start_bit_offset () < other.get_next_bit_offset ()
287 0 : && other.get_start_bit_offset () < get_next_bit_offset ())
288 : {
289 0 : bit_offset_t overlap_start = MAX (get_start_bit_offset (),
290 : other.get_start_bit_offset ());
291 0 : bit_offset_t overlap_next = MIN (get_next_bit_offset (),
292 : other.get_next_bit_offset ());
293 0 : if (overlap_next <= overlap_start)
294 : /* If this has happened, some kind of overflow has happened in
295 : our arithmetic. For now, reject such cases. */
296 : return false;
297 0 : *out_num_overlap_bits = overlap_next - overlap_start;
298 0 : return true;
299 : }
300 : else
301 0 : return false;
302 : }
303 :
304 : /* Return true if THIS exceeds OTHER and write the overhanging
305 : bit range to OUT_OVERHANGING_BIT_RANGE. */
306 :
307 : bool
308 787237 : bit_range::exceeds_p (const bit_range &other,
309 : bit_range *out_overhanging_bit_range) const
310 : {
311 787237 : gcc_assert (!empty_p ());
312 :
313 787237 : if (other.get_next_bit_offset () < get_next_bit_offset ())
314 : {
315 : /* THIS definitely exceeds OTHER. */
316 460 : bit_offset_t start = MAX (get_start_bit_offset (),
317 : other.get_next_bit_offset ());
318 460 : bit_offset_t size = get_next_bit_offset () - start;
319 460 : if (size <= 0)
320 : /* If this has happened, some kind of overflow has happened in
321 : our arithmetic. For now, reject such cases. */
322 : return false;
323 440 : out_overhanging_bit_range->m_start_bit_offset = start;
324 440 : out_overhanging_bit_range->m_size_in_bits = size;
325 440 : return true;
326 : }
327 : else
328 : return false;
329 : }
330 :
331 : /* Return true if THIS falls short of OFFSET and write the
332 : bit range fallen short to OUT_FALL_SHORT_BITS. */
333 :
334 : bool
335 790911 : bit_range::falls_short_of_p (bit_offset_t offset,
336 : bit_range *out_fall_short_bits) const
337 : {
338 790911 : gcc_assert (!empty_p ());
339 :
340 790911 : if (get_start_bit_offset () < offset)
341 : {
342 : /* THIS falls short of OFFSET. */
343 212 : bit_offset_t start = get_start_bit_offset ();
344 212 : bit_offset_t size = MIN (offset, get_next_bit_offset ()) - start;
345 212 : if (size <= 0)
346 : /* If this has happened, some kind of overflow has happened in
347 : our arithmetic. For now, reject such cases. */
348 : return false;
349 192 : out_fall_short_bits->m_start_bit_offset = start;
350 192 : out_fall_short_bits->m_size_in_bits = size;
351 192 : return true;
352 : }
353 : else
354 : return false;
355 : }
356 :
357 : int
358 444 : bit_range::cmp (const bit_range &br1, const bit_range &br2)
359 : {
360 444 : if (int start_cmp = wi::cmps (br1.m_start_bit_offset,
361 444 : br2.m_start_bit_offset))
362 : return start_cmp;
363 :
364 48 : return wi::cmpu (br1.m_size_in_bits, br2.m_size_in_bits);
365 : }
366 :
367 : /* Offset this range by OFFSET. */
368 :
369 : bit_range
370 168 : bit_range::operator- (bit_offset_t offset) const
371 : {
372 168 : return bit_range (m_start_bit_offset - offset, m_size_in_bits);
373 : }
374 :
375 : /* If MASK is a contiguous range of set bits, write them
376 : to *OUT and return true.
377 : Otherwise return false. */
378 :
379 : bool
380 144 : bit_range::from_mask (unsigned HOST_WIDE_INT mask, bit_range *out)
381 : {
382 144 : unsigned iter_bit_idx = 0;
383 144 : unsigned HOST_WIDE_INT iter_bit_mask = 1;
384 :
385 : /* Find the first contiguous run of set bits in MASK. */
386 :
387 : /* Find first set bit in MASK. */
388 780 : while (iter_bit_idx < HOST_BITS_PER_WIDE_INT)
389 : {
390 776 : if (mask & iter_bit_mask)
391 : break;
392 636 : iter_bit_idx++;
393 636 : iter_bit_mask <<= 1;
394 : }
395 144 : if (iter_bit_idx == HOST_BITS_PER_WIDE_INT)
396 : /* MASK is zero. */
397 : return false;
398 :
399 140 : unsigned first_set_iter_bit_idx = iter_bit_idx;
400 140 : unsigned num_set_bits = 1;
401 140 : iter_bit_idx++;
402 140 : iter_bit_mask <<= 1;
403 :
404 : /* Find next unset bit in MASK. */
405 396 : while (iter_bit_idx < HOST_BITS_PER_WIDE_INT)
406 : {
407 396 : if (!(mask & iter_bit_mask))
408 : break;
409 256 : num_set_bits++;
410 256 : iter_bit_idx++;
411 256 : iter_bit_mask <<= 1;
412 : }
413 140 : if (iter_bit_idx == HOST_BITS_PER_WIDE_INT)
414 : {
415 0 : *out = bit_range (first_set_iter_bit_idx, num_set_bits);
416 0 : return true;
417 : }
418 :
419 : /* We now have the first contiguous run of set bits in MASK.
420 : Fail if any other bits are set. */
421 7892 : while (iter_bit_idx < HOST_BITS_PER_WIDE_INT)
422 : {
423 7760 : if (mask & iter_bit_mask)
424 : return false;
425 7752 : iter_bit_idx++;
426 7752 : iter_bit_mask <<= 1;
427 : }
428 :
429 132 : *out = bit_range (first_set_iter_bit_idx, num_set_bits);
430 132 : return true;
431 : }
432 :
433 : /* Attempt to convert this bit_range to a byte_range.
434 : Return true if it is possible, writing the result to *OUT.
435 : Otherwise return false. */
436 :
437 : bool
438 8171 : bit_range::as_byte_range (byte_range *out) const
439 : {
440 8171 : if (m_start_bit_offset % BITS_PER_UNIT == 0
441 8171 : && m_size_in_bits % BITS_PER_UNIT == 0)
442 : {
443 8158 : out->m_start_byte_offset = m_start_bit_offset / BITS_PER_UNIT;
444 8158 : out->m_size_in_bytes = m_size_in_bits / BITS_PER_UNIT;
445 8158 : return true;
446 : }
447 : return false;
448 : }
449 :
450 : /* Dump this object to PP. */
451 :
452 : void
453 2024 : byte_range::dump_to_pp (pretty_printer *pp) const
454 : {
455 2024 : if (m_size_in_bytes == 0)
456 : {
457 0 : pp_string (pp, "empty");
458 : }
459 2024 : else if (m_size_in_bytes == 1)
460 : {
461 1447 : pp_string (pp, "byte ");
462 1447 : pp_wide_int (pp, m_start_byte_offset, SIGNED);
463 : }
464 : else
465 : {
466 577 : pp_string (pp, "bytes ");
467 577 : pp_wide_int (pp, m_start_byte_offset, SIGNED);
468 577 : pp_string (pp, "-");
469 577 : pp_wide_int (pp, get_last_byte_offset (), SIGNED);
470 : }
471 2024 : }
472 :
473 : /* Dump this object to stderr. */
474 :
475 : DEBUG_FUNCTION void
476 0 : byte_range::dump () const
477 : {
478 0 : tree_dump_pretty_printer pp (stderr);
479 0 : dump_to_pp (&pp);
480 0 : pp_newline (&pp);
481 0 : }
482 :
483 : /* Generate a JSON value for this byte_range.
484 : This is intended for debugging the analyzer rather
485 : than serialization. */
486 :
487 : std::unique_ptr<json::object>
488 4 : byte_range::to_json () const
489 : {
490 4 : auto obj = std::make_unique<json::object> ();
491 4 : obj->set ("start_byte_offset",
492 4 : byte_offset_to_json (m_start_byte_offset));
493 4 : obj->set ("size_in_bytes",
494 4 : byte_offset_to_json (m_size_in_bytes));
495 4 : return obj;
496 : }
497 :
498 : /* If OTHER is a subset of this, return true and write
499 : to *OUT the relative range of OTHER within this.
500 : Otherwise return false. */
501 :
502 : bool
503 941 : byte_range::contains_p (const byte_range &other, byte_range *out) const
504 : {
505 941 : if (contains_p (other.get_start_byte_offset ())
506 941 : && contains_p (other.get_last_byte_offset ()))
507 : {
508 394 : out->m_start_byte_offset = other.m_start_byte_offset - m_start_byte_offset;
509 394 : out->m_size_in_bytes = other.m_size_in_bytes;
510 394 : return true;
511 : }
512 : else
513 547 : return false;
514 : }
515 :
516 : /* qsort comparator for byte ranges. */
517 :
518 : int
519 1994 : byte_range::cmp (const byte_range &br1, const byte_range &br2)
520 : {
521 : /* Order first by offset. */
522 1994 : if (int start_cmp = wi::cmps (br1.m_start_byte_offset,
523 1994 : br2.m_start_byte_offset))
524 : return start_cmp;
525 :
526 : /* ...then by size. */
527 0 : return wi::cmpu (br1.m_size_in_bytes, br2.m_size_in_bytes);
528 : }
529 :
530 : /* class concrete_binding : public binding_key. */
531 :
532 : /* Implementation of binding_key::dump_to_pp vfunc for concrete_binding. */
533 :
534 : void
535 1133 : concrete_binding::dump_to_pp (pretty_printer *pp, bool) const
536 : {
537 1133 : m_bit_range.dump_to_pp (pp);
538 1133 : }
539 :
540 : /* Return true if this binding overlaps with OTHER. */
541 :
542 : bool
543 304440 : concrete_binding::overlaps_p (const concrete_binding &other) const
544 : {
545 304440 : if (get_start_bit_offset () < other.get_next_bit_offset ()
546 304440 : && get_next_bit_offset () > other.get_start_bit_offset ())
547 59544 : return true;
548 : return false;
549 : }
550 :
551 : /* If this is expressible as a concrete byte range, return true
552 : and write it to *OUT. Otherwise return false. */
553 :
554 : bool
555 0 : concrete_binding::get_byte_range (byte_range *out) const
556 : {
557 0 : return m_bit_range.as_byte_range (out);
558 : }
559 :
560 : /* Comparator for use by vec<const concrete_binding *>::qsort. */
561 :
562 : int
563 41 : concrete_binding::cmp_ptr_ptr (const void *p1, const void *p2)
564 : {
565 41 : const concrete_binding *b1 = *(const concrete_binding * const *)p1;
566 41 : const concrete_binding *b2 = *(const concrete_binding * const *)p2;
567 :
568 41 : return bit_range::cmp (b1->m_bit_range, b2->m_bit_range);
569 : }
570 :
571 : /* class symbolic_binding : public binding_key. */
572 :
573 : void
574 16 : symbolic_binding::dump_to_pp (pretty_printer *pp, bool simple) const
575 : {
576 : //binding_key::dump_to_pp (pp, simple);
577 16 : pp_string (pp, "region: ");
578 16 : m_region->dump_to_pp (pp, simple);
579 16 : }
580 :
581 : /* Comparator for use by vec<const symbolic_binding *>::qsort. */
582 :
583 : int
584 19 : symbolic_binding::cmp_ptr_ptr (const void *p1, const void *p2)
585 : {
586 19 : const symbolic_binding *b1 = *(const symbolic_binding * const *)p1;
587 19 : const symbolic_binding *b2 = *(const symbolic_binding * const *)p2;
588 :
589 19 : return region::cmp_ids (b1->get_region (), b2->get_region ());
590 : }
591 :
592 : /* The store is oblivious to the types of the svalues bound within
593 : it: any type can get bound at any location.
594 : Simplify any casts before binding.
595 :
596 : For example, if we have:
597 : struct big { int ia[1024]; };
598 : struct big src, dst;
599 : memcpy (&dst, &src, sizeof (struct big));
600 : this reaches us in gimple form as:
601 : MEM <unsigned char[4096]> [(char * {ref-all})&dst]
602 : = MEM <unsigned char[4096]> [(char * {ref-all})&src];
603 : Using cast_region when handling the MEM_REF would give us:
604 : INIT_VAL(CAST_REG(unsigned char[4096], src))
605 : as rhs_sval, but we can fold that into a cast svalue:
606 : CAST(unsigned char[4096], INIT_VAL(src))
607 : We can discard that cast from the svalue when binding it in
608 : the store for "dst", and simply store:
609 : cluster for: dst
610 : key: {kind: direct, start: 0, size: 32768, next: 32768}
611 : value: ‘struct big’ {INIT_VAL(src)}. */
612 :
613 : static const svalue *
614 413346 : simplify_for_binding (const svalue *sval)
615 : {
616 413346 : if (const svalue *cast_sval = sval->maybe_undo_cast ())
617 0 : sval = cast_sval;
618 44010 : return sval;
619 : }
620 :
621 : /* class concrete_binding_map. */
622 :
623 : /* Dump a representation of this concrete_binding_map to PP.
624 : SIMPLE controls how values and regions are to be printed.
625 : If MULTILINE, then split the dump over multiple lines and
626 : use whitespace for readability, otherwise put all on one line. */
627 :
628 : void
629 2 : concrete_binding_map::dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const
630 : {
631 2 : bool first = true;
632 4 : for (auto iter : *this)
633 : {
634 2 : const bit_range &bits = iter.first;
635 2 : const svalue *value = iter.second;
636 2 : if (multiline)
637 : {
638 0 : pp_string (pp, " bits: {");
639 0 : bits.dump_to_pp (pp);
640 0 : pp_string (pp, "}");
641 0 : pp_newline (pp);
642 0 : pp_string (pp, " value: ");
643 0 : if (tree t = value->get_type ())
644 0 : dump_quoted_tree (pp, t);
645 0 : pp_string (pp, " {");
646 0 : value->dump_to_pp (pp, simple);
647 0 : pp_string (pp, "}");
648 0 : pp_newline (pp);
649 : }
650 : else
651 : {
652 2 : if (first)
653 : first = false;
654 : else
655 0 : pp_string (pp, ", ");
656 2 : pp_string (pp, "bits: {");
657 2 : bits.dump_to_pp (pp);
658 2 : pp_string (pp, "}, value: {");
659 2 : value->dump_to_pp (pp, simple);
660 2 : pp_string (pp, "}");
661 : }
662 : }
663 2 : }
664 :
665 : void
666 0 : concrete_binding_map::dump (bool simple) const
667 : {
668 0 : tree_dump_pretty_printer pp (stderr);
669 0 : dump_to_pp (&pp, simple, true);
670 0 : pp_newline (&pp);
671 0 : }
672 :
673 : void
674 0 : concrete_binding_map::add_to_tree_widget (text_art::tree_widget &parent_widget,
675 : const text_art::dump_widget_info &dwi) const
676 : {
677 0 : for (auto iter : *this)
678 : {
679 0 : const bit_range &bits = iter.first;
680 0 : const svalue *sval = iter.second;
681 :
682 0 : pretty_printer the_pp;
683 0 : pretty_printer * const pp = &the_pp;
684 0 : pp_format_decoder (pp) = default_tree_printer;
685 0 : pp_show_color (pp) = true;
686 0 : const bool simple = true;
687 :
688 0 : bits.dump_to_pp (pp);
689 0 : pp_string (pp, ": ");
690 0 : if (tree t = sval->get_type ())
691 0 : dump_quoted_tree (pp, t);
692 0 : pp_string (pp, " {");
693 0 : sval->dump_to_pp (pp, simple);
694 0 : pp_string (pp, "}");
695 :
696 0 : parent_widget.add_child (text_art::tree_widget::make (dwi, pp));
697 0 : }
698 0 : }
699 :
700 : void
701 12941338 : concrete_binding_map::validate () const
702 : {
703 27626690 : for (auto iter = m_map.begin (); iter != m_map.end (); ++iter)
704 : {
705 : /* Ensure we don't nest compound svalues
706 : within concrete_binding_map instances. */
707 14685352 : const svalue *sval = iter->second;
708 14685352 : gcc_assert (sval->get_kind () != SK_COMPOUND);
709 :
710 : /* Check for overlapping concrete bindings. */
711 14685352 : auto next (iter);
712 14685352 : ++next;
713 14685352 : if (next != m_map.end ())
714 : {
715 : /* Verify they are in order, and do not overlap. */
716 4115685 : const bit_range &iter_bits = iter->first;
717 4115685 : const bit_range &next_bits = next->first;
718 4115685 : gcc_assert (iter_bits.get_start_bit_offset ()
719 : < next_bits.get_start_bit_offset ());
720 4115685 : gcc_assert (iter_bits.get_last_bit_offset ()
721 : < next_bits.get_start_bit_offset ());
722 4115685 : gcc_assert (iter_bits.get_next_bit_offset ()
723 : <= next_bits.get_start_bit_offset ());
724 : }
725 : }
726 12941338 : }
727 :
728 : /* Comparator for imposing an order on concrete_binding_map instances. */
729 :
730 : int
731 28 : concrete_binding_map::cmp (const concrete_binding_map &map1,
732 : const concrete_binding_map &map2)
733 : {
734 28 : if (int size_cmp = map1.size () - map2.size ())
735 : return size_cmp;
736 :
737 28 : auto iter1 = map1.begin ();
738 28 : auto iter2 = map2.begin ();
739 28 : while (iter1 != map1.end ())
740 : {
741 28 : gcc_assert (iter2 != map2.end ());
742 28 : if (int bit_cmp = bit_range::cmp (iter1->first, iter2->first))
743 : return bit_cmp;
744 28 : if (int sval_cmp = svalue::cmp_ptr (iter1->second, iter2->second))
745 : return sval_cmp;
746 0 : ++iter1;
747 0 : ++iter2;
748 : }
749 : return 0;
750 : }
751 :
752 : /* Get the svalue bound precisely to BITS, if any, otherwise
753 : return nullptr. */
754 :
755 : const svalue *
756 56 : concrete_binding_map::get_any_exact_binding (const bit_range &bits) const
757 : {
758 56 : auto iter = m_map.find (bits);
759 56 : if (iter == m_map.end ())
760 : return nullptr;
761 56 : return iter->second;
762 : }
763 :
764 : /* Calculate what the complexity of a compound_svalue instance for this
765 : map will be, based on the bound svalues. */
766 :
767 : complexity
768 612 : concrete_binding_map::calc_complexity () const
769 : {
770 612 : unsigned num_child_nodes = 0;
771 612 : unsigned max_child_depth = 0;
772 2496 : for (auto iter : m_map)
773 : {
774 1884 : const complexity &sval_c = iter.second->get_complexity ();
775 1884 : num_child_nodes += sval_c.m_num_nodes;
776 1884 : max_child_depth = MAX (max_child_depth, sval_c.m_max_depth);
777 : }
778 612 : return complexity (num_child_nodes + 1, max_child_depth + 1);
779 : }
780 :
781 : void
782 38314 : concrete_binding_map::
783 : remove_overlapping_binding (store_manager *mgr,
784 : const bit_range &bits_to_drop,
785 : const bit_range &affected_bound_bits,
786 : const svalue &old_sval)
787 : {
788 38314 : gcc_assert (bits_to_drop.intersects_p (affected_bound_bits));
789 :
790 : /* Remove existing binding. */
791 38314 : erase (affected_bound_bits);
792 :
793 38314 : if (affected_bound_bits.get_start_bit_offset ()
794 38314 : < bits_to_drop.get_start_bit_offset ())
795 : {
796 : /* We have a truncated prefix. */
797 1520 : bit_range prefix_bits (affected_bound_bits.get_start_bit_offset (),
798 3040 : (bits_to_drop.get_start_bit_offset ()
799 1520 : - affected_bound_bits.get_start_bit_offset ()));
800 1520 : bit_range rel_prefix (0, prefix_bits.m_size_in_bits);
801 1520 : const svalue *prefix_sval
802 1520 : = old_sval.extract_bit_range (NULL_TREE,
803 : rel_prefix,
804 : mgr->get_svalue_manager ());
805 1520 : insert (prefix_bits, prefix_sval);
806 : }
807 :
808 76628 : if (affected_bound_bits.get_next_bit_offset ()
809 76628 : > bits_to_drop.get_next_bit_offset ())
810 : {
811 : /* We have a truncated suffix. */
812 10551 : bit_range suffix_bits (bits_to_drop.get_next_bit_offset (),
813 10551 : (affected_bound_bits.get_next_bit_offset ()
814 21102 : - bits_to_drop.get_next_bit_offset ()));
815 10551 : bit_range rel_suffix (bits_to_drop.get_next_bit_offset ()
816 10551 : - affected_bound_bits.get_start_bit_offset (),
817 10551 : suffix_bits.m_size_in_bits);
818 10551 : const svalue *suffix_sval
819 10551 : = old_sval.extract_bit_range (NULL_TREE,
820 : rel_suffix,
821 : mgr->get_svalue_manager ());
822 10551 : insert (suffix_bits, suffix_sval);
823 : }
824 38314 : }
825 :
826 : void
827 8598 : concrete_binding_map::
828 : remove_overlapping_bindings (store_manager *mgr,
829 : const bit_range &bits_to_drop)
830 : {
831 8598 : auto bindings = get_overlapping_bindings (bits_to_drop);
832 17196 : for (auto iter : bindings)
833 8598 : remove_overlapping_binding (mgr, bits_to_drop, iter.first, iter.second);
834 8598 : }
835 :
836 : std::vector<std::pair<bit_range, const svalue &>>
837 8598 : concrete_binding_map::get_overlapping_bindings (const bit_range &bits)
838 : {
839 8598 : std::vector<std::pair<bit_range, const svalue &>> result;
840 17247 : for (auto iter : m_map)
841 8649 : if (iter.first.intersects_p (bits))
842 : {
843 8598 : gcc_assert (iter.second);
844 8598 : result.push_back ({iter.first, *iter.second});
845 : }
846 8598 : return result;
847 : }
848 :
849 : /* class binding_map::const_iterator. */
850 :
851 : bool
852 25622939 : binding_map::const_iterator::operator== (const binding_map::const_iterator &other) const
853 : {
854 25622939 : if (m_concrete != other.m_concrete)
855 : return false;
856 11611832 : if (m_symbolic != other.m_symbolic)
857 515224 : return false;
858 : return true;
859 : }
860 :
861 : binding_map::const_iterator &
862 14518938 : binding_map::const_iterator::operator++ ()
863 : {
864 14518938 : if (m_concrete != m_map.m_concrete.end ())
865 14003714 : ++m_concrete;
866 : else
867 515224 : ++m_symbolic;
868 14518938 : return *this;
869 : }
870 :
871 : binding_map::binding_pair
872 3516937 : binding_map::const_iterator::operator* ()
873 : {
874 3516937 : if (m_concrete != m_map.m_concrete.end ())
875 : {
876 3414212 : const bit_range &bits = m_concrete->first;
877 3414212 : const svalue *sval = m_concrete->second;
878 3414212 : return binding_pair (m_map.m_store_mgr.get_concrete_binding (bits), sval);
879 : }
880 : else
881 : {
882 102725 : gcc_assert (m_symbolic != m_map.m_symbolic.end ());
883 102725 : const region *reg = m_symbolic->m_region;
884 102725 : const svalue *sval = m_symbolic->m_sval;
885 102725 : return binding_pair (m_map.m_store_mgr.get_symbolic_binding (reg), sval);
886 : }
887 : }
888 :
889 : const svalue *
890 11009394 : binding_map::const_iterator::get_svalue () const
891 : {
892 11009394 : if (m_concrete != m_map.m_concrete.end ())
893 10596895 : return m_concrete->second;
894 : else
895 : {
896 412499 : gcc_assert (m_symbolic != m_map.m_symbolic.end ());
897 412499 : return m_symbolic->m_sval;
898 : }
899 : }
900 :
901 : /* class binding_map::iterator. */
902 :
903 : bool
904 14853301 : binding_map::iterator::operator== (const binding_map::iterator &other) const
905 : {
906 14853301 : if (m_concrete != other.m_concrete)
907 : return false;
908 7082498 : if (m_symbolic != other.m_symbolic)
909 397757 : return false;
910 : return true;
911 : }
912 :
913 : binding_map::iterator &
914 8167777 : binding_map::iterator::operator++ ()
915 : {
916 8167777 : if (m_concrete != m_map.m_concrete.end ())
917 7770025 : ++m_concrete;
918 : else
919 397752 : ++m_symbolic;
920 8167777 : return *this;
921 : }
922 :
923 : binding_map::binding_pair
924 8259319 : binding_map::iterator::operator* ()
925 : {
926 8259319 : if (m_concrete != m_map.m_concrete.end ())
927 : {
928 7855386 : const bit_range &bits = m_concrete->first;
929 7855386 : const svalue *&sval = m_concrete->second;
930 7855386 : return binding_pair (m_map.m_store_mgr.get_concrete_binding (bits), sval);
931 : }
932 : else
933 : {
934 403933 : gcc_assert (m_symbolic != m_map.m_symbolic.end ());
935 403933 : const region *reg = m_symbolic->m_region;
936 403933 : const svalue *&sval = m_symbolic->m_sval;
937 403933 : return binding_pair (m_map.m_store_mgr.get_symbolic_binding (reg), sval);
938 : }
939 : }
940 :
941 : /* class binding_map. */
942 :
943 : // Construct an empty binding_map.
944 :
945 1577185 : binding_map::binding_map (store_manager &store_mgr)
946 1577185 : : m_store_mgr (store_mgr),
947 1577185 : m_concrete (),
948 1577185 : m_symbolic ()
949 : {
950 1577185 : }
951 :
952 : /* binding_map's copy ctor. */
953 :
954 31220645 : binding_map::binding_map (const binding_map &other)
955 31220645 : : m_store_mgr (other.m_store_mgr),
956 31220645 : m_concrete (other.m_concrete),
957 31220645 : m_symbolic (other.m_symbolic)
958 : {
959 31220645 : }
960 :
961 : /* binding_map's assignment operator. */
962 :
963 : binding_map&
964 4 : binding_map::operator= (const binding_map &other)
965 : {
966 4 : gcc_assert (&m_store_mgr == &other.m_store_mgr);
967 :
968 : /* For now, assume we only ever copy to an empty cluster. */
969 4 : gcc_assert (m_concrete.size () == 0);
970 4 : gcc_assert (m_symbolic.size () == 0);
971 :
972 4 : m_concrete = other.m_concrete;
973 4 : m_symbolic = other.m_symbolic;
974 :
975 4 : return *this;
976 : }
977 :
978 : /* binding_map's equality operator. */
979 :
980 : bool
981 3272192 : binding_map::operator== (const binding_map &other) const
982 : {
983 3272192 : if (m_concrete != other.m_concrete)
984 : return false;
985 3212283 : if (m_symbolic != other.m_symbolic)
986 : return false;
987 :
988 : return true;
989 : }
990 :
991 : /* Generate a hash value for this binding_map. */
992 :
993 : hashval_t
994 21255056 : binding_map::hash () const
995 : {
996 21255056 : hashval_t result = 0;
997 45815659 : for (auto iter : m_concrete)
998 : {
999 24560603 : result ^= iter.first.hash ();
1000 24560603 : inchash::hash hstate;
1001 24560603 : hstate.add_ptr (iter.second);
1002 24560603 : result ^= hstate.end ();
1003 : }
1004 22403881 : for (auto iter : m_symbolic)
1005 : {
1006 : /* Use a new hasher for each key to avoid depending on the ordering
1007 : of keys when accumulating the result. */
1008 1148825 : inchash::hash hstate;
1009 1148825 : hstate.add_ptr (iter.m_region);
1010 1148825 : hstate.add_ptr (iter.m_sval);
1011 1148825 : result ^= hstate.end ();
1012 : }
1013 21255056 : return result;
1014 : }
1015 :
1016 : const svalue *
1017 5110592 : binding_map::get (const binding_key *key) const
1018 : {
1019 5110592 : if (key->symbolic_p ())
1020 : {
1021 288930 : const ana::symbolic_binding &sym_key
1022 : = *static_cast <const ana::symbolic_binding *> (key);
1023 288930 : const region *reg = sym_key.get_region ();
1024 :
1025 340011 : for (auto iter : m_symbolic)
1026 : {
1027 212008 : if (iter.m_region == reg)
1028 5110592 : return iter.m_sval;
1029 : }
1030 : return nullptr;
1031 : }
1032 : else
1033 : {
1034 4821662 : const concrete_binding &conc_key
1035 : = *static_cast <const concrete_binding *> (key);
1036 4821662 : const bit_range &bits = conc_key.get_bit_range ();
1037 :
1038 4821662 : concrete_bindings_t::const_iterator iter (m_concrete.find (bits));
1039 4821662 : if (iter != m_concrete.end ())
1040 4392744 : return iter->second;
1041 : else
1042 : return nullptr;
1043 : }
1044 : }
1045 :
1046 : void
1047 2122047 : binding_map::put (const binding_key *key, const svalue *sval)
1048 : {
1049 2122047 : if (key->symbolic_p ())
1050 : {
1051 65850 : const ana::symbolic_binding &sym_key
1052 : = *static_cast <const ana::symbolic_binding *> (key);
1053 65850 : const region *reg = sym_key.get_region ();
1054 :
1055 65850 : m_symbolic.clear ();
1056 :
1057 65850 : m_symbolic.push_back ({reg, sval});
1058 : }
1059 : else
1060 : {
1061 2056197 : const concrete_binding &conc_key
1062 : = *static_cast <const concrete_binding *> (key);
1063 2056197 : const bit_range &bits = conc_key.get_bit_range ();
1064 :
1065 2056197 : concrete_bindings_t::iterator iter (m_concrete.find (bits));
1066 2056197 : if (iter != m_concrete.end ())
1067 3580 : (*iter).second = sval;
1068 : else
1069 2052617 : m_concrete.insert (bits, sval);
1070 : }
1071 2122047 : }
1072 :
1073 : void
1074 286 : binding_map::overwrite (iterator_t &pos, const svalue *v)
1075 : {
1076 286 : gcc_assert (&pos.m_map == this);
1077 286 : if (pos.m_symbolic != m_symbolic.end ())
1078 0 : (*(pos.m_symbolic)).m_sval = v;
1079 : else
1080 : {
1081 286 : gcc_assert (pos.m_concrete != m_concrete.end ());
1082 286 : (*(pos.m_concrete)).second = v;
1083 : }
1084 286 : }
1085 :
1086 : void
1087 297300 : binding_map::remove (const binding_key *key)
1088 : {
1089 297300 : if (key->symbolic_p ())
1090 9718 : m_symbolic.clear ();
1091 : else
1092 : {
1093 287582 : const concrete_binding &conc_key
1094 : = *static_cast <const concrete_binding *> (key);
1095 287582 : const bit_range &bits = conc_key.get_bit_range ();
1096 287582 : m_concrete.erase (bits);
1097 : }
1098 297300 : }
1099 :
1100 : binding_map::const_iterator_t
1101 11104001 : binding_map::begin () const
1102 : {
1103 11104001 : return binding_map::const_iterator_t (*this,
1104 : m_concrete.begin (),
1105 11104001 : m_symbolic.begin ());
1106 : }
1107 :
1108 : binding_map::const_iterator_t
1109 22113395 : binding_map::end () const
1110 : {
1111 22113395 : return binding_map::const_iterator_t (*this,
1112 : m_concrete.end (),
1113 22113395 : m_symbolic.end ());
1114 : }
1115 :
1116 : binding_map::iterator_t
1117 6685524 : binding_map::begin ()
1118 : {
1119 6685524 : return binding_map::iterator_t (*this,
1120 : m_concrete.begin (),
1121 6685524 : m_symbolic.begin ());
1122 : }
1123 :
1124 : binding_map::iterator_t
1125 14115011 : binding_map::end ()
1126 : {
1127 14115011 : return binding_map::iterator_t (*this,
1128 : m_concrete.end (),
1129 14115011 : m_symbolic.end ());
1130 : }
1131 :
1132 : size_t
1133 534054 : binding_map::elements () const
1134 : {
1135 534054 : return m_concrete.size () + m_symbolic.size ();
1136 : }
1137 :
1138 : /* Dump a representation of this binding_map to PP.
1139 : SIMPLE controls how values and regions are to be printed.
1140 : If MULTILINE, then split the dump over multiple lines and
1141 : use whitespace for readability, otherwise put all on one line. */
1142 :
1143 : void
1144 1731 : binding_map::dump_to_pp (pretty_printer *pp, bool simple,
1145 : bool multiline) const
1146 : {
1147 1731 : bool first = true;
1148 2862 : for (auto iter : *this)
1149 : {
1150 1131 : const binding_key *key = iter.m_key;
1151 1131 : const svalue *value = iter.m_sval;
1152 1131 : if (multiline)
1153 : {
1154 300 : pp_string (pp, " key: {");
1155 300 : key->dump_to_pp (pp, simple);
1156 300 : pp_string (pp, "}");
1157 300 : pp_newline (pp);
1158 300 : pp_string (pp, " value: ");
1159 300 : if (tree t = value->get_type ())
1160 300 : dump_quoted_tree (pp, t);
1161 300 : pp_string (pp, " {");
1162 300 : value->dump_to_pp (pp, simple);
1163 300 : pp_string (pp, "}");
1164 300 : pp_newline (pp);
1165 : }
1166 : else
1167 : {
1168 831 : if (first)
1169 : first = false;
1170 : else
1171 188 : pp_string (pp, ", ");
1172 831 : pp_string (pp, "binding key: {");
1173 831 : key->dump_to_pp (pp, simple);
1174 831 : pp_string (pp, "}, value: {");
1175 831 : value->dump_to_pp (pp, simple);
1176 831 : pp_string (pp, "}");
1177 : }
1178 : }
1179 1731 : }
1180 :
1181 : /* Dump a multiline representation of this binding_map to stderr. */
1182 :
1183 : DEBUG_FUNCTION void
1184 0 : binding_map::dump (bool simple) const
1185 : {
1186 0 : tree_dump_pretty_printer pp (stderr);
1187 0 : dump_to_pp (&pp, simple, true);
1188 0 : pp_newline (&pp);
1189 0 : }
1190 :
1191 : /* Assert that this object is valid. */
1192 :
1193 : void
1194 12941338 : binding_map::validate () const
1195 : {
1196 12941338 : const size_t num_concrete = m_concrete.size ();
1197 12941338 : const size_t num_symbolic = m_symbolic.size ();
1198 :
1199 : /* We shouldn't have more than one symbolic key per cluster
1200 : (or one would have clobbered the other). */
1201 12941338 : gcc_assert (num_symbolic < 2);
1202 : /* We can't have both concrete and symbolic keys. */
1203 12941338 : gcc_assert (num_concrete == 0 || num_symbolic == 0);
1204 :
1205 12941338 : m_concrete.validate ();
1206 12941338 : }
1207 :
1208 : /* Return a new json::object of the form
1209 : {KEY_DESC : SVALUE_DESC,
1210 : ...for the various key/value pairs in this binding_map}. */
1211 :
1212 : std::unique_ptr<json::object>
1213 0 : binding_map::to_json () const
1214 : {
1215 0 : auto map_obj = std::make_unique<json::object> ();
1216 :
1217 0 : for (auto iter : *this)
1218 : {
1219 0 : const svalue *value = iter.m_sval;
1220 0 : label_text key_desc = iter.m_key->get_desc ();
1221 0 : map_obj->set (key_desc.get (), value->to_json ());
1222 0 : }
1223 :
1224 0 : return map_obj;
1225 : }
1226 :
1227 : /* Add a child to PARENT_WIDGET expressing a binding between
1228 : KEY and SVAL. */
1229 :
1230 : static void
1231 0 : add_binding_to_tree_widget (text_art::tree_widget &parent_widget,
1232 : const text_art::dump_widget_info &dwi,
1233 : const binding_key *key,
1234 : const svalue *sval)
1235 : {
1236 0 : pretty_printer the_pp;
1237 0 : pretty_printer * const pp = &the_pp;
1238 0 : pp_format_decoder (pp) = default_tree_printer;
1239 0 : pp_show_color (pp) = true;
1240 0 : const bool simple = true;
1241 :
1242 0 : key->dump_to_pp (pp, simple);
1243 0 : pp_string (pp, ": ");
1244 0 : if (tree t = sval->get_type ())
1245 0 : dump_quoted_tree (pp, t);
1246 0 : pp_string (pp, " {");
1247 0 : sval->dump_to_pp (pp, simple);
1248 0 : pp_string (pp, "}");
1249 :
1250 0 : parent_widget.add_child (text_art::tree_widget::make (dwi, pp));
1251 0 : }
1252 :
1253 : void
1254 0 : binding_map::add_to_tree_widget (text_art::tree_widget &parent_widget,
1255 : const text_art::dump_widget_info &dwi) const
1256 : {
1257 0 : for (auto iter : *this)
1258 : {
1259 0 : const binding_key *key = iter.m_key;
1260 0 : const svalue *sval = iter.m_sval;
1261 0 : add_binding_to_tree_widget (parent_widget, dwi,
1262 : key, sval);
1263 : }
1264 0 : }
1265 :
1266 : /* Get the child region of PARENT_REG based upon INDEX within a
1267 : CONSTRUCTOR. */
1268 :
1269 : static const region *
1270 467 : get_subregion_within_ctor (const region *parent_reg, tree index,
1271 : region_model_manager *mgr)
1272 : {
1273 467 : switch (TREE_CODE (index))
1274 : {
1275 0 : default:
1276 0 : gcc_unreachable ();
1277 403 : case INTEGER_CST:
1278 403 : {
1279 403 : const svalue *index_sval
1280 403 : = mgr->get_or_create_constant_svalue (index);
1281 403 : return mgr->get_element_region (parent_reg,
1282 403 : TREE_TYPE (parent_reg->get_type ()),
1283 403 : index_sval);
1284 : }
1285 64 : break;
1286 64 : case FIELD_DECL:
1287 64 : return mgr->get_field_region (parent_reg, index);
1288 : }
1289 : }
1290 :
1291 : /* Get the child region of PARENT_REG based upon (INDEX, VALUE) within a
1292 : CONSTRUCTOR. */
1293 :
1294 : static const region *
1295 467 : get_subregion_within_ctor_for_ctor_pair (const region *parent_reg,
1296 : tree index,
1297 : tree value,
1298 : region_model_manager *mgr)
1299 : {
1300 467 : if (TREE_CODE (index) == INTEGER_CST
1301 403 : && TREE_CODE (value) == RAW_DATA_CST)
1302 : {
1303 : /* Special-case; see tree.def's description of CONSTRUCTOR.
1304 : We have RAW_DATA_LENGTH of bytes, starting at INDEX's start. */
1305 14 : const region *start_reg
1306 14 : = get_subregion_within_ctor (parent_reg, index, mgr);
1307 : /* Build a bit range, relative to PARENT_REG. */
1308 14 : region_offset start_offset = start_reg->get_offset (mgr);
1309 :
1310 14 : if (!start_offset.concrete_p ())
1311 : return nullptr;
1312 14 : bit_offset_t start_bit_offset = start_offset.get_bit_offset ();
1313 14 : int length = RAW_DATA_LENGTH (value);
1314 14 : bit_range bits (start_bit_offset, length * BITS_PER_UNIT);
1315 :
1316 14 : return mgr->get_bit_range (parent_reg, NULL_TREE, bits);
1317 : }
1318 :
1319 453 : return get_subregion_within_ctor (parent_reg, index, mgr);
1320 : }
1321 :
1322 : /* Get the svalue for VAL, a non-CONSTRUCTOR value within a CONSTRUCTOR. */
1323 :
1324 : static const svalue *
1325 425 : get_svalue_for_ctor_val (tree val, region_model_manager *mgr)
1326 : {
1327 : /* Reuse the get_rvalue logic from region_model. */
1328 425 : region_model m (mgr);
1329 425 : return m.get_rvalue (path_var (val, 0), nullptr);
1330 425 : }
1331 :
1332 : /* Bind values from CONSTRUCTOR to this map, relative to
1333 : PARENT_REG's relationship to its base region.
1334 : Return true if successful, false if there was a problem (e.g. due
1335 : to hitting a complexity limit). */
1336 :
1337 : bool
1338 164 : concrete_binding_map::apply_ctor_to_region (const region *parent_reg, tree ctor,
1339 : region_model_manager *mgr)
1340 : {
1341 164 : gcc_assert (parent_reg);
1342 164 : gcc_assert (TREE_CODE (ctor) == CONSTRUCTOR);
1343 :
1344 164 : unsigned ix;
1345 164 : tree index;
1346 164 : tree val;
1347 164 : tree parent_type = parent_reg->get_type ();
1348 164 : tree field;
1349 164 : if (TREE_CODE (parent_type) == RECORD_TYPE)
1350 55 : field = TYPE_FIELDS (parent_type);
1351 : else
1352 : field = NULL_TREE;
1353 630 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), ix, index, val)
1354 : {
1355 467 : if (!index)
1356 : {
1357 : /* If index is NULL, then iterate through the fields for
1358 : a RECORD_TYPE, or use an INTEGER_CST otherwise.
1359 : Compare with similar logic in output_constructor. */
1360 189 : if (field)
1361 : {
1362 0 : index = field;
1363 0 : field = DECL_CHAIN (field);
1364 : }
1365 : else
1366 189 : index = build_int_cst (integer_type_node, ix);
1367 : }
1368 278 : else if (TREE_CODE (index) == RANGE_EXPR)
1369 : {
1370 0 : tree min_index = TREE_OPERAND (index, 0);
1371 0 : tree max_index = TREE_OPERAND (index, 1);
1372 0 : if (min_index == max_index)
1373 : {
1374 0 : if (!apply_ctor_pair_to_child_region (parent_reg, mgr,
1375 : min_index, val))
1376 : return false;
1377 : }
1378 : else
1379 : {
1380 0 : if (!apply_ctor_val_to_range (parent_reg, mgr,
1381 : min_index, max_index, val))
1382 : return false;
1383 : }
1384 0 : continue;
1385 0 : }
1386 467 : if (!apply_ctor_pair_to_child_region (parent_reg, mgr, index, val))
1387 : return false;
1388 : }
1389 : return true;
1390 : }
1391 :
1392 : /* Bind the value VAL into the range of elements within PARENT_REF
1393 : from MIN_INDEX to MAX_INDEX (including endpoints).
1394 : For use in handling RANGE_EXPR within a CONSTRUCTOR.
1395 : Return true if successful, false if there was a problem (e.g. due
1396 : to hitting a complexity limit). */
1397 :
1398 : bool
1399 0 : concrete_binding_map::apply_ctor_val_to_range (const region *parent_reg,
1400 : region_model_manager *mgr,
1401 : tree min_index, tree max_index,
1402 : tree val)
1403 : {
1404 0 : gcc_assert (TREE_CODE (min_index) == INTEGER_CST);
1405 0 : gcc_assert (TREE_CODE (max_index) == INTEGER_CST);
1406 :
1407 : /* Generate a binding key for the range. */
1408 0 : const region *min_element
1409 0 : = get_subregion_within_ctor (parent_reg, min_index, mgr);
1410 0 : const region *max_element
1411 0 : = get_subregion_within_ctor (parent_reg, max_index, mgr);
1412 0 : region_offset min_offset = min_element->get_offset (mgr);
1413 0 : if (min_offset.symbolic_p ())
1414 : return false;
1415 0 : bit_offset_t start_bit_offset = min_offset.get_bit_offset ();
1416 0 : store_manager *smgr = mgr->get_store_manager ();
1417 0 : if (max_element->empty_p ())
1418 : return false;
1419 0 : const binding_key *max_element_key = binding_key::make (smgr, max_element);
1420 0 : if (max_element_key->symbolic_p ())
1421 : return false;
1422 0 : const concrete_binding *max_element_ckey
1423 0 : = max_element_key->dyn_cast_concrete_binding ();
1424 0 : bit_size_t range_size_in_bits
1425 0 : = max_element_ckey->get_next_bit_offset () - start_bit_offset;
1426 :
1427 : /* Get the value. */
1428 0 : if (TREE_CODE (val) == CONSTRUCTOR)
1429 : return false;
1430 0 : const svalue *sval = get_svalue_for_ctor_val (val, mgr);
1431 :
1432 : /* Bind the value to the range. */
1433 0 : const bit_range affected_bits (start_bit_offset, range_size_in_bits);
1434 0 : insert (affected_bits, sval);
1435 0 : return true;
1436 : }
1437 :
1438 : /* Bind the value VAL into INDEX within PARENT_REF.
1439 : For use in handling a pair of entries within a CONSTRUCTOR.
1440 : Return true if successful, false if there was a problem (e.g. due
1441 : to hitting a complexity limit). */
1442 :
1443 : bool
1444 467 : concrete_binding_map::apply_ctor_pair_to_child_region (const region *parent_reg,
1445 : region_model_manager *mgr,
1446 : tree index, tree val)
1447 : {
1448 467 : const region *child_reg
1449 467 : = get_subregion_within_ctor_for_ctor_pair (parent_reg, index, val, mgr);
1450 467 : if (!child_reg)
1451 : return false;
1452 467 : if (TREE_CODE (val) == CONSTRUCTOR)
1453 42 : return apply_ctor_to_region (child_reg, val, mgr);
1454 : else
1455 : {
1456 425 : const svalue *sval = get_svalue_for_ctor_val (val, mgr);
1457 425 : if (child_reg->empty_p ())
1458 : return false;
1459 425 : const binding_key *k
1460 425 : = binding_key::make (mgr->get_store_manager (), child_reg);
1461 : /* Handle the case where we have an unknown size for child_reg
1462 : (e.g. due to it being a trailing field with incomplete array
1463 : type. */
1464 425 : if (!k->concrete_p ())
1465 : {
1466 : /* Assume that sval has a well-defined size for this case. */
1467 5 : tree sval_type = sval->get_type ();
1468 5 : gcc_assert (sval_type);
1469 5 : HOST_WIDE_INT sval_byte_size = int_size_in_bytes (sval_type);
1470 5 : gcc_assert (sval_byte_size != -1);
1471 5 : bit_size_t sval_bit_size = sval_byte_size * BITS_PER_UNIT;
1472 : /* Get offset of child relative to base region. */
1473 5 : region_offset child_base_offset = child_reg->get_offset (mgr);
1474 5 : if (child_base_offset.symbolic_p ())
1475 1 : return false;
1476 : /* Convert to an offset relative to the parent region. */
1477 4 : region_offset parent_base_offset = parent_reg->get_offset (mgr);
1478 4 : gcc_assert (!parent_base_offset.symbolic_p ());
1479 4 : bit_offset_t child_parent_offset
1480 4 : = (child_base_offset.get_bit_offset ()
1481 4 : - parent_base_offset.get_bit_offset ());
1482 : /* Create a concrete key for the child within the parent. */
1483 4 : k = mgr->get_store_manager ()->get_concrete_binding
1484 4 : (child_parent_offset, sval_bit_size);
1485 : }
1486 424 : gcc_assert (k->concrete_p ());
1487 424 : auto conc_key = as_a <const concrete_binding *> (k);
1488 424 : insert (conc_key->get_bit_range (), sval);
1489 424 : return true;
1490 : }
1491 : }
1492 :
1493 : /* Populate OUT with all bindings within this map that overlap KEY. */
1494 :
1495 : void
1496 34629 : binding_map::get_overlapping_bindings (const binding_key *key,
1497 : auto_vec<const binding_key *> *out)
1498 : {
1499 312242 : for (auto iter : *this)
1500 : {
1501 277613 : const binding_key *iter_key = iter.m_key;
1502 555226 : if (const concrete_binding *ckey
1503 277613 : = key->dyn_cast_concrete_binding ())
1504 : {
1505 550154 : if (const concrete_binding *iter_ckey
1506 275077 : = iter_key->dyn_cast_concrete_binding ())
1507 : {
1508 274572 : if (ckey->overlaps_p (*iter_ckey))
1509 29716 : out->safe_push (iter_key);
1510 : }
1511 : else
1512 : {
1513 : /* Assume overlap. */
1514 505 : out->safe_push (iter_key);
1515 : }
1516 : }
1517 : else
1518 : {
1519 : /* Assume overlap. */
1520 2536 : out->safe_push (iter_key);
1521 : }
1522 : }
1523 34629 : }
1524 :
1525 : /* Remove, truncate, and/or split any bindings within this map that
1526 : overlap DROP_KEY.
1527 :
1528 : For example, if we have:
1529 :
1530 : +------------------------------------+
1531 : | old binding |
1532 : +------------------------------------+
1533 :
1534 : which is to be overwritten with:
1535 :
1536 : .......+----------------------+.......
1537 : .......| new binding |.......
1538 : .......+----------------------+.......
1539 :
1540 : this function "cuts a hole" out of the old binding:
1541 :
1542 : +------+......................+------+
1543 : |prefix| hole for new binding |suffix|
1544 : +------+......................+------+
1545 :
1546 : into which the new binding can be added without
1547 : overlapping the prefix or suffix.
1548 :
1549 : The prefix and suffix (if added) will be bound to the pertinent
1550 : parts of the value of the old binding.
1551 :
1552 : For example, given:
1553 : struct s5
1554 : {
1555 : char arr[8];
1556 : };
1557 : void test_5 (struct s5 *p)
1558 : {
1559 : struct s5 f = *p;
1560 : f.arr[3] = 42;
1561 : }
1562 : then after the "f = *p;" we have:
1563 : cluster for: f: INIT_VAL((*INIT_VAL(p_33(D))))
1564 : and at the "f.arr[3] = 42;" we remove the bindings overlapping
1565 : "f.arr[3]", replacing it with a prefix (bytes 0-2) and suffix (bytes 4-7)
1566 : giving:
1567 : cluster for: f
1568 : key: {bytes 0-2}
1569 : value: {BITS_WITHIN(bytes 0-2, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1570 : key: {bytes 4-7}
1571 : value: {BITS_WITHIN(bytes 4-7, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1572 : punching a hole into which the new value can be written at byte 3:
1573 : cluster for: f
1574 : key: {bytes 0-2}
1575 : value: {BITS_WITHIN(bytes 0-2, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1576 : key: {byte 3}
1577 : value: 'char' {(char)42}
1578 : key: {bytes 4-7}
1579 : value: {BITS_WITHIN(bytes 4-7, inner_val: INIT_VAL((*INIT_VAL(p_33(D))).arr))}
1580 :
1581 : If UNCERTAINTY is non-NULL, use it to record any svalues that
1582 : were removed, as being maybe-bound.
1583 :
1584 : If MAYBE_LIVE_VALUES is non-NULL, then use it to record any svalues that
1585 : were removed as being maybe-live.
1586 :
1587 : If ALWAYS_OVERLAP, then assume that DROP_KEY can overlap anything
1588 : in the map, due to one or both of the underlying clusters being
1589 : symbolic (but not the same symbolic region). Hence even if DROP_KEY is a
1590 : concrete binding it could actually be referring to the same memory as
1591 : distinct concrete bindings in the map. Remove all bindings, but
1592 : register any svalues with *UNCERTAINTY. */
1593 :
1594 : void
1595 81229 : binding_map::remove_overlapping_bindings (store_manager *mgr,
1596 : const binding_key *drop_key,
1597 : uncertainty_t *uncertainty,
1598 : svalue_set *maybe_live_values,
1599 : bool always_overlap)
1600 : {
1601 : /* Get the bindings of interest within this map. */
1602 81229 : auto_vec<const binding_key *> bindings;
1603 81229 : if (always_overlap)
1604 94029 : for (auto iter : *this)
1605 47429 : bindings.safe_push (iter.m_key); /* Add all bindings. */
1606 : else
1607 : /* Just add overlapping bindings. */
1608 34629 : get_overlapping_bindings (drop_key, &bindings);
1609 :
1610 : unsigned i;
1611 : const binding_key *iter_binding;
1612 225033 : FOR_EACH_VEC_ELT (bindings, i, iter_binding)
1613 : {
1614 : /* Record any svalues that were removed to *UNCERTAINTY as being
1615 : maybe-bound, provided at least some part of the binding is symbolic.
1616 :
1617 : Specifically, if at least one of the bindings is symbolic, or we
1618 : have ALWAYS_OVERLAP for the case where we have possibly aliasing
1619 : regions, then we don't know that the svalue has been overwritten,
1620 : and should record that to *UNCERTAINTY.
1621 :
1622 : However, if we have concrete keys accessing within the same symbolic
1623 : region, then we *know* that the symbolic region has been overwritten,
1624 : so we don't record it to *UNCERTAINTY, as this could be a genuine
1625 : leak. */
1626 80186 : const svalue *old_sval = get (iter_binding);
1627 80186 : if (uncertainty
1628 102744 : && (drop_key->symbolic_p ()
1629 20445 : || iter_binding->symbolic_p ()
1630 18087 : || always_overlap))
1631 18835 : uncertainty->on_maybe_bound_sval (old_sval);
1632 :
1633 : /* Record any svalues that were removed to *MAYBE_LIVE_VALUES as being
1634 : maybe-live. */
1635 80186 : if (maybe_live_values)
1636 47455 : maybe_live_values->add (old_sval);
1637 :
1638 : /* Begin by removing the old binding. */
1639 80186 : remove (iter_binding);
1640 :
1641 : /* Don't attempt to handle prefixes/suffixes for the
1642 : "always_overlap" case; everything's being removed. */
1643 80186 : if (always_overlap)
1644 47429 : continue;
1645 :
1646 : /* Now potentially add the prefix and suffix. */
1647 65514 : if (const concrete_binding *drop_ckey
1648 32757 : = drop_key->dyn_cast_concrete_binding ())
1649 60442 : if (const concrete_binding *iter_ckey
1650 30221 : = iter_binding->dyn_cast_concrete_binding ())
1651 : {
1652 29716 : gcc_assert (drop_ckey->overlaps_p (*iter_ckey));
1653 :
1654 29716 : const bit_range &drop_bits = drop_ckey->get_bit_range ();
1655 29716 : const bit_range &iter_bits = iter_ckey->get_bit_range ();
1656 29716 : gcc_assert (old_sval);
1657 29716 : m_concrete.remove_overlapping_binding (mgr, drop_bits, iter_bits,
1658 : *old_sval);
1659 : }
1660 : }
1661 81229 : }
1662 :
1663 : /* class binding_cluster. */
1664 :
1665 1577177 : binding_cluster::binding_cluster (store_manager &store_mgr,
1666 1577177 : const region *base_region)
1667 1577177 : : m_base_region (base_region), m_map (store_mgr),
1668 1577177 : m_escaped (false), m_touched (false)
1669 : {
1670 1577177 : }
1671 :
1672 : /* binding_cluster's copy ctor. */
1673 :
1674 31220645 : binding_cluster::binding_cluster (const binding_cluster &other)
1675 31220645 : : m_base_region (other.m_base_region), m_map (other.m_map),
1676 31220645 : m_escaped (other.m_escaped), m_touched (other.m_touched)
1677 : {
1678 31220645 : }
1679 :
1680 : /* binding_cluster's assignment operator. */
1681 :
1682 : binding_cluster&
1683 0 : binding_cluster::operator= (const binding_cluster &other)
1684 : {
1685 0 : gcc_assert (m_base_region == other.m_base_region);
1686 0 : m_map = other.m_map;
1687 0 : m_escaped = other.m_escaped;
1688 0 : m_touched = other.m_touched;
1689 0 : return *this;
1690 : }
1691 :
1692 : /* binding_cluster's equality operator. */
1693 :
1694 : bool
1695 3272188 : binding_cluster::operator== (const binding_cluster &other) const
1696 : {
1697 3272188 : if (m_map != other.m_map)
1698 : return false;
1699 :
1700 3209909 : if (m_base_region != other.m_base_region)
1701 : return false;
1702 :
1703 3209909 : if (m_escaped != other.m_escaped)
1704 : return false;
1705 :
1706 3209265 : if (m_touched != other.m_touched)
1707 : return false;
1708 :
1709 3206603 : gcc_checking_assert (hash () == other.hash ());
1710 :
1711 : return true;
1712 : }
1713 :
1714 : /* Generate a hash value for this binding_cluster. */
1715 :
1716 : hashval_t
1717 21255056 : binding_cluster::hash () const
1718 : {
1719 21255056 : return m_map.hash ();
1720 : }
1721 :
1722 : /* Return true if this binding_cluster is symbolic
1723 : i.e. its base region is symbolic. */
1724 :
1725 : bool
1726 9289855 : binding_cluster::symbolic_p () const
1727 : {
1728 9289855 : return m_base_region->get_kind () == RK_SYMBOLIC;
1729 : }
1730 :
1731 : /* Dump a representation of this binding_cluster to PP.
1732 : SIMPLE controls how values and regions are to be printed.
1733 : If MULTILINE, then split the dump over multiple lines and
1734 : use whitespace for readability, otherwise put all on one line. */
1735 :
1736 : void
1737 1731 : binding_cluster::dump_to_pp (pretty_printer *pp, bool simple,
1738 : bool multiline) const
1739 : {
1740 1731 : if (m_escaped)
1741 : {
1742 1092 : if (multiline)
1743 : {
1744 904 : pp_string (pp, " ESCAPED");
1745 904 : pp_newline (pp);
1746 : }
1747 : else
1748 188 : pp_string (pp, "(ESCAPED)");
1749 : }
1750 1731 : if (m_touched)
1751 : {
1752 272 : if (multiline)
1753 : {
1754 84 : pp_string (pp, " TOUCHED");
1755 84 : pp_newline (pp);
1756 : }
1757 : else
1758 188 : pp_string (pp, "(TOUCHED)");
1759 : }
1760 :
1761 1731 : m_map.dump_to_pp (pp, simple, multiline);
1762 1731 : }
1763 :
1764 : /* Dump a multiline representation of this binding_cluster to stderr. */
1765 :
1766 : DEBUG_FUNCTION void
1767 0 : binding_cluster::dump (bool simple) const
1768 : {
1769 0 : tree_dump_pretty_printer pp (stderr);
1770 0 : pp_string (&pp, " cluster for: ");
1771 0 : m_base_region->dump_to_pp (&pp, simple);
1772 0 : pp_string (&pp, ": ");
1773 0 : pp_newline (&pp);
1774 0 : dump_to_pp (&pp, simple, true);
1775 0 : pp_newline (&pp);
1776 0 : }
1777 :
1778 : /* Assert that this object is valid. */
1779 :
1780 : void
1781 12941338 : binding_cluster::validate () const
1782 : {
1783 12941338 : m_map.validate ();
1784 12941338 : }
1785 :
1786 : /* Return a new json::object of the form
1787 : {"escaped": true/false,
1788 : "touched": true/false,
1789 : "map" : object for the binding_map. */
1790 :
1791 : std::unique_ptr<json::object>
1792 0 : binding_cluster::to_json () const
1793 : {
1794 0 : auto cluster_obj = std::make_unique<json::object> ();
1795 :
1796 0 : cluster_obj->set_bool ("escaped", m_escaped);
1797 0 : cluster_obj->set_bool ("touched", m_touched);
1798 0 : cluster_obj->set ("map", m_map.to_json ());
1799 :
1800 0 : return cluster_obj;
1801 : }
1802 :
1803 : std::unique_ptr<text_art::tree_widget>
1804 0 : binding_cluster::make_dump_widget (const text_art::dump_widget_info &dwi,
1805 : store_manager *mgr) const
1806 : {
1807 0 : pretty_printer the_pp;
1808 0 : pretty_printer * const pp = &the_pp;
1809 0 : pp_format_decoder (pp) = default_tree_printer;
1810 0 : pp_show_color (pp) = true;
1811 0 : const bool simple = true;
1812 :
1813 0 : m_base_region->dump_to_pp (pp, simple);
1814 0 : pp_string (pp, ": ");
1815 :
1816 0 : if (const svalue *sval = maybe_get_simple_value (mgr))
1817 : {
1818 : /* Special-case to simplify dumps for the common case where
1819 : we just have one value directly bound to the whole of a
1820 : region. */
1821 0 : sval->dump_to_pp (pp, simple);
1822 0 : if (escaped_p ())
1823 0 : pp_string (pp, " (ESCAPED)");
1824 0 : if (touched_p ())
1825 0 : pp_string (pp, " (TOUCHED)");
1826 :
1827 0 : return text_art::tree_widget::make (dwi, pp);
1828 : }
1829 : else
1830 : {
1831 0 : if (escaped_p ())
1832 0 : pp_string (pp, " (ESCAPED)");
1833 0 : if (touched_p ())
1834 0 : pp_string (pp, " (TOUCHED)");
1835 :
1836 0 : std::unique_ptr<text_art::tree_widget> cluster_widget
1837 0 : (text_art::tree_widget::make (dwi, pp));
1838 :
1839 0 : m_map.add_to_tree_widget (*cluster_widget, dwi);
1840 :
1841 0 : return cluster_widget;
1842 0 : }
1843 0 : }
1844 :
1845 : /* Add a binding of SVAL of kind KIND to REG, unpacking SVAL if it is a
1846 : compound_sval. */
1847 :
1848 : void
1849 449663 : binding_cluster::bind (store_manager *mgr,
1850 : const region *reg, const svalue *sval)
1851 : {
1852 899326 : if (const compound_svalue *compound_sval
1853 449663 : = sval->dyn_cast_compound_svalue ())
1854 : {
1855 778 : bind_compound_sval (mgr, reg, compound_sval);
1856 778 : return;
1857 : }
1858 :
1859 448885 : if (reg->empty_p ())
1860 : return;
1861 448885 : const binding_key *binding = binding_key::make (mgr, reg);
1862 448885 : bind_key (binding, sval);
1863 : }
1864 :
1865 : /* Bind SVAL to KEY.
1866 : Unpacking of compound_svalues should already have been done by the
1867 : time this is called. */
1868 :
1869 : void
1870 451452 : binding_cluster::bind_key (const binding_key *key, const svalue *sval)
1871 : {
1872 451452 : gcc_assert (sval->get_kind () != SK_COMPOUND);
1873 :
1874 451452 : m_map.put (key, sval);
1875 451452 : if (key->symbolic_p ())
1876 24835 : m_touched = true;
1877 451452 : }
1878 :
1879 : /* Subroutine of binding_cluster::bind.
1880 : Unpack compound_svals when binding them, so that we bind them
1881 : element-wise. */
1882 :
1883 : void
1884 778 : binding_cluster::bind_compound_sval (store_manager *mgr,
1885 : const region *reg,
1886 : const compound_svalue *compound_sval)
1887 : {
1888 778 : region_offset reg_offset
1889 778 : = reg->get_offset (mgr->get_svalue_manager ());
1890 778 : if (reg_offset.symbolic_p ())
1891 : {
1892 4 : m_touched = true;
1893 4 : clobber_region (mgr, reg);
1894 4 : return;
1895 : }
1896 :
1897 2941 : for (auto iter : *compound_sval)
1898 : {
1899 2167 : const bit_range &iter_bits = iter.first;
1900 2167 : const svalue *iter_sval = iter.second;
1901 :
1902 2167 : bit_offset_t effective_start
1903 2167 : = (iter_bits.get_start_bit_offset ()
1904 2167 : + reg_offset.get_bit_offset ());
1905 2167 : const bit_range affected_bits (effective_start,
1906 2167 : iter_bits.m_size_in_bits);
1907 2167 : bind_key (mgr->get_concrete_binding (affected_bits), iter_sval);
1908 : }
1909 : }
1910 :
1911 : /* Remove all bindings overlapping REG within this cluster. */
1912 :
1913 : void
1914 6305 : binding_cluster::clobber_region (store_manager *mgr, const region *reg)
1915 : {
1916 6305 : remove_overlapping_bindings (mgr, reg, nullptr, nullptr);
1917 6305 : }
1918 :
1919 : /* Remove any bindings for REG within this cluster. */
1920 :
1921 : void
1922 217114 : binding_cluster::purge_region (store_manager *mgr, const region *reg)
1923 : {
1924 217114 : gcc_assert (reg->get_kind () == RK_DECL);
1925 217114 : if (reg->empty_p ())
1926 : return;
1927 217114 : const binding_key *binding
1928 217114 : = binding_key::make (mgr, const_cast<region *> (reg));
1929 217114 : m_map.remove (binding);
1930 : }
1931 :
1932 : /* Clobber REG and fill it with repeated copies of SVAL. */
1933 :
1934 : void
1935 1583 : binding_cluster::fill_region (store_manager *mgr,
1936 : const region *reg,
1937 : const svalue *sval)
1938 : {
1939 1583 : clobber_region (mgr, reg);
1940 :
1941 1583 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
1942 1583 : const svalue *byte_size_sval = reg->get_byte_size_sval (sval_mgr);
1943 1583 : const svalue *fill_sval
1944 1583 : = sval_mgr->get_or_create_repeated_svalue (reg->get_type (),
1945 : byte_size_sval, sval);
1946 1583 : bind (mgr, reg, fill_sval);
1947 1583 : }
1948 :
1949 : /* Clobber REG within this cluster and fill it with zeroes. */
1950 :
1951 : void
1952 33 : binding_cluster::zero_fill_region (store_manager *mgr, const region *reg)
1953 : {
1954 33 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
1955 33 : const svalue *zero_sval = sval_mgr->get_or_create_int_cst (char_type_node, 0);
1956 33 : fill_region (mgr, reg, zero_sval);
1957 33 : }
1958 :
1959 : /* Mark REG_TO_BIND within this cluster as being unknown.
1960 :
1961 : Remove any bindings overlapping REG_FOR_OVERLAP.
1962 : If UNCERTAINTY is non-NULL, use it to record any svalues that
1963 : had bindings to them removed, as being maybe-bound.
1964 : If MAYBE_LIVE_VALUES is non-NULL, use it to record any svalues that
1965 : had bindings to them removed, as being maybe-live.
1966 :
1967 : REG_TO_BIND and REG_FOR_OVERLAP are the same for
1968 : store::mark_region_as_unknown, but are different in
1969 : store::set_value's alias handling, for handling the case where
1970 : we have a write to a symbolic REG_FOR_OVERLAP. */
1971 :
1972 : void
1973 46909 : binding_cluster::mark_region_as_unknown (store_manager *mgr,
1974 : const region *reg_to_bind,
1975 : const region *reg_for_overlap,
1976 : uncertainty_t *uncertainty,
1977 : svalue_set *maybe_live_values)
1978 : {
1979 46909 : if (reg_to_bind->empty_p ())
1980 : return;
1981 :
1982 46909 : remove_overlapping_bindings (mgr, reg_for_overlap, uncertainty,
1983 : maybe_live_values);
1984 :
1985 : /* Add a default binding to "unknown". */
1986 46909 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
1987 46909 : const svalue *sval
1988 46909 : = sval_mgr->get_or_create_unknown_svalue (reg_to_bind->get_type ());
1989 46909 : bind (mgr, reg_to_bind, sval);
1990 : }
1991 :
1992 : /* Purge state involving SVAL. */
1993 :
1994 : void
1995 404030 : binding_cluster::purge_state_involving (const svalue *sval,
1996 : region_model_manager *sval_mgr)
1997 : {
1998 404030 : auto_vec<const binding_key *> to_remove;
1999 404030 : auto_vec<std::pair<const binding_key *, tree> > to_make_unknown;
2000 811573 : for (auto iter : m_map)
2001 : {
2002 407543 : const binding_key *iter_key = iter.m_key;
2003 815086 : if (const symbolic_binding *symbolic_key
2004 407543 : = iter_key->dyn_cast_symbolic_binding ())
2005 : {
2006 32599 : const region *reg = symbolic_key->get_region ();
2007 32599 : if (reg->involves_p (sval))
2008 0 : to_remove.safe_push (iter_key);
2009 : }
2010 407543 : const svalue *iter_sval = iter.m_sval;
2011 407543 : if (iter_sval->involves_p (sval))
2012 3207 : to_make_unknown.safe_push (std::make_pair(iter_key,
2013 3207 : iter_sval->get_type ()));
2014 : }
2015 404030 : for (auto iter : to_remove)
2016 : {
2017 0 : m_map.remove (iter);
2018 0 : m_touched = true;
2019 : }
2020 413651 : for (auto iter : to_make_unknown)
2021 : {
2022 3207 : const svalue *new_sval
2023 3207 : = sval_mgr->get_or_create_unknown_svalue (iter.second);
2024 3207 : m_map.put (iter.first, new_sval);
2025 : }
2026 404030 : }
2027 :
2028 : /* Get any SVAL bound to REG within this cluster via kind KIND,
2029 : without checking parent regions of REG. */
2030 :
2031 : const svalue *
2032 1433107 : binding_cluster::get_binding (store_manager *mgr,
2033 : const region *reg) const
2034 : {
2035 1433107 : if (reg->empty_p ())
2036 : return nullptr;
2037 1433107 : const binding_key *reg_binding = binding_key::make (mgr, reg);
2038 1433107 : const svalue *sval = m_map.get (reg_binding);
2039 1433107 : if (sval)
2040 : {
2041 : /* If we have a struct with a single field, then the binding of
2042 : the field will equal that of the struct, and looking up e.g.
2043 : PARENT_REG.field within:
2044 : cluster for PARENT_REG: INIT_VAL(OTHER_REG)
2045 : will erroneously return INIT_VAL(OTHER_REG), rather than
2046 : SUB_VALUE(INIT_VAL(OTHER_REG), FIELD) == INIT_VAL(OTHER_REG.FIELD).
2047 : Fix this issue by iterating upwards whilst the bindings are equal,
2048 : expressing the lookups as subvalues.
2049 : We have to gather a list of subregion accesses, then walk it
2050 : in reverse to get the subvalues. */
2051 1166355 : auto_vec<const region *> regions;
2052 1175271 : while (const region *parent_reg = reg->get_parent_region ())
2053 : {
2054 1175271 : const binding_key *parent_reg_binding
2055 1175271 : = binding_key::make (mgr, parent_reg);
2056 1175271 : if (parent_reg_binding == reg_binding
2057 11183 : && sval->get_type ()
2058 11171 : && reg->get_type ()
2059 1186442 : && sval->get_type () != reg->get_type ())
2060 : {
2061 8916 : regions.safe_push (reg);
2062 8916 : reg = parent_reg;
2063 : }
2064 : else
2065 : break;
2066 8916 : }
2067 1166355 : if (sval->get_type ()
2068 1156998 : && reg->get_type ()
2069 2322307 : && sval->get_type () == reg->get_type ())
2070 : {
2071 953620 : unsigned i;
2072 953620 : const region *iter_reg;
2073 1190207 : FOR_EACH_VEC_ELT_REVERSE (regions, i, iter_reg)
2074 : {
2075 8106 : region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
2076 8106 : sval = rmm_mgr->get_or_create_sub_svalue (iter_reg->get_type (),
2077 : sval, iter_reg);
2078 : }
2079 : }
2080 1166355 : }
2081 : return sval;
2082 : }
2083 :
2084 : /* Get any SVAL bound to REG within this cluster,
2085 : either directly for REG, or recursively checking for bindings within
2086 : parent regions and extracting subvalues if need be. */
2087 :
2088 : const svalue *
2089 1433107 : binding_cluster::get_binding_recursive (store_manager *mgr,
2090 : const region *reg) const
2091 : {
2092 1433107 : if (const svalue *sval = get_binding (mgr, reg))
2093 : return sval;
2094 266752 : if (reg != m_base_region)
2095 162968 : if (const region *parent_reg = reg->get_parent_region ())
2096 325936 : if (const svalue *parent_sval
2097 162968 : = get_binding_recursive (mgr, parent_reg))
2098 : {
2099 : /* Extract child svalue from parent svalue. */
2100 54879 : region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
2101 54879 : return rmm_mgr->get_or_create_sub_svalue (reg->get_type (),
2102 54879 : parent_sval, reg);
2103 : }
2104 : return nullptr;
2105 : }
2106 :
2107 : /* Get any value bound for REG within this cluster. */
2108 :
2109 : const svalue *
2110 1270139 : binding_cluster::get_any_binding (store_manager *mgr,
2111 : const region *reg) const
2112 : {
2113 : /* Look for a direct binding. */
2114 2540278 : if (const svalue *direct_sval
2115 1270139 : = get_binding_recursive (mgr, reg))
2116 : return direct_sval;
2117 :
2118 : /* If we had a write to a cluster of unknown size, we might
2119 : have a self-binding of the whole base region with an svalue,
2120 : where the base region is symbolic.
2121 : Handle such cases by returning sub_svalue instances. */
2122 103784 : if (const svalue *cluster_sval = maybe_get_simple_value (mgr))
2123 : {
2124 : /* Extract child svalue from parent svalue. */
2125 0 : region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
2126 0 : return rmm_mgr->get_or_create_sub_svalue (reg->get_type (),
2127 0 : cluster_sval, reg);
2128 : }
2129 :
2130 : /* If this cluster has been touched by a symbolic write, then the content
2131 : of any subregion not currently specifically bound is "UNKNOWN". */
2132 103784 : if (m_touched)
2133 : {
2134 7211 : region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
2135 7211 : return rmm_mgr->get_or_create_unknown_svalue (reg->get_type ());
2136 : }
2137 :
2138 : /* Alternatively, if this is a symbolic read and the cluster has any bindings,
2139 : then we don't know if we're reading those values or not, so the result
2140 : is also "UNKNOWN". */
2141 96573 : if (reg->get_offset (mgr->get_svalue_manager ()).symbolic_p ()
2142 96573 : && m_map.elements () > 0)
2143 : {
2144 440 : region_model_manager *rmm_mgr = mgr->get_svalue_manager ();
2145 440 : return rmm_mgr->get_or_create_unknown_svalue (reg->get_type ());
2146 : }
2147 :
2148 96133 : if (const svalue *compound_sval = maybe_get_compound_binding (mgr, reg))
2149 : return compound_sval;
2150 :
2151 : /* Otherwise, the initial value, or uninitialized. */
2152 : return nullptr;
2153 : }
2154 :
2155 : /* Attempt to get a compound_svalue for the bindings within the cluster
2156 : affecting REG (which could be the base region itself).
2157 :
2158 : Create a compound_svalue with the subset of bindings the affect REG,
2159 : offsetting them so that the offsets are relative to the start of REG
2160 : within the cluster.
2161 :
2162 : For example, REG could be one element within an array of structs.
2163 :
2164 : Return the resulting compound_svalue, or nullptr if there's a problem. */
2165 :
2166 : const svalue *
2167 96133 : binding_cluster::maybe_get_compound_binding (store_manager *mgr,
2168 : const region *reg) const
2169 : {
2170 96133 : region_offset cluster_offset
2171 96133 : = m_base_region->get_offset (mgr->get_svalue_manager ());
2172 96133 : if (cluster_offset.symbolic_p ())
2173 : return nullptr;
2174 96133 : region_offset reg_offset = reg->get_offset (mgr->get_svalue_manager ());
2175 96133 : if (reg_offset.symbolic_p ())
2176 : return nullptr;
2177 :
2178 76613 : if (reg->empty_p ())
2179 : return nullptr;
2180 :
2181 76613 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
2182 :
2183 : /* We will a build the result map in two parts:
2184 : (a) result_map, holding the concrete keys from this cluster,
2185 :
2186 : (b) default_map, holding the initial values for the region
2187 : (e.g. uninitialized, initializer values, or zero), unless this
2188 : cluster has been touched.
2189 :
2190 : We will populate (a), and as we do, clobber (b), trimming and
2191 : splitting its bindings as necessary.
2192 : Finally, we will merge (b) into (a), giving a concrete map
2193 : that merges both the initial values and the bound values from
2194 : the binding_cluster.
2195 : Doing it this way reduces N for the O(N^2) intersection-finding,
2196 : perhaps we should have a spatial-organized data structure for
2197 : concrete keys, though. */
2198 :
2199 76613 : concrete_binding_map result_map;
2200 76613 : concrete_binding_map default_map;
2201 :
2202 : /* Set up default values in default_map. */
2203 76613 : const svalue *default_sval;
2204 76613 : if (m_touched)
2205 0 : default_sval = sval_mgr->get_or_create_unknown_svalue (reg->get_type ());
2206 : else
2207 76613 : default_sval = sval_mgr->get_or_create_initial_value (reg);
2208 76613 : const binding_key *default_key = binding_key::make (mgr, reg);
2209 :
2210 : /* Express the bit-range of the default key for REG relative to REG,
2211 : rather than to the base region. */
2212 76613 : const concrete_binding *concrete_default_key
2213 76613 : = default_key->dyn_cast_concrete_binding ();
2214 76613 : if (!concrete_default_key)
2215 : return nullptr;
2216 75773 : const bit_range default_key_relative_to_reg
2217 75773 : (0, concrete_default_key->get_size_in_bits ());
2218 :
2219 75773 : default_map.insert (default_key_relative_to_reg, default_sval);
2220 :
2221 151319 : for (auto iter : m_map)
2222 : {
2223 82939 : const binding_key *key = iter.m_key;
2224 82939 : const svalue *sval = iter.m_sval;
2225 :
2226 165878 : if (const concrete_binding *concrete_key
2227 82939 : = key->dyn_cast_concrete_binding ())
2228 : {
2229 82939 : const bit_range &bound_range = concrete_key->get_bit_range ();
2230 :
2231 82939 : bit_size_t reg_bit_size;
2232 82939 : if (!reg->get_bit_size (®_bit_size))
2233 7393 : return nullptr;
2234 :
2235 82939 : bit_range reg_range (reg_offset.get_bit_offset (),
2236 82939 : reg_bit_size);
2237 :
2238 : /* Skip bindings that are outside the bit range of REG. */
2239 82939 : if (!bound_range.intersects_p (reg_range))
2240 66948 : continue;
2241 :
2242 : /* We shouldn't have an exact match; that should have been
2243 : handled already. */
2244 15991 : gcc_assert (!(reg_range == bound_range));
2245 :
2246 15991 : bit_range subrange (0, 0);
2247 15991 : if (reg_range.contains_p (bound_range, &subrange))
2248 : {
2249 : /* We have a bound range fully within REG.
2250 : Add it to result_map, offsetting accordingly. */
2251 8522 : result_map.insert (subrange, sval);
2252 :
2253 : /* Clobber default_map, removing/trimming/spliting where
2254 : it overlaps with offset_concrete_key. */
2255 8522 : default_map.remove_overlapping_bindings (mgr, subrange);
2256 : }
2257 7469 : else if (bound_range.contains_p (reg_range, &subrange))
2258 : {
2259 : /* REG is fully within the bound range, but
2260 : is not equal to it; we're extracting a subvalue. */
2261 7393 : return sval->extract_bit_range (reg->get_type (),
2262 : subrange,
2263 7393 : mgr->get_svalue_manager ());
2264 : }
2265 : else
2266 : {
2267 : /* REG and the bound range partially overlap. */
2268 76 : bit_range reg_subrange (0, 0);
2269 76 : bit_range bound_subrange (0, 0);
2270 76 : reg_range.intersects_p (bound_range,
2271 : ®_subrange, &bound_subrange);
2272 :
2273 : /* Get the bits from the bound value for the bits at the
2274 : intersection (relative to the bound value). */
2275 76 : const svalue *overlap_sval
2276 76 : = sval->extract_bit_range (NULL_TREE,
2277 : bound_subrange,
2278 : mgr->get_svalue_manager ());
2279 :
2280 76 : result_map.insert (reg_subrange, overlap_sval);
2281 :
2282 : /* Clobber default_map, removing/trimming/spliting where
2283 : it overlaps with overlap_concrete_key. */
2284 76 : default_map.remove_overlapping_bindings (mgr, reg_subrange);
2285 : }
2286 : }
2287 : else
2288 : /* Can't handle symbolic bindings. */
2289 : return nullptr;
2290 : }
2291 :
2292 68380 : if (result_map.size () == 0)
2293 : return nullptr;
2294 :
2295 : /* Merge any bindings from default_map into result_map. */
2296 3468 : for (auto iter : default_map)
2297 : {
2298 323 : const bit_range &key = iter.first;
2299 323 : const svalue *sval = iter.second;
2300 323 : result_map.insert (key, sval);
2301 : }
2302 :
2303 3145 : return sval_mgr->get_or_create_compound_svalue (reg->get_type (),
2304 3145 : std::move (result_map));
2305 76613 : }
2306 :
2307 : /* Remove, truncate, and/or split any bindings within this map that
2308 : could overlap REG.
2309 :
2310 : If REG's base region or this cluster is symbolic and they're different
2311 : base regions, then remove everything in this cluster's map, on the
2312 : grounds that REG could be referring to the same memory as anything
2313 : in the map.
2314 :
2315 : If UNCERTAINTY is non-NULL, use it to record any svalues that
2316 : were removed, as being maybe-bound.
2317 :
2318 : If MAYBE_LIVE_VALUES is non-NULL, use it to record any svalues that
2319 : were removed, as being maybe-live. */
2320 :
2321 : void
2322 81229 : binding_cluster::remove_overlapping_bindings (store_manager *mgr,
2323 : const region *reg,
2324 : uncertainty_t *uncertainty,
2325 : svalue_set *maybe_live_values)
2326 : {
2327 81229 : if (reg->empty_p ())
2328 : return;
2329 81229 : const binding_key *reg_binding = binding_key::make (mgr, reg);
2330 :
2331 81229 : const region *cluster_base_reg = get_base_region ();
2332 81229 : const region *other_base_reg = reg->get_base_region ();
2333 : /* If at least one of the base regions involved is symbolic, and they're
2334 : not the same base region, then consider everything in the map as
2335 : potentially overlapping with reg_binding (even if it's a concrete
2336 : binding and things in the map are concrete - they could be referring
2337 : to the same memory when the symbolic base regions are taken into
2338 : account). */
2339 81229 : bool always_overlap = (cluster_base_reg != other_base_reg
2340 81229 : && (cluster_base_reg->get_kind () == RK_SYMBOLIC
2341 18076 : || other_base_reg->get_kind () == RK_SYMBOLIC));
2342 81229 : m_map.remove_overlapping_bindings (mgr, reg_binding, uncertainty,
2343 : maybe_live_values,
2344 : always_overlap);
2345 : }
2346 :
2347 : /* Attempt to merge CLUSTER_A and CLUSTER_B into OUT_CLUSTER, using
2348 : MGR and MERGER.
2349 : Return true if they can be merged, false otherwise. */
2350 :
2351 : bool
2352 1209564 : binding_cluster::can_merge_p (const binding_cluster *cluster_a,
2353 : const binding_cluster *cluster_b,
2354 : binding_cluster *out_cluster,
2355 : store *out_store,
2356 : store_manager *mgr,
2357 : model_merger *merger)
2358 : {
2359 1209564 : gcc_assert (out_cluster);
2360 :
2361 : /* Merge flags ("ESCAPED" and "TOUCHED") by setting the merged flag to
2362 : true if either of the inputs is true. */
2363 1209564 : if ((cluster_a && cluster_a->m_escaped)
2364 769479 : || (cluster_b && cluster_b->m_escaped))
2365 2702 : out_cluster->m_escaped = true;
2366 1209564 : if ((cluster_a && cluster_a->m_touched)
2367 1012479 : || (cluster_b && cluster_b->m_touched))
2368 7064 : out_cluster->m_touched = true;
2369 :
2370 : /* At least one of CLUSTER_A and CLUSTER_B are non-NULL, but either
2371 : could be NULL. Handle these cases. */
2372 1209564 : if (cluster_a == nullptr)
2373 : {
2374 7666 : gcc_assert (cluster_b != nullptr);
2375 7666 : gcc_assert (cluster_b->m_base_region == out_cluster->m_base_region);
2376 7666 : out_cluster->make_unknown_relative_to (cluster_b, out_store, mgr);
2377 7666 : return true;
2378 : }
2379 1201898 : if (cluster_b == nullptr)
2380 : {
2381 24587 : gcc_assert (cluster_a != nullptr);
2382 24587 : gcc_assert (cluster_a->m_base_region == out_cluster->m_base_region);
2383 24587 : out_cluster->make_unknown_relative_to (cluster_a, out_store, mgr);
2384 24587 : return true;
2385 : }
2386 :
2387 : /* The "both inputs are non-NULL" case. */
2388 1177311 : gcc_assert (cluster_a != nullptr && cluster_b != nullptr);
2389 1177311 : gcc_assert (cluster_a->m_base_region == out_cluster->m_base_region);
2390 1177311 : gcc_assert (cluster_b->m_base_region == out_cluster->m_base_region);
2391 :
2392 1177311 : hash_set<const binding_key *> keys;
2393 2910167 : for (auto iter_a : cluster_a->m_map)
2394 : {
2395 1732856 : const binding_key *key_a = iter_a.m_key;
2396 1732856 : keys.add (key_a);
2397 : }
2398 2804176 : for (auto iter_b : cluster_b->m_map)
2399 : {
2400 1626865 : const binding_key *key_b = iter_b.m_key;
2401 1626865 : keys.add (key_b);
2402 : }
2403 1177311 : int num_symbolic_keys = 0;
2404 1177311 : int num_concrete_keys = 0;
2405 2812382 : for (hash_set<const binding_key *>::iterator iter = keys.begin ();
2406 4447453 : iter != keys.end (); ++iter)
2407 : {
2408 1724564 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
2409 1724564 : const binding_key *key = *iter;
2410 1724564 : const svalue *sval_a = cluster_a->get_any_value (key);
2411 1724564 : const svalue *sval_b = cluster_b->get_any_value (key);
2412 :
2413 1724564 : if (key->symbolic_p ())
2414 52649 : num_symbolic_keys++;
2415 : else
2416 1671915 : num_concrete_keys++;
2417 :
2418 1724564 : if (sval_a == sval_b)
2419 : {
2420 1362352 : gcc_assert (sval_a);
2421 1362352 : out_cluster->m_map.put (key, sval_a);
2422 1362352 : continue;
2423 : }
2424 362212 : else if (sval_a && sval_b)
2425 : {
2426 430162 : if (const svalue *merged_sval
2427 158787 : = sval_a->can_merge_p (sval_b, sval_mgr, merger))
2428 : {
2429 112588 : out_cluster->m_map.put (key, merged_sval);
2430 112588 : continue;
2431 : }
2432 : /* Merger of the svalues failed. Reject merger of the cluster. */
2433 89493 : return false;
2434 : }
2435 :
2436 : /* If we get here, then one cluster binds this key and the other
2437 : doesn't; merge them as "UNKNOWN". */
2438 203425 : gcc_assert (sval_a || sval_b);
2439 :
2440 203425 : const svalue *bound_sval = sval_a ? sval_a : sval_b;
2441 203425 : tree type = bound_sval->get_type ();
2442 203425 : const svalue *unknown_sval
2443 203425 : = mgr->get_svalue_manager ()->get_or_create_unknown_svalue (type);
2444 :
2445 : /* ...but reject the merger if this sval shouldn't be mergeable
2446 : (e.g. reject merging svalues that have non-purgable sm-state,
2447 : to avoid falsely reporting memory leaks by merging them
2448 : with something else). */
2449 203425 : if (!bound_sval->can_merge_p (unknown_sval, sval_mgr, merger))
2450 : return false;
2451 :
2452 160131 : out_cluster->m_map.put (key, unknown_sval);
2453 : }
2454 :
2455 : /* We might now have overlapping concrete clusters in OUT_CLUSTER.
2456 : Reject such cases. */
2457 1087818 : if (num_concrete_keys > 0)
2458 : {
2459 : /* Check for overlapping concrete bindings. */
2460 797696 : const auto &concrete_bindings
2461 797696 : = out_cluster->get_map ().get_concrete_bindings ();
2462 2246623 : for (auto iter = concrete_bindings.begin ();
2463 2246623 : iter != concrete_bindings.end (); ++iter)
2464 : {
2465 1465551 : auto next (iter);
2466 1465551 : ++next;
2467 1465551 : if (next != concrete_bindings.end ())
2468 : {
2469 684479 : const bit_range &iter_bits = iter->first;
2470 684479 : const bit_range &next_bits = next->first;
2471 684479 : if (iter_bits.intersects_p (next_bits))
2472 106117 : return false;
2473 : }
2474 : }
2475 : }
2476 :
2477 : /* We can only have at most one symbolic key per cluster,
2478 : and if we do, we can't have any concrete keys.
2479 : If this happens, mark the cluster as touched, with no keys. */
2480 1071194 : if (num_symbolic_keys >= 2
2481 1070589 : || (num_concrete_keys > 0 && num_symbolic_keys > 0))
2482 : {
2483 2699 : out_cluster->m_touched = true;
2484 2699 : out_cluster->m_map.clear ();
2485 : }
2486 :
2487 : /* We don't handle other kinds of overlaps yet. */
2488 :
2489 : return true;
2490 1177311 : }
2491 :
2492 : /* Update this cluster to reflect an attempt to merge OTHER where there
2493 : is no other cluster to merge with, and so we're notionally merging the
2494 : bound values in OTHER with the initial value of the relevant regions.
2495 :
2496 : Any bound keys in OTHER should be bound to unknown in this. */
2497 :
2498 : void
2499 32253 : binding_cluster::make_unknown_relative_to (const binding_cluster *other,
2500 : store *out_store,
2501 : store_manager *mgr)
2502 : {
2503 64570 : for (auto iter : *other)
2504 : {
2505 32317 : const binding_key *iter_key = iter.m_key;
2506 32317 : const svalue *iter_sval = iter.m_sval;
2507 32317 : const svalue *unknown_sval
2508 : = mgr->get_svalue_manager ()->get_or_create_unknown_svalue
2509 32317 : (iter_sval->get_type ());
2510 32317 : m_map.put (iter_key, unknown_sval);
2511 :
2512 : /* For any pointers in OTHER, the merger means that the
2513 : concrete pointer becomes an unknown value, which could
2514 : show up as a false report of a leak when considering what
2515 : pointers are live before vs after.
2516 : Avoid this by marking the base regions they point to as having
2517 : escaped. */
2518 64634 : if (const region_svalue *region_sval
2519 32317 : = iter_sval->dyn_cast_region_svalue ())
2520 : {
2521 2116 : const region *base_reg
2522 2116 : = region_sval->get_pointee ()->get_base_region ();
2523 2116 : if (base_reg->tracked_p ()
2524 2116 : && !base_reg->symbolic_for_unknown_ptr_p ())
2525 : {
2526 2114 : binding_cluster *c
2527 2114 : = out_store->get_or_create_cluster (*mgr, base_reg);
2528 2114 : c->mark_as_escaped ();
2529 : }
2530 : }
2531 : }
2532 32253 : }
2533 :
2534 : /* Mark this cluster as having escaped. */
2535 :
2536 : void
2537 36046 : binding_cluster::mark_as_escaped ()
2538 : {
2539 36046 : m_escaped = true;
2540 36046 : }
2541 :
2542 : /* If this cluster has escaped (by this call, or by an earlier one, or
2543 : by being an external param), then unbind all values and mark it
2544 : as "touched", so that it has a conjured value, rather than an
2545 : initial_svalue.
2546 : Use P to purge state involving conjured_svalues. */
2547 :
2548 : void
2549 123597 : binding_cluster::on_unknown_fncall (const gcall &call,
2550 : store_manager *mgr,
2551 : const conjured_purge &p)
2552 : {
2553 123597 : if (m_escaped)
2554 : {
2555 20945 : m_map.clear ();
2556 :
2557 20945 : if (!m_base_region->empty_p ())
2558 : {
2559 : /* Bind it to a new "conjured" value using CALL. */
2560 20945 : const svalue *sval
2561 : = mgr->get_svalue_manager ()->get_or_create_conjured_svalue
2562 20945 : (m_base_region->get_type (), &call, m_base_region, p);
2563 20945 : bind (mgr, m_base_region, sval);
2564 : }
2565 :
2566 20945 : m_touched = true;
2567 : }
2568 123597 : }
2569 :
2570 : /* Mark this cluster as having been clobbered by STMT.
2571 : Use P to purge state involving conjured_svalues. */
2572 :
2573 : void
2574 510 : binding_cluster::on_asm (const gasm *stmt,
2575 : store_manager *mgr,
2576 : const conjured_purge &p)
2577 : {
2578 510 : m_map.clear ();
2579 :
2580 : /* Bind it to a new "conjured" value using CALL. */
2581 510 : const svalue *sval
2582 : = mgr->get_svalue_manager ()->get_or_create_conjured_svalue
2583 510 : (m_base_region->get_type (), stmt, m_base_region, p);
2584 510 : bind (mgr, m_base_region, sval);
2585 :
2586 510 : m_touched = true;
2587 510 : }
2588 :
2589 : /* Return true if this cluster has escaped. */
2590 :
2591 : bool
2592 9292874 : binding_cluster::escaped_p () const
2593 : {
2594 : /* Consider the "errno" region to always have escaped. */
2595 9292874 : if (m_base_region->get_kind () == RK_ERRNO)
2596 : return true;
2597 9269323 : return m_escaped;
2598 : }
2599 :
2600 : /* Return true if this binding_cluster has no information
2601 : i.e. if there are no bindings, and it hasn't been marked as having
2602 : escaped, or touched symbolically. */
2603 :
2604 : bool
2605 221832 : binding_cluster::redundant_p () const
2606 : {
2607 221832 : return (m_map.elements () == 0
2608 217926 : && !m_escaped
2609 438626 : && !m_touched);
2610 : }
2611 :
2612 : /* Add PV to OUT_PVS, casting it to TYPE if it is not already of that type. */
2613 :
2614 : static void
2615 7034 : append_pathvar_with_type (path_var pv,
2616 : tree type,
2617 : auto_vec<path_var> *out_pvs)
2618 : {
2619 7034 : gcc_assert (pv.m_tree);
2620 :
2621 7034 : if (TREE_TYPE (pv.m_tree) != type)
2622 537 : pv.m_tree = build1 (NOP_EXPR, type, pv.m_tree);
2623 :
2624 7034 : out_pvs->safe_push (pv);
2625 7034 : }
2626 :
2627 : /* Find representative path_vars for SVAL within this binding of BASE_REG,
2628 : appending the results to OUT_PVS. */
2629 :
2630 : void
2631 44010 : binding_cluster::get_representative_path_vars (const region_model *model,
2632 : svalue_set *visited,
2633 : const region *base_reg,
2634 : const svalue *sval,
2635 : logger *logger,
2636 : auto_vec<path_var> *out_pvs)
2637 : const
2638 : {
2639 44010 : sval = simplify_for_binding (sval);
2640 :
2641 84727 : for (auto iter : m_map)
2642 : {
2643 40717 : const binding_key *key = iter.m_key;
2644 40717 : const svalue *bound_sval = iter.m_sval;
2645 40717 : if (bound_sval == sval)
2646 : {
2647 14850 : if (const concrete_binding *ckey
2648 7425 : = key->dyn_cast_concrete_binding ())
2649 : {
2650 7391 : auto_vec <const region *> subregions;
2651 7391 : base_reg->get_subregions_for_binding
2652 7391 : (model->get_manager (),
2653 : ckey->get_start_bit_offset (),
2654 : ckey->get_size_in_bits (),
2655 : sval->get_type (),
2656 : &subregions);
2657 7391 : unsigned i;
2658 7391 : const region *subregion;
2659 21399 : FOR_EACH_VEC_ELT (subregions, i, subregion)
2660 : {
2661 14008 : if (path_var pv
2662 7004 : = model->get_representative_path_var (subregion,
2663 : visited,
2664 7004 : logger))
2665 7000 : append_pathvar_with_type (pv, sval->get_type (), out_pvs);
2666 : }
2667 7391 : }
2668 : else
2669 : {
2670 34 : const symbolic_binding *skey = (const symbolic_binding *)key;
2671 68 : if (path_var pv
2672 34 : = model->get_representative_path_var (skey->get_region (),
2673 : visited,
2674 34 : logger))
2675 34 : append_pathvar_with_type (pv, sval->get_type (), out_pvs);
2676 : }
2677 : }
2678 : }
2679 44010 : }
2680 :
2681 : /* Get any svalue bound to KEY, or nullptr. */
2682 :
2683 : const svalue *
2684 3597299 : binding_cluster::get_any_value (const binding_key *key) const
2685 : {
2686 3597299 : return m_map.get (key);
2687 : }
2688 :
2689 : /* If this cluster has a single direct binding for the whole of the region,
2690 : return it.
2691 : For use in simplifying dumps. */
2692 :
2693 : const svalue *
2694 292262 : binding_cluster::maybe_get_simple_value (store_manager *mgr) const
2695 : {
2696 : /* Fail gracefully if MGR is nullptr to make it easier to dump store
2697 : instances in the debugger. */
2698 292262 : if (mgr == nullptr)
2699 : return nullptr;
2700 :
2701 292262 : if (m_map.elements () != 1)
2702 : return nullptr;
2703 :
2704 148171 : if (m_base_region->empty_p ())
2705 : return nullptr;
2706 :
2707 148171 : const binding_key *key = binding_key::make (mgr, m_base_region);
2708 148171 : return get_any_value (key);
2709 : }
2710 :
2711 : /* class store_manager. */
2712 :
2713 : logger *
2714 382158 : store_manager::get_logger () const
2715 : {
2716 382158 : return m_mgr->get_logger ();
2717 : }
2718 :
2719 : /* binding consolidation. */
2720 :
2721 : const concrete_binding *
2722 13513596 : store_manager::get_concrete_binding (bit_offset_t start_bit_offset,
2723 : bit_offset_t size_in_bits)
2724 : {
2725 13513596 : concrete_binding b (start_bit_offset, size_in_bits);
2726 27014520 : if (concrete_binding *existing = m_concrete_binding_key_mgr.get (b))
2727 : return existing;
2728 :
2729 12672 : concrete_binding *to_save = new concrete_binding (b);
2730 12672 : m_concrete_binding_key_mgr.put (b, to_save);
2731 12672 : return to_save;
2732 13513596 : }
2733 :
2734 : const symbolic_binding *
2735 1852503 : store_manager::get_symbolic_binding (const region *reg)
2736 : {
2737 1852503 : symbolic_binding b (reg);
2738 3684553 : if (symbolic_binding *existing = m_symbolic_binding_key_mgr.get (b))
2739 : return existing;
2740 :
2741 20453 : symbolic_binding *to_save = new symbolic_binding (b);
2742 20453 : m_symbolic_binding_key_mgr.put (b, to_save);
2743 20453 : return to_save;
2744 1852503 : }
2745 :
2746 : /* class store. */
2747 :
2748 : /* store's default ctor. */
2749 :
2750 415012 : store::store ()
2751 415012 : : m_called_unknown_fn (false)
2752 : {
2753 415012 : }
2754 :
2755 : /* store's copy ctor. */
2756 :
2757 3481141 : store::store (const store &other)
2758 3481141 : : m_cluster_map (other.m_cluster_map.elements ()),
2759 3481141 : m_called_unknown_fn (other.m_called_unknown_fn)
2760 : {
2761 3481141 : for (cluster_map_t::iterator iter = other.m_cluster_map.begin ();
2762 33646858 : iter != other.m_cluster_map.end ();
2763 30165717 : ++iter)
2764 : {
2765 30165717 : const region *reg = (*iter).first;
2766 0 : gcc_assert (reg);
2767 30165717 : binding_cluster *c = (*iter).second;
2768 30165717 : gcc_assert (c);
2769 30165717 : m_cluster_map.put (reg, new binding_cluster (*c));
2770 : }
2771 3481141 : }
2772 :
2773 : /* store's dtor. */
2774 :
2775 3896153 : store::~store ()
2776 : {
2777 35743171 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2778 35743171 : iter != m_cluster_map.end ();
2779 31847018 : ++iter)
2780 31847018 : delete (*iter).second;
2781 3896153 : }
2782 :
2783 : /* store's assignment operator. */
2784 :
2785 : store &
2786 97771 : store::operator= (const store &other)
2787 : {
2788 : /* Delete existing cluster map. */
2789 747447 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2790 747447 : iter != m_cluster_map.end ();
2791 649676 : ++iter)
2792 1299352 : delete (*iter).second;
2793 97771 : m_cluster_map.empty ();
2794 :
2795 97771 : m_called_unknown_fn = other.m_called_unknown_fn;
2796 :
2797 97771 : for (cluster_map_t::iterator iter = other.m_cluster_map.begin ();
2798 1152699 : iter != other.m_cluster_map.end ();
2799 1054928 : ++iter)
2800 : {
2801 1054928 : const region *reg = (*iter).first;
2802 1054928 : gcc_assert (reg);
2803 1054928 : binding_cluster *c = (*iter).second;
2804 1054928 : gcc_assert (c);
2805 1054928 : m_cluster_map.put (reg, new binding_cluster (*c));
2806 : }
2807 97771 : return *this;
2808 : }
2809 :
2810 : /* store's equality operator. */
2811 :
2812 : bool
2813 505285 : store::operator== (const store &other) const
2814 : {
2815 505285 : if (m_called_unknown_fn != other.m_called_unknown_fn)
2816 : return false;
2817 :
2818 501186 : if (m_cluster_map.elements () != other.m_cluster_map.elements ())
2819 : return false;
2820 :
2821 3681558 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2822 3681558 : iter != m_cluster_map.end ();
2823 3206603 : ++iter)
2824 : {
2825 3273898 : const region *reg = (*iter).first;
2826 3273898 : binding_cluster *c = (*iter).second;
2827 3273898 : binding_cluster **other_slot
2828 3273898 : = const_cast <cluster_map_t &> (other.m_cluster_map).get (reg);
2829 3273898 : if (other_slot == nullptr)
2830 67295 : return false;
2831 3272188 : if (*c != **other_slot)
2832 : return false;
2833 : }
2834 :
2835 407660 : gcc_checking_assert (hash () == other.hash ());
2836 :
2837 : return true;
2838 : }
2839 :
2840 : /* Get a hash value for this store. */
2841 :
2842 : hashval_t
2843 2065140 : store::hash () const
2844 : {
2845 2065140 : hashval_t result = 0;
2846 2065140 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2847 16906990 : iter != m_cluster_map.end ();
2848 14841850 : ++iter)
2849 14841850 : result ^= (*iter).second->hash ();
2850 2065140 : return result;
2851 : }
2852 :
2853 : /* Populate OUT with a sorted list of parent regions for the regions in IN,
2854 : removing duplicate parents. */
2855 :
2856 : static void
2857 2134 : get_sorted_parent_regions (auto_vec<const region *> *out,
2858 : auto_vec<const region *> &in)
2859 : {
2860 : /* Get the set of parent regions. */
2861 2134 : hash_set<const region *> parent_regions;
2862 2134 : const region *iter_reg;
2863 2134 : unsigned i;
2864 10887 : FOR_EACH_VEC_ELT (in, i, iter_reg)
2865 : {
2866 8753 : const region *parent_reg = iter_reg->get_parent_region ();
2867 8753 : gcc_assert (parent_reg);
2868 8753 : parent_regions.add (parent_reg);
2869 : }
2870 :
2871 : /* Write to OUT. */
2872 2134 : for (hash_set<const region *>::iterator iter = parent_regions.begin();
2873 11368 : iter != parent_regions.end(); ++iter)
2874 4617 : out->safe_push (*iter);
2875 :
2876 : /* Sort OUT. */
2877 4066 : out->qsort (region::cmp_ptr_ptr);
2878 2134 : }
2879 :
2880 : /* Dump a representation of this store to PP, using SIMPLE to control how
2881 : svalues and regions are printed.
2882 : MGR is used for simplifying dumps if non-NULL, but can also be nullptr
2883 : (to make it easier to use from the debugger). */
2884 :
2885 : void
2886 2126 : store::dump_to_pp (pretty_printer *pp, bool simple, bool multiline,
2887 : store_manager *mgr) const
2888 : {
2889 : /* Sort into some deterministic order. */
2890 2126 : auto_vec<const region *> base_regions;
2891 10879 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
2892 19632 : iter != m_cluster_map.end (); ++iter)
2893 : {
2894 8753 : const region *base_reg = (*iter).first;
2895 8753 : base_regions.safe_push (base_reg);
2896 : }
2897 2126 : base_regions.qsort (region::cmp_ptr_ptr);
2898 :
2899 : /* Gather clusters, organize by parent region, so that we can group
2900 : together locals, globals, etc. */
2901 2126 : auto_vec<const region *> parent_regions;
2902 2126 : get_sorted_parent_regions (&parent_regions, base_regions);
2903 :
2904 2126 : const region *parent_reg;
2905 2126 : unsigned i;
2906 6743 : FOR_EACH_VEC_ELT (parent_regions, i, parent_reg)
2907 : {
2908 4617 : gcc_assert (parent_reg);
2909 4617 : pp_string (pp, "clusters within ");
2910 4617 : parent_reg->dump_to_pp (pp, simple);
2911 4617 : if (multiline)
2912 1175 : pp_newline (pp);
2913 : else
2914 3442 : pp_string (pp, " {");
2915 :
2916 : const region *base_reg;
2917 : unsigned j;
2918 27008 : FOR_EACH_VEC_ELT (base_regions, j, base_reg)
2919 : {
2920 : /* This is O(N * M), but N ought to be small. */
2921 22391 : if (base_reg->get_parent_region () != parent_reg)
2922 13638 : continue;
2923 8753 : binding_cluster *cluster
2924 8753 : = *const_cast<cluster_map_t &> (m_cluster_map).get (base_reg);
2925 8753 : if (!multiline)
2926 : {
2927 5854 : if (j > 0)
2928 4435 : pp_string (pp, ", ");
2929 : }
2930 8753 : if (const svalue *sval = cluster->maybe_get_simple_value (mgr))
2931 : {
2932 : /* Special-case to simplify dumps for the common case where
2933 : we just have one value directly bound to the whole of a
2934 : region. */
2935 7022 : if (multiline)
2936 : {
2937 1811 : pp_string (pp, " cluster for: ");
2938 1811 : base_reg->dump_to_pp (pp, simple);
2939 1811 : pp_string (pp, ": ");
2940 1811 : sval->dump_to_pp (pp, simple);
2941 1811 : if (cluster->escaped_p ())
2942 48 : pp_string (pp, " (ESCAPED)");
2943 1811 : if (cluster->touched_p ())
2944 6 : pp_string (pp, " (TOUCHED)");
2945 1811 : pp_newline (pp);
2946 : }
2947 : else
2948 : {
2949 5211 : pp_string (pp, "region: {");
2950 5211 : base_reg->dump_to_pp (pp, simple);
2951 5211 : pp_string (pp, ", value: ");
2952 5211 : sval->dump_to_pp (pp, simple);
2953 5211 : if (cluster->escaped_p ())
2954 1066 : pp_string (pp, " (ESCAPED)");
2955 5211 : if (cluster->touched_p ())
2956 1066 : pp_string (pp, " (TOUCHED)");
2957 5211 : pp_string (pp, "}");
2958 : }
2959 : }
2960 1731 : else if (multiline)
2961 : {
2962 1088 : pp_string (pp, " cluster for: ");
2963 1088 : base_reg->dump_to_pp (pp, simple);
2964 1088 : pp_newline (pp);
2965 1088 : cluster->dump_to_pp (pp, simple, multiline);
2966 : }
2967 : else
2968 : {
2969 643 : pp_string (pp, "base region: {");
2970 643 : base_reg->dump_to_pp (pp, simple);
2971 643 : pp_string (pp, "} has cluster: {");
2972 643 : cluster->dump_to_pp (pp, simple, multiline);
2973 643 : pp_string (pp, "}");
2974 : }
2975 : }
2976 4617 : if (!multiline)
2977 3442 : pp_string (pp, "}");
2978 : }
2979 2126 : pp_printf (pp, "m_called_unknown_fn: %s",
2980 2126 : m_called_unknown_fn ? "TRUE" : "FALSE");
2981 2126 : if (multiline)
2982 545 : pp_newline (pp);
2983 2126 : }
2984 :
2985 : /* Dump a multiline representation of this store to stderr. */
2986 :
2987 : DEBUG_FUNCTION void
2988 0 : store::dump (bool simple) const
2989 : {
2990 0 : tree_dump_pretty_printer pp (stderr);
2991 0 : dump_to_pp (&pp, simple, true, nullptr);
2992 0 : pp_newline (&pp);
2993 0 : }
2994 :
2995 : /* Assert that this object is valid. */
2996 :
2997 : void
2998 1729302 : store::validate () const
2999 : {
3000 14670640 : for (auto iter : m_cluster_map)
3001 12941338 : iter.second->validate ();
3002 1729302 : }
3003 :
3004 : /* Return a new json::object of the form
3005 : {PARENT_REGION_DESC: {BASE_REGION_DESC: object for binding_map,
3006 : ... for each cluster within parent region},
3007 : ...for each parent region,
3008 : "called_unknown_fn": true/false}. */
3009 :
3010 : std::unique_ptr<json::object>
3011 4 : store::to_json () const
3012 : {
3013 4 : auto store_obj = std::make_unique<json::object> ();
3014 :
3015 : /* Sort into some deterministic order. */
3016 4 : auto_vec<const region *> base_regions;
3017 4 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3018 4 : iter != m_cluster_map.end (); ++iter)
3019 : {
3020 0 : const region *base_reg = (*iter).first;
3021 0 : base_regions.safe_push (base_reg);
3022 : }
3023 4 : base_regions.qsort (region::cmp_ptr_ptr);
3024 :
3025 : /* Gather clusters, organize by parent region, so that we can group
3026 : together locals, globals, etc. */
3027 4 : auto_vec<const region *> parent_regions;
3028 4 : get_sorted_parent_regions (&parent_regions, base_regions);
3029 :
3030 4 : const region *parent_reg;
3031 4 : unsigned i;
3032 4 : FOR_EACH_VEC_ELT (parent_regions, i, parent_reg)
3033 : {
3034 0 : gcc_assert (parent_reg);
3035 :
3036 0 : auto clusters_in_parent_reg_obj = std::make_unique<json::object> ();
3037 :
3038 0 : const region *base_reg;
3039 0 : unsigned j;
3040 0 : FOR_EACH_VEC_ELT (base_regions, j, base_reg)
3041 : {
3042 : /* This is O(N * M), but N ought to be small. */
3043 0 : if (base_reg->get_parent_region () != parent_reg)
3044 0 : continue;
3045 0 : binding_cluster *cluster
3046 0 : = *const_cast<cluster_map_t &> (m_cluster_map).get (base_reg);
3047 0 : label_text base_reg_desc = base_reg->get_desc ();
3048 0 : clusters_in_parent_reg_obj->set (base_reg_desc.get (),
3049 0 : cluster->to_json ());
3050 0 : }
3051 0 : label_text parent_reg_desc = parent_reg->get_desc ();
3052 0 : store_obj->set (parent_reg_desc.get (),
3053 : std::move (clusters_in_parent_reg_obj));
3054 0 : }
3055 :
3056 4 : store_obj->set_bool ("called_unknown_fn", m_called_unknown_fn);
3057 :
3058 4 : return store_obj;
3059 4 : }
3060 :
3061 : std::unique_ptr<text_art::tree_widget>
3062 4 : store::make_dump_widget (const text_art::dump_widget_info &dwi,
3063 : store_manager *mgr) const
3064 : {
3065 4 : std::unique_ptr<text_art::tree_widget> store_widget
3066 4 : (text_art::tree_widget::make (dwi, "Store"));
3067 :
3068 4 : store_widget->add_child
3069 8 : (text_art::tree_widget::from_fmt (dwi, nullptr,
3070 : "m_called_unknown_fn: %s",
3071 4 : m_called_unknown_fn ? "true" : "false"));
3072 :
3073 : /* Sort into some deterministic order. */
3074 4 : auto_vec<const region *> base_regions;
3075 4 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3076 4 : iter != m_cluster_map.end (); ++iter)
3077 : {
3078 0 : const region *base_reg = (*iter).first;
3079 0 : base_regions.safe_push (base_reg);
3080 : }
3081 4 : base_regions.qsort (region::cmp_ptr_ptr);
3082 :
3083 : /* Gather clusters, organize by parent region, so that we can group
3084 : together locals, globals, etc. */
3085 4 : auto_vec<const region *> parent_regions;
3086 4 : get_sorted_parent_regions (&parent_regions, base_regions);
3087 :
3088 4 : const region *parent_reg;
3089 4 : unsigned i;
3090 4 : FOR_EACH_VEC_ELT (parent_regions, i, parent_reg)
3091 : {
3092 0 : gcc_assert (parent_reg);
3093 :
3094 0 : pretty_printer the_pp;
3095 0 : pretty_printer * const pp = &the_pp;
3096 0 : pp_format_decoder (pp) = default_tree_printer;
3097 0 : pp_show_color (pp) = true;
3098 0 : const bool simple = true;
3099 :
3100 0 : parent_reg->dump_to_pp (pp, simple);
3101 :
3102 0 : std::unique_ptr<text_art::tree_widget> parent_reg_widget
3103 0 : (text_art::tree_widget::make (dwi, pp));
3104 :
3105 0 : const region *base_reg;
3106 0 : unsigned j;
3107 0 : FOR_EACH_VEC_ELT (base_regions, j, base_reg)
3108 : {
3109 : /* This is O(N * M), but N ought to be small. */
3110 0 : if (base_reg->get_parent_region () != parent_reg)
3111 0 : continue;
3112 0 : binding_cluster *cluster
3113 0 : = *const_cast<cluster_map_t &> (m_cluster_map).get (base_reg);
3114 0 : parent_reg_widget->add_child
3115 0 : (cluster->make_dump_widget (dwi, mgr));
3116 : }
3117 0 : store_widget->add_child (std::move (parent_reg_widget));
3118 0 : }
3119 :
3120 4 : return store_widget;
3121 4 : }
3122 :
3123 : /* Get any svalue bound to REG, or nullptr. */
3124 :
3125 : const svalue *
3126 4146406 : store::get_any_binding (store_manager *mgr, const region *reg) const
3127 : {
3128 4146406 : const region *base_reg = reg->get_base_region ();
3129 4146406 : binding_cluster **cluster_slot
3130 4146406 : = const_cast <cluster_map_t &> (m_cluster_map).get (base_reg);
3131 4146406 : if (!cluster_slot)
3132 : return nullptr;
3133 1269971 : return (*cluster_slot)->get_any_binding (mgr, reg);
3134 : }
3135 :
3136 : /* Set the value of LHS_REG to RHS_SVAL. */
3137 :
3138 : void
3139 382158 : store::set_value (store_manager *mgr, const region *lhs_reg,
3140 : const svalue *rhs_sval,
3141 : uncertainty_t *uncertainty)
3142 : {
3143 382158 : logger *logger = mgr->get_logger ();
3144 382158 : LOG_SCOPE (logger);
3145 :
3146 382158 : remove_overlapping_bindings (mgr, lhs_reg, uncertainty);
3147 :
3148 382158 : if (lhs_reg->get_type ())
3149 369336 : rhs_sval = simplify_for_binding (rhs_sval);
3150 : /* ...but if we have no type for the region, retain any cast. */
3151 :
3152 382158 : const region *lhs_base_reg = lhs_reg->get_base_region ();
3153 382158 : binding_cluster *lhs_cluster;
3154 382158 : if (lhs_base_reg->symbolic_for_unknown_ptr_p ())
3155 : {
3156 : /* Reject attempting to bind values into a symbolic region
3157 : for an unknown ptr; merely invalidate values below. */
3158 2532 : lhs_cluster = nullptr;
3159 :
3160 : /* The LHS of the write is *UNKNOWN. If the RHS is a pointer,
3161 : then treat the region being pointed to as having escaped. */
3162 2532 : if (const region_svalue *ptr_sval = rhs_sval->dyn_cast_region_svalue ())
3163 : {
3164 83 : const region *ptr_dst = ptr_sval->get_pointee ();
3165 83 : const region *ptr_base_reg = ptr_dst->get_base_region ();
3166 83 : mark_as_escaped (*mgr, ptr_base_reg);
3167 : }
3168 2532 : if (uncertainty)
3169 1110 : uncertainty->on_maybe_bound_sval (rhs_sval);
3170 : }
3171 379626 : else if (lhs_base_reg->tracked_p ())
3172 : {
3173 379548 : lhs_cluster = get_or_create_cluster (*mgr, lhs_base_reg);
3174 379548 : lhs_cluster->bind (mgr, lhs_reg, rhs_sval);
3175 : }
3176 : else
3177 : {
3178 : /* Reject attempting to bind values into an untracked region;
3179 : merely invalidate values below. */
3180 : lhs_cluster = nullptr;
3181 : }
3182 :
3183 : /* Bindings to a cluster can affect other clusters if a symbolic
3184 : base region is involved.
3185 : Writes to concrete clusters can't affect other concrete clusters,
3186 : but can affect symbolic clusters.
3187 : Writes to symbolic clusters can affect both concrete and symbolic
3188 : clusters.
3189 : Invalidate our knowledge of other clusters that might have been
3190 : affected by the write.
3191 : Gather the set of all svalues that might still be live even if
3192 : the store doesn't refer to them. */
3193 382158 : svalue_set maybe_live_values;
3194 5473517 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3195 10564876 : iter != m_cluster_map.end (); ++iter)
3196 : {
3197 5091359 : const region *iter_base_reg = (*iter).first;
3198 5091359 : binding_cluster *iter_cluster = (*iter).second;
3199 5091359 : if (iter_base_reg != lhs_base_reg
3200 5091359 : && (lhs_cluster == nullptr
3201 4673061 : || lhs_cluster->symbolic_p ()
3202 4616794 : || iter_cluster->symbolic_p ()))
3203 : {
3204 676649 : tristate t_alias = eval_alias (lhs_base_reg, iter_base_reg);
3205 676649 : switch (t_alias.get_value ())
3206 : {
3207 0 : default:
3208 0 : gcc_unreachable ();
3209 :
3210 46600 : case tristate::TS_UNKNOWN:
3211 46600 : if (logger)
3212 : {
3213 0 : pretty_printer *pp = logger->get_printer ();
3214 0 : logger->start_log_line ();
3215 0 : logger->log_partial ("possible aliasing of ");
3216 0 : iter_base_reg->dump_to_pp (pp, true);
3217 0 : logger->log_partial (" when writing SVAL: ");
3218 0 : rhs_sval->dump_to_pp (pp, true);
3219 0 : logger->log_partial (" to LHS_REG: ");
3220 0 : lhs_reg->dump_to_pp (pp, true);
3221 0 : logger->end_log_line ();
3222 : }
3223 : /* Mark all of iter_cluster's iter_base_reg as unknown,
3224 : using LHS_REG when considering overlaps, to handle
3225 : symbolic vs concrete issues. */
3226 46600 : iter_cluster->mark_region_as_unknown
3227 46600 : (mgr,
3228 : iter_base_reg, /* reg_to_bind */
3229 : lhs_reg, /* reg_for_overlap */
3230 : uncertainty,
3231 : &maybe_live_values);
3232 46600 : break;
3233 :
3234 0 : case tristate::TS_TRUE:
3235 0 : gcc_unreachable ();
3236 : break;
3237 :
3238 : case tristate::TS_FALSE:
3239 : /* If they can't be aliases, then don't invalidate this
3240 : cluster. */
3241 : break;
3242 : }
3243 : }
3244 : }
3245 : /* Given the set of svalues that might still be live, process them
3246 : (e.g. marking regions as escaped).
3247 : We do this after the iteration to avoid potentially changing
3248 : m_cluster_map whilst iterating over it. */
3249 382158 : on_maybe_live_values (*mgr, maybe_live_values);
3250 382158 : }
3251 :
3252 : /* Determine if BASE_REG_A could be an alias of BASE_REG_B. */
3253 :
3254 : tristate
3255 676858 : store::eval_alias (const region *base_reg_a,
3256 : const region *base_reg_b) const
3257 : {
3258 : /* SSA names can't alias. */
3259 676858 : tree decl_a = base_reg_a->maybe_get_decl ();
3260 676858 : if (decl_a && TREE_CODE (decl_a) == SSA_NAME)
3261 488254 : return tristate::TS_FALSE;
3262 188604 : tree decl_b = base_reg_b->maybe_get_decl ();
3263 188604 : if (decl_b && TREE_CODE (decl_b) == SSA_NAME)
3264 42371 : return tristate::TS_FALSE;
3265 :
3266 : /* Different decls don't alias. */
3267 146233 : if (decl_a && decl_b && decl_a != decl_b)
3268 53 : return tristate::TS_FALSE;
3269 :
3270 : /* Try both ways, for symmetry. */
3271 146180 : tristate ts_ab = eval_alias_1 (base_reg_a, base_reg_b);
3272 146180 : if (ts_ab.is_false ())
3273 18492 : return tristate::TS_FALSE;
3274 127688 : tristate ts_ba = eval_alias_1 (base_reg_b, base_reg_a);
3275 127688 : if (ts_ba.is_false ())
3276 80959 : return tristate::TS_FALSE;
3277 46729 : return tristate::TS_UNKNOWN;
3278 : }
3279 :
3280 : /* Half of store::eval_alias; called twice for symmetry. */
3281 :
3282 : tristate
3283 273868 : store::eval_alias_1 (const region *base_reg_a,
3284 : const region *base_reg_b) const
3285 : {
3286 : /* If they're in different memory spaces, they can't alias. */
3287 273868 : {
3288 273868 : enum memory_space memspace_a = base_reg_a->get_memory_space ();
3289 273868 : if (memspace_a != MEMSPACE_UNKNOWN)
3290 : {
3291 106674 : enum memory_space memspace_b = base_reg_b->get_memory_space ();
3292 106674 : if (memspace_b != MEMSPACE_UNKNOWN
3293 106674 : && memspace_a != memspace_b)
3294 48 : return tristate::TS_FALSE;
3295 : }
3296 : }
3297 :
3298 547640 : if (const symbolic_region *sym_reg_a
3299 273820 : = base_reg_a->dyn_cast_symbolic_region ())
3300 : {
3301 162052 : const svalue *sval_a = sym_reg_a->get_pointer ();
3302 162052 : if (tree decl_b = base_reg_b->maybe_get_decl ())
3303 : {
3304 103832 : if (!may_be_aliased (decl_b))
3305 39193 : return tristate::TS_FALSE;
3306 64639 : if (sval_a->get_kind () == SK_INITIAL)
3307 55092 : if (!is_global_var (decl_b))
3308 : {
3309 : /* The initial value of a pointer can't point to a local. */
3310 49081 : return tristate::TS_FALSE;
3311 : }
3312 : }
3313 73778 : if (sval_a->get_kind () == SK_INITIAL
3314 73778 : && base_reg_b->get_kind () == RK_HEAP_ALLOCATED)
3315 : {
3316 : /* The initial value of a pointer can't point to a
3317 : region that was allocated on the heap after the beginning of the
3318 : path. */
3319 11049 : return tristate::TS_FALSE;
3320 : }
3321 125458 : if (const widening_svalue *widening_sval_a
3322 62729 : = sval_a->dyn_cast_widening_svalue ())
3323 : {
3324 2266 : const svalue *base = widening_sval_a->get_base_svalue ();
3325 4532 : if (const region_svalue *region_sval
3326 2266 : = base->dyn_cast_region_svalue ())
3327 : {
3328 209 : const region *pointee = region_sval->get_pointee ();
3329 : /* If we have sval_a is WIDENING(®ION, OP), and
3330 : B can't alias REGION, then B can't alias A either.
3331 : For example, A might arise from
3332 : for (ptr = ®ION; ...; ptr++)
3333 : where sval_a is ptr in the 2nd iteration of the loop.
3334 : We want to ensure that "*ptr" can only clobber things
3335 : within REGION's base region. */
3336 209 : tristate ts = eval_alias (pointee->get_base_region (),
3337 : base_reg_b);
3338 209 : if (ts.is_false ())
3339 80 : return tristate::TS_FALSE;
3340 : }
3341 : }
3342 : }
3343 174417 : return tristate::TS_UNKNOWN;
3344 : }
3345 :
3346 : /* Record all of the values in MAYBE_LIVE_VALUES as being possibly live. */
3347 :
3348 : void
3349 382467 : store::on_maybe_live_values (store_manager &mgr,
3350 : const svalue_set &maybe_live_values)
3351 : {
3352 449675 : for (auto sval : maybe_live_values)
3353 : {
3354 33604 : if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3355 : {
3356 173 : const region *base_reg = ptr_sval->get_pointee ()->get_base_region ();
3357 173 : mark_as_escaped (mgr, base_reg);
3358 : }
3359 : }
3360 382467 : }
3361 :
3362 : /* Remove all bindings overlapping REG within this store. */
3363 :
3364 : void
3365 6656 : store::clobber_region (store_manager *mgr, const region *reg)
3366 : {
3367 6656 : const region *base_reg = reg->get_base_region ();
3368 6656 : binding_cluster **slot = m_cluster_map.get (base_reg);
3369 6656 : if (!slot)
3370 1938 : return;
3371 4718 : binding_cluster *cluster = *slot;
3372 4718 : cluster->clobber_region (mgr, reg);
3373 4718 : if (cluster->redundant_p ())
3374 : {
3375 3225 : delete cluster;
3376 3225 : m_cluster_map.remove (base_reg);
3377 : }
3378 : }
3379 :
3380 : /* Remove any bindings for REG within this store. */
3381 :
3382 : void
3383 217114 : store::purge_region (store_manager *mgr, const region *reg)
3384 : {
3385 217114 : const region *base_reg = reg->get_base_region ();
3386 217114 : binding_cluster **slot = m_cluster_map.get (base_reg);
3387 217114 : if (!slot)
3388 0 : return;
3389 217114 : binding_cluster *cluster = *slot;
3390 217114 : cluster->purge_region (mgr, reg);
3391 217114 : if (cluster->redundant_p ())
3392 : {
3393 213062 : delete cluster;
3394 213062 : m_cluster_map.remove (base_reg);
3395 : }
3396 : }
3397 :
3398 : /* Fill REG with SVAL. */
3399 :
3400 : void
3401 1588 : store::fill_region (store_manager *mgr, const region *reg, const svalue *sval)
3402 : {
3403 : /* Filling an empty region is a no-op. */
3404 1588 : if (reg->empty_p ())
3405 : return;
3406 :
3407 1556 : const region *base_reg = reg->get_base_region ();
3408 1556 : if (base_reg->symbolic_for_unknown_ptr_p ()
3409 1556 : || !base_reg->tracked_p ())
3410 6 : return;
3411 1550 : binding_cluster *cluster = get_or_create_cluster (*mgr, base_reg);
3412 1550 : cluster->fill_region (mgr, reg, sval);
3413 : }
3414 :
3415 : /* Zero-fill REG. */
3416 :
3417 : void
3418 846 : store::zero_fill_region (store_manager *mgr, const region *reg)
3419 : {
3420 846 : region_model_manager *sval_mgr = mgr->get_svalue_manager ();
3421 846 : const svalue *zero_sval = sval_mgr->get_or_create_int_cst (char_type_node, 0);
3422 846 : fill_region (mgr, reg, zero_sval);
3423 846 : }
3424 :
3425 : /* Mark REG as having unknown content. */
3426 :
3427 : void
3428 309 : store::mark_region_as_unknown (store_manager *mgr, const region *reg,
3429 : uncertainty_t *uncertainty,
3430 : svalue_set *maybe_live_values)
3431 : {
3432 309 : const region *base_reg = reg->get_base_region ();
3433 309 : if (base_reg->symbolic_for_unknown_ptr_p ()
3434 309 : || !base_reg->tracked_p ())
3435 0 : return;
3436 309 : binding_cluster *cluster = get_or_create_cluster (*mgr, base_reg);
3437 309 : cluster->mark_region_as_unknown (mgr, reg, reg, uncertainty,
3438 : maybe_live_values);
3439 : }
3440 :
3441 : /* Purge state involving SVAL. */
3442 :
3443 : void
3444 30046 : store::purge_state_involving (const svalue *sval,
3445 : region_model_manager *sval_mgr)
3446 : {
3447 30046 : auto_vec <const region *> base_regs_to_purge;
3448 838348 : for (auto iter : m_cluster_map)
3449 : {
3450 404151 : const region *base_reg = iter.first;
3451 404151 : if (base_reg->involves_p (sval))
3452 121 : base_regs_to_purge.safe_push (base_reg);
3453 : else
3454 : {
3455 404030 : binding_cluster *cluster = iter.second;
3456 404030 : cluster->purge_state_involving (sval, sval_mgr);
3457 : }
3458 : }
3459 :
3460 30409 : for (auto iter : base_regs_to_purge)
3461 121 : purge_cluster (iter);
3462 30046 : }
3463 :
3464 : /* Get the cluster for BASE_REG, or nullptr (const version). */
3465 :
3466 : const binding_cluster *
3467 2697953 : store::get_cluster (const region *base_reg) const
3468 : {
3469 2697953 : gcc_assert (base_reg);
3470 2697953 : gcc_assert (base_reg->get_base_region () == base_reg);
3471 5395906 : if (binding_cluster **slot
3472 2697953 : = const_cast <cluster_map_t &> (m_cluster_map).get (base_reg))
3473 2644056 : return *slot;
3474 : else
3475 : return nullptr;
3476 : }
3477 :
3478 : /* Get the cluster for BASE_REG, or nullptr (non-const version). */
3479 :
3480 : binding_cluster *
3481 10517176 : store::get_cluster (const region *base_reg)
3482 : {
3483 10517176 : gcc_assert (base_reg);
3484 10517176 : gcc_assert (base_reg->get_base_region () == base_reg);
3485 10517176 : if (binding_cluster **slot = m_cluster_map.get (base_reg))
3486 8730184 : return *slot;
3487 : else
3488 : return nullptr;
3489 : }
3490 :
3491 : /* Get the cluster for BASE_REG, creating it if doesn't already exist. */
3492 :
3493 : binding_cluster *
3494 1628623 : store::get_or_create_cluster (store_manager &store_mgr,
3495 : const region *base_reg)
3496 : {
3497 1628623 : gcc_assert (base_reg);
3498 1628623 : gcc_assert (base_reg->get_base_region () == base_reg);
3499 :
3500 : /* We shouldn't create clusters for dereferencing an UNKNOWN ptr. */
3501 1628623 : gcc_assert (!base_reg->symbolic_for_unknown_ptr_p ());
3502 :
3503 : /* We shouldn't create clusters for base regions that aren't trackable. */
3504 1628623 : gcc_assert (base_reg->tracked_p ());
3505 :
3506 1628623 : if (binding_cluster **slot = m_cluster_map.get (base_reg))
3507 51647 : return *slot;
3508 :
3509 1576976 : binding_cluster *cluster = new binding_cluster (store_mgr, base_reg);
3510 1576976 : m_cluster_map.put (base_reg, cluster);
3511 :
3512 1576976 : return cluster;
3513 : }
3514 :
3515 : /* Remove any cluster for BASE_REG, for use by
3516 : region_model::unbind_region_and_descendents
3517 : when popping stack frames and handling deleted heap regions. */
3518 :
3519 : void
3520 38534 : store::purge_cluster (const region *base_reg)
3521 : {
3522 38534 : gcc_assert (base_reg->get_base_region () == base_reg);
3523 38534 : binding_cluster **slot = m_cluster_map.get (base_reg);
3524 38534 : if (!slot)
3525 : return;
3526 38534 : binding_cluster *cluster = *slot;
3527 77068 : delete cluster;
3528 38534 : m_cluster_map.remove (base_reg);
3529 : }
3530 :
3531 : /* Attempt to merge STORE_A and STORE_B into OUT_STORE.
3532 : Return true if successful, or false if the stores can't be merged. */
3533 :
3534 : bool
3535 148384 : store::can_merge_p (const store *store_a, const store *store_b,
3536 : store *out_store, store_manager *mgr,
3537 : model_merger *merger)
3538 : {
3539 148384 : if (store_a->m_called_unknown_fn || store_b->m_called_unknown_fn)
3540 45967 : out_store->m_called_unknown_fn = true;
3541 :
3542 : /* Get the union of all base regions for STORE_A and STORE_B. */
3543 148384 : hash_set<const region *> base_regions;
3544 1944676 : for (cluster_map_t::iterator iter_a = store_a->m_cluster_map.begin ();
3545 3740968 : iter_a != store_a->m_cluster_map.end (); ++iter_a)
3546 : {
3547 1796292 : const region *base_reg_a = (*iter_a).first;
3548 1796292 : base_regions.add (base_reg_a);
3549 : }
3550 1921930 : for (cluster_map_t::iterator iter_b = store_b->m_cluster_map.begin ();
3551 3695476 : iter_b != store_b->m_cluster_map.end (); ++iter_b)
3552 : {
3553 1773546 : const region *base_reg_b = (*iter_b).first;
3554 1773546 : base_regions.add (base_reg_b);
3555 : }
3556 :
3557 : /* Sort the base regions before considering them. This ought not to
3558 : affect the results, but can affect which types UNKNOWN_REGIONs are
3559 : created for in a run; sorting them thus avoids minor differences
3560 : in logfiles. */
3561 148384 : auto_vec<const region *> vec_base_regions (base_regions.elements ());
3562 148384 : for (hash_set<const region *>::iterator iter = base_regions.begin ();
3563 3779822 : iter != base_regions.end (); ++iter)
3564 1815719 : vec_base_regions.quick_push (*iter);
3565 148384 : vec_base_regions.qsort (region::cmp_ptr_ptr);
3566 : unsigned i;
3567 : const region *base_reg;
3568 1399347 : FOR_EACH_VEC_ELT (vec_base_regions, i, base_reg)
3569 : {
3570 1209564 : const binding_cluster *cluster_a = store_a->get_cluster (base_reg);
3571 1209564 : const binding_cluster *cluster_b = store_b->get_cluster (base_reg);
3572 : /* At least one of cluster_a and cluster_b must be non-NULL. */
3573 1209564 : binding_cluster *out_cluster
3574 1209564 : = out_store->get_or_create_cluster (*mgr, base_reg);
3575 1209564 : if (!binding_cluster::can_merge_p (cluster_a, cluster_b,
3576 : out_cluster, out_store, mgr, merger))
3577 : return false;
3578 : }
3579 : return true;
3580 148384 : }
3581 :
3582 : /* Mark the cluster for BASE_REG as having escaped.
3583 : For use when handling an unrecognized function call, and
3584 : for params to "top-level" calls.
3585 : Further unknown function calls could touch it, even if the cluster
3586 : isn't reachable from args of those calls. */
3587 :
3588 : void
3589 36693 : store::mark_as_escaped (store_manager &mgr, const region *base_reg)
3590 : {
3591 36693 : gcc_assert (base_reg);
3592 36693 : gcc_assert (base_reg->get_base_region () == base_reg);
3593 :
3594 36693 : if (base_reg->symbolic_for_unknown_ptr_p ()
3595 36693 : || !base_reg->tracked_p ())
3596 5005 : return;
3597 :
3598 31688 : binding_cluster *cluster = get_or_create_cluster (mgr, base_reg);
3599 31688 : cluster->mark_as_escaped ();
3600 : }
3601 :
3602 : /* Handle an unknown fncall by updating any clusters that have escaped
3603 : (either in this fncall, or in a prior one). */
3604 :
3605 : void
3606 18048 : store::on_unknown_fncall (const gcall &call, store_manager *mgr,
3607 : const conjured_purge &p)
3608 : {
3609 18048 : m_called_unknown_fn = true;
3610 :
3611 141645 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3612 265242 : iter != m_cluster_map.end (); ++iter)
3613 123597 : (*iter).second->on_unknown_fncall (call, mgr, p);
3614 18048 : }
3615 :
3616 : /* Return true if a non-const pointer to BASE_REG (or something within it)
3617 : has escaped to code outside of the TU being analyzed. */
3618 :
3619 : bool
3620 8808250 : store::escaped_p (const region *base_reg) const
3621 : {
3622 8808250 : gcc_assert (base_reg);
3623 8808250 : gcc_assert (base_reg->get_base_region () == base_reg);
3624 :
3625 : /* "errno" can always be modified by external code. */
3626 8808250 : if (base_reg->get_kind () == RK_ERRNO)
3627 : return true;
3628 :
3629 17468332 : if (binding_cluster **cluster_slot
3630 8734166 : = const_cast <cluster_map_t &>(m_cluster_map).get (base_reg))
3631 8734166 : return (*cluster_slot)->escaped_p ();
3632 : return false;
3633 : }
3634 :
3635 : /* Populate OUT_PVS with a list of path_vars for describing SVAL based on
3636 : this store, using VISITED to ensure the traversal terminates. */
3637 :
3638 : void
3639 12561 : store::get_representative_path_vars (const region_model *model,
3640 : svalue_set *visited,
3641 : const svalue *sval,
3642 : logger *logger,
3643 : auto_vec<path_var> *out_pvs) const
3644 : {
3645 12561 : gcc_assert (sval);
3646 :
3647 : /* Find all bindings that reference SVAL. */
3648 56571 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3649 100581 : iter != m_cluster_map.end (); ++iter)
3650 : {
3651 44010 : const region *base_reg = (*iter).first;
3652 44010 : binding_cluster *cluster = (*iter).second;
3653 44010 : cluster->get_representative_path_vars (model, visited, base_reg, sval,
3654 : logger,
3655 : out_pvs);
3656 : }
3657 :
3658 12561 : if (const initial_svalue *init_sval = sval->dyn_cast_initial_svalue ())
3659 : {
3660 2305 : const region *reg = init_sval->get_region ();
3661 2305 : if (path_var pv = model->get_representative_path_var (reg,
3662 : visited,
3663 2305 : logger))
3664 2221 : out_pvs->safe_push (pv);
3665 : }
3666 12561 : }
3667 :
3668 : /* Remove all bindings overlapping REG within this store, removing
3669 : any clusters that become redundant.
3670 :
3671 : If UNCERTAINTY is non-NULL, use it to record any svalues that
3672 : were removed, as being maybe-bound. */
3673 :
3674 : void
3675 382158 : store::remove_overlapping_bindings (store_manager *mgr, const region *reg,
3676 : uncertainty_t *uncertainty)
3677 : {
3678 382158 : const region *base_reg = reg->get_base_region ();
3679 382158 : if (binding_cluster **cluster_slot = m_cluster_map.get (base_reg))
3680 : {
3681 74121 : binding_cluster *cluster = *cluster_slot;
3682 74121 : if (reg == base_reg && !escaped_p (base_reg))
3683 : {
3684 : /* Remove whole cluster. */
3685 46106 : m_cluster_map.remove (base_reg);
3686 92212 : delete cluster;
3687 46106 : return;
3688 : }
3689 : /* Pass nullptr for the maybe_live_values here, as we don't want to
3690 : record the old svalues as being maybe-bound. */
3691 28015 : cluster->remove_overlapping_bindings (mgr, reg, uncertainty, nullptr);
3692 : }
3693 : }
3694 :
3695 : /* Subclass of visitor that accumulates a hash_set of the regions that
3696 : were visited. */
3697 :
3698 1658488 : struct region_finder : public visitor
3699 : {
3700 46857340 : void visit_region (const region *reg) final override
3701 : {
3702 46857340 : m_regs.add (reg);
3703 46857340 : }
3704 :
3705 : hash_set<const region *> m_regs;
3706 : };
3707 :
3708 : /* Canonicalize this store, to maximize the chance of equality between
3709 : instances. */
3710 :
3711 : void
3712 829244 : store::canonicalize (store_manager *mgr)
3713 : {
3714 : /* If we have e.g.:
3715 : cluster for: HEAP_ALLOCATED_REGION(543)
3716 : ESCAPED
3717 : TOUCHED
3718 : where the heap region is empty and unreferenced, then purge that
3719 : cluster, to avoid unbounded state chains involving these. */
3720 :
3721 : /* Find regions that are referenced by bound values in the store. */
3722 829244 : region_finder s;
3723 6854330 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3724 12879416 : iter != m_cluster_map.end (); ++iter)
3725 : {
3726 6025086 : binding_cluster *cluster = (*iter).second;
3727 6025086 : for (binding_cluster::iterator_t bind_iter = cluster->m_map.begin ();
3728 13220930 : bind_iter != cluster->m_map.end (); ++bind_iter)
3729 7195844 : (*bind_iter).m_sval->accept (&s);
3730 : }
3731 :
3732 : /* Locate heap-allocated regions that have empty bindings that weren't
3733 : found above. */
3734 829244 : hash_set<const region *> purgeable_regions;
3735 6854330 : for (cluster_map_t::iterator iter = m_cluster_map.begin ();
3736 12879416 : iter != m_cluster_map.end (); ++iter)
3737 : {
3738 6025086 : const region *base_reg = (*iter).first;
3739 6025086 : binding_cluster *cluster = (*iter).second;
3740 6025086 : if (base_reg->get_kind () == RK_HEAP_ALLOCATED)
3741 : {
3742 : /* Don't purge a heap-allocated region that's been marked as
3743 : escaping, since this could be recording that a ptr to it
3744 : was written to an unknown symbolic region along this
3745 : path, and so we don't know whether it's referenced or
3746 : not, and hence should report it as leaking
3747 : (PR analyzer/106473). */
3748 293710 : if (cluster->escaped_p ())
3749 113985 : continue;
3750 :
3751 179725 : if (cluster->empty_p ())
3752 450 : if (!s.m_regs.contains (base_reg))
3753 0 : purgeable_regions.add (base_reg);
3754 :
3755 : /* Also cover the UNKNOWN case. */
3756 179725 : if (const svalue *sval = cluster->maybe_get_simple_value (mgr))
3757 54405 : if (sval->get_kind () == SK_UNKNOWN)
3758 28674 : if (!s.m_regs.contains (base_reg))
3759 122 : purgeable_regions.add (base_reg);
3760 : }
3761 : }
3762 :
3763 : /* Purge them. */
3764 829366 : for (hash_set<const region *>::iterator iter = purgeable_regions.begin ();
3765 1658732 : iter != purgeable_regions.end (); ++iter)
3766 : {
3767 122 : const region *base_reg = *iter;
3768 122 : purge_cluster (base_reg);
3769 : }
3770 829244 : }
3771 :
3772 : static bool
3773 90759 : needs_loop_replay_fixup_p (const svalue &sval)
3774 : {
3775 90759 : struct my_visitor : public visitor
3776 : {
3777 90759 : my_visitor () : m_found_widening (false) {}
3778 :
3779 : void
3780 400 : visit_widening_svalue (const widening_svalue *) final override
3781 : {
3782 400 : m_found_widening = true;
3783 400 : }
3784 :
3785 : bool m_found_widening;
3786 90759 : } v;
3787 :
3788 90759 : sval.accept (&v);
3789 90759 : return v.m_found_widening;
3790 : }
3791 :
3792 : /* Subroutine for use by exploded_path::feasible_p.
3793 :
3794 : We need to deal with state differences between:
3795 : (a) when the exploded_graph is being initially constructed and
3796 : (b) when replaying the state changes along a specific path in
3797 : in exploded_path::feasible_p.
3798 :
3799 : In (a), state merging happens, so when exploring a loop
3800 : for (i = 0; i < 1024; i++)
3801 : on successive iterations we have i == 0, then i == WIDENING.
3802 :
3803 : In (b), no state merging happens, so naively replaying the path
3804 : that goes twice through the loop then exits it
3805 : would lead to i == 0, then i == 1, and then a (i >= 1024) eedge
3806 : that exits the loop, which would be found to be infeasible as i == 1,
3807 : and the path would be rejected.
3808 :
3809 : We need to fix up state during replay. This subroutine is
3810 : called whenever we enter a supernode that we've already
3811 : visited along this exploded_path, passing in OTHER_STORE
3812 : from the destination enode's state.
3813 :
3814 : Find bindings to widening values in OTHER_STORE.
3815 : For all that are found, update the binding in this store to UNKNOWN. */
3816 :
3817 : void
3818 4661 : store::loop_replay_fixup (const store *other_store,
3819 : region_model_manager *mgr)
3820 : {
3821 4661 : gcc_assert (other_store);
3822 34866 : for (cluster_map_t::iterator iter = other_store->m_cluster_map.begin ();
3823 65071 : iter != other_store->m_cluster_map.end (); ++iter)
3824 : {
3825 30205 : const region *base_reg = (*iter).first;
3826 30205 : binding_cluster *cluster = (*iter).second;
3827 120964 : for (binding_cluster::iterator_t bind_iter = cluster->m_map.begin ();
3828 120964 : bind_iter != cluster->m_map.end (); ++bind_iter)
3829 : {
3830 90759 : const binding_key *key = (*bind_iter).m_key;
3831 90759 : const svalue *sval = (*bind_iter).m_sval;
3832 90759 : if (needs_loop_replay_fixup_p (*sval))
3833 : {
3834 400 : binding_cluster *this_cluster
3835 400 : = get_or_create_cluster (*mgr->get_store_manager (),
3836 : base_reg);
3837 400 : const svalue *unknown
3838 400 : = mgr->get_or_create_unknown_svalue (sval->get_type ());
3839 400 : this_cluster->bind_key (key, unknown);
3840 : }
3841 : }
3842 : }
3843 4661 : }
3844 :
3845 : /* Use R to replay the bindings from SUMMARY into this object. */
3846 :
3847 : void
3848 1658 : store::replay_call_summary (call_summary_replay &r,
3849 : const store &summary)
3850 : {
3851 1658 : if (summary.m_called_unknown_fn)
3852 : {
3853 : /* A call to an external function occurred in the summary.
3854 : Hence we need to invalidate our knownledge of globals,
3855 : escaped regions, etc. */
3856 40 : on_unknown_fncall (r.get_call_stmt (),
3857 : r.get_store_manager (),
3858 40 : conjured_purge (r.get_caller_model (),
3859 40 : r.get_ctxt ()));
3860 : }
3861 :
3862 1658 : auto_vec<const region *> keys (summary.m_cluster_map.elements ());
3863 14127 : for (auto kv : summary.m_cluster_map)
3864 12469 : keys.quick_push (kv.first);
3865 1658 : keys.qsort (region::cmp_ptr_ptr);
3866 17435 : for (auto base_reg : keys)
3867 12469 : replay_call_summary_cluster (r, summary, base_reg);
3868 1658 : }
3869 :
3870 : /* Use R and SUMMARY to replay the bindings in SUMMARY_CLUSTER
3871 : into this object. */
3872 :
3873 : void
3874 12469 : store::replay_call_summary_cluster (call_summary_replay &r,
3875 : const store &summary,
3876 : const region *summary_base_reg)
3877 : {
3878 12469 : const call_details &cd = r.get_call_details ();
3879 12469 : region_model_manager *reg_mgr = r.get_manager ();
3880 12469 : store_manager *mgr = reg_mgr->get_store_manager ();
3881 12469 : const binding_cluster *summary_cluster
3882 12469 : = summary.get_cluster (summary_base_reg);
3883 :
3884 : /* Handle "ESCAPED" and "TOUCHED" flags. */
3885 12469 : if (summary_cluster->escaped_p () || summary_cluster->touched_p ())
3886 7740 : if (const region *caller_reg
3887 3870 : = r.convert_region_from_summary (summary_base_reg))
3888 : {
3889 3867 : const region *caller_base_reg = caller_reg->get_base_region ();
3890 3867 : if (caller_base_reg->tracked_p ()
3891 3867 : && !caller_base_reg->symbolic_for_unknown_ptr_p ())
3892 : {
3893 2940 : binding_cluster *caller_cluster
3894 2940 : = get_or_create_cluster (*mgr, caller_base_reg);
3895 2940 : if (summary_cluster->escaped_p ())
3896 2244 : caller_cluster->mark_as_escaped ();
3897 2940 : if (summary_cluster->touched_p ())
3898 1762 : caller_cluster->m_touched = true;
3899 : }
3900 : }
3901 :
3902 12469 : switch (summary_base_reg->get_kind ())
3903 : {
3904 : /* Top-level regions. */
3905 0 : case RK_FRAME:
3906 0 : case RK_GLOBALS:
3907 0 : case RK_CODE:
3908 0 : case RK_STACK:
3909 0 : case RK_HEAP:
3910 0 : case RK_THREAD_LOCAL:
3911 0 : case RK_ROOT:
3912 : /* Child regions. */
3913 0 : case RK_FIELD:
3914 0 : case RK_ELEMENT:
3915 0 : case RK_OFFSET:
3916 0 : case RK_SIZED:
3917 0 : case RK_CAST:
3918 0 : case RK_BIT_RANGE:
3919 : /* Other regions. */
3920 0 : case RK_VAR_ARG:
3921 0 : case RK_UNKNOWN:
3922 : /* These should never be the base region of a binding cluster. */
3923 0 : gcc_unreachable ();
3924 : break;
3925 :
3926 : case RK_FUNCTION:
3927 : case RK_LABEL:
3928 : case RK_STRING:
3929 : /* These can be marked as escaping. */
3930 : break;
3931 :
3932 2703 : case RK_SYMBOLIC:
3933 2703 : {
3934 2703 : const symbolic_region *summary_symbolic_reg
3935 2703 : = as_a <const symbolic_region *> (summary_base_reg);
3936 2703 : const svalue *summary_ptr_sval = summary_symbolic_reg->get_pointer ();
3937 2703 : const svalue *caller_ptr_sval
3938 2703 : = r.convert_svalue_from_summary (summary_ptr_sval);
3939 2703 : if (!caller_ptr_sval)
3940 : return;
3941 2289 : const region *caller_dest_reg
3942 2289 : = cd.get_model ()->deref_rvalue (caller_ptr_sval,
3943 : NULL_TREE,
3944 : cd.get_ctxt ());
3945 2289 : const svalue *summary_sval
3946 2289 : = summary.get_any_binding (mgr, summary_base_reg);
3947 2289 : if (!summary_sval)
3948 : return;
3949 1977 : const svalue *caller_sval
3950 1977 : = r.convert_svalue_from_summary (summary_sval);
3951 1977 : if (!caller_sval)
3952 2 : caller_sval =
3953 2 : reg_mgr->get_or_create_unknown_svalue (summary_sval->get_type ());
3954 1977 : set_value (mgr, caller_dest_reg,
3955 : caller_sval, nullptr /* uncertainty_t * */);
3956 : }
3957 1977 : break;
3958 :
3959 9761 : case RK_HEAP_ALLOCATED:
3960 9761 : case RK_DECL:
3961 9761 : case RK_ERRNO:
3962 9761 : case RK_PRIVATE:
3963 9761 : {
3964 9761 : const region *caller_dest_reg
3965 9761 : = r.convert_region_from_summary (summary_base_reg);
3966 9761 : if (!caller_dest_reg)
3967 : return;
3968 9528 : const svalue *summary_sval
3969 9528 : = summary.get_any_binding (mgr, summary_base_reg);
3970 9528 : if (!summary_sval)
3971 187 : summary_sval = reg_mgr->get_or_create_compound_svalue
3972 187 : (summary_base_reg->get_type (),
3973 187 : summary_cluster->get_map ().get_concrete_bindings ());
3974 9528 : const svalue *caller_sval
3975 9528 : = r.convert_svalue_from_summary (summary_sval);
3976 9528 : if (!caller_sval)
3977 2 : caller_sval =
3978 2 : reg_mgr->get_or_create_unknown_svalue (summary_sval->get_type ());
3979 9528 : set_value (mgr, caller_dest_reg,
3980 : caller_sval, nullptr /* uncertainty_t * */);
3981 : }
3982 9528 : break;
3983 :
3984 : case RK_ALLOCA:
3985 : /* Ignore bindings of alloca regions in the summary. */
3986 : break;
3987 : }
3988 : }
3989 :
3990 : #if CHECKING_P
3991 :
3992 : namespace selftest {
3993 :
3994 : /* Verify that bit_range::intersects_p works as expected. */
3995 :
3996 : static void
3997 4 : test_bit_range_intersects_p ()
3998 : {
3999 4 : bit_range b0 (0, 1);
4000 4 : bit_range b1 (1, 1);
4001 4 : bit_range b2 (2, 1);
4002 4 : bit_range b3 (3, 1);
4003 4 : bit_range b4 (4, 1);
4004 4 : bit_range b5 (5, 1);
4005 4 : bit_range b6 (6, 1);
4006 4 : bit_range b7 (7, 1);
4007 4 : bit_range b1_to_6 (1, 6);
4008 4 : bit_range b0_to_7 (0, 8);
4009 4 : bit_range b3_to_5 (3, 3);
4010 4 : bit_range b6_to_7 (6, 2);
4011 :
4012 : /* self-intersection is true. */
4013 4 : ASSERT_TRUE (b0.intersects_p (b0));
4014 4 : ASSERT_TRUE (b7.intersects_p (b7));
4015 4 : ASSERT_TRUE (b1_to_6.intersects_p (b1_to_6));
4016 4 : ASSERT_TRUE (b0_to_7.intersects_p (b0_to_7));
4017 :
4018 4 : ASSERT_FALSE (b0.intersects_p (b1));
4019 4 : ASSERT_FALSE (b1.intersects_p (b0));
4020 4 : ASSERT_FALSE (b0.intersects_p (b7));
4021 4 : ASSERT_FALSE (b7.intersects_p (b0));
4022 :
4023 4 : ASSERT_TRUE (b0_to_7.intersects_p (b0));
4024 4 : ASSERT_TRUE (b0_to_7.intersects_p (b7));
4025 4 : ASSERT_TRUE (b0.intersects_p (b0_to_7));
4026 4 : ASSERT_TRUE (b7.intersects_p (b0_to_7));
4027 :
4028 4 : ASSERT_FALSE (b0.intersects_p (b1_to_6));
4029 4 : ASSERT_FALSE (b1_to_6.intersects_p (b0));
4030 4 : ASSERT_TRUE (b1.intersects_p (b1_to_6));
4031 4 : ASSERT_TRUE (b1_to_6.intersects_p (b1));
4032 4 : ASSERT_TRUE (b1_to_6.intersects_p (b6));
4033 4 : ASSERT_FALSE (b1_to_6.intersects_p (b7));
4034 :
4035 4 : ASSERT_TRUE (b1_to_6.intersects_p (b0_to_7));
4036 4 : ASSERT_TRUE (b0_to_7.intersects_p (b1_to_6));
4037 :
4038 4 : ASSERT_FALSE (b3_to_5.intersects_p (b6_to_7));
4039 4 : ASSERT_FALSE (b6_to_7.intersects_p (b3_to_5));
4040 :
4041 4 : bit_range r1 (0,0);
4042 4 : bit_range r2 (0,0);
4043 4 : ASSERT_TRUE (b1_to_6.intersects_p (b0_to_7, &r1, &r2));
4044 4 : ASSERT_EQ (r1.get_start_bit_offset (), 0);
4045 4 : ASSERT_EQ (r1.m_size_in_bits, 6);
4046 4 : ASSERT_EQ (r2.get_start_bit_offset (), 1);
4047 4 : ASSERT_EQ (r2.m_size_in_bits, 6);
4048 :
4049 4 : ASSERT_TRUE (b0_to_7.intersects_p (b1_to_6, &r1, &r2));
4050 4 : ASSERT_EQ (r1.get_start_bit_offset (), 1);
4051 4 : ASSERT_EQ (r1.m_size_in_bits, 6);
4052 4 : ASSERT_EQ (r2.get_start_bit_offset (), 0);
4053 4 : ASSERT_EQ (r2.m_size_in_bits, 6);
4054 4 : }
4055 :
4056 : /* Implementation detail of ASSERT_BIT_RANGE_FROM_MASK_EQ. */
4057 :
4058 : static void
4059 76 : assert_bit_range_from_mask_eq (const location &loc,
4060 : unsigned HOST_WIDE_INT mask,
4061 : const bit_range &expected)
4062 : {
4063 76 : bit_range actual (0, 0);
4064 76 : bool ok = bit_range::from_mask (mask, &actual);
4065 76 : ASSERT_TRUE_AT (loc, ok);
4066 76 : ASSERT_EQ_AT (loc, actual, expected);
4067 76 : }
4068 :
4069 : /* Assert that bit_range::from_mask (MASK) returns true, and writes
4070 : out EXPECTED_BIT_RANGE. */
4071 :
4072 : #define ASSERT_BIT_RANGE_FROM_MASK_EQ(MASK, EXPECTED_BIT_RANGE) \
4073 : SELFTEST_BEGIN_STMT \
4074 : assert_bit_range_from_mask_eq (SELFTEST_LOCATION, MASK, \
4075 : EXPECTED_BIT_RANGE); \
4076 : SELFTEST_END_STMT
4077 :
4078 : /* Implementation detail of ASSERT_NO_BIT_RANGE_FROM_MASK. */
4079 :
4080 : static void
4081 12 : assert_no_bit_range_from_mask_eq (const location &loc,
4082 : unsigned HOST_WIDE_INT mask)
4083 : {
4084 12 : bit_range actual (0, 0);
4085 12 : bool ok = bit_range::from_mask (mask, &actual);
4086 12 : ASSERT_FALSE_AT (loc, ok);
4087 12 : }
4088 :
4089 : /* Assert that bit_range::from_mask (MASK) returns false. */
4090 :
4091 : #define ASSERT_NO_BIT_RANGE_FROM_MASK(MASK) \
4092 : SELFTEST_BEGIN_STMT \
4093 : assert_no_bit_range_from_mask_eq (SELFTEST_LOCATION, MASK); \
4094 : SELFTEST_END_STMT
4095 :
4096 : /* Verify that bit_range::from_mask works as expected. */
4097 :
4098 : static void
4099 4 : test_bit_range_from_mask ()
4100 : {
4101 : /* Should fail on zero. */
4102 4 : ASSERT_NO_BIT_RANGE_FROM_MASK (0);
4103 :
4104 : /* Verify 1-bit masks. */
4105 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (1, bit_range (0, 1));
4106 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (2, bit_range (1, 1));
4107 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (4, bit_range (2, 1));
4108 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (8, bit_range (3, 1));
4109 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (16, bit_range (4, 1));
4110 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (32, bit_range (5, 1));
4111 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (64, bit_range (6, 1));
4112 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (128, bit_range (7, 1));
4113 :
4114 : /* Verify N-bit masks starting at bit 0. */
4115 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (3, bit_range (0, 2));
4116 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (7, bit_range (0, 3));
4117 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (15, bit_range (0, 4));
4118 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (31, bit_range (0, 5));
4119 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (63, bit_range (0, 6));
4120 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (127, bit_range (0, 7));
4121 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (255, bit_range (0, 8));
4122 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0xffff, bit_range (0, 16));
4123 :
4124 : /* Various other tests. */
4125 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0x30, bit_range (4, 2));
4126 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0x700, bit_range (8, 3));
4127 4 : ASSERT_BIT_RANGE_FROM_MASK_EQ (0x600, bit_range (9, 2));
4128 :
4129 : /* Multiple ranges of set bits should fail. */
4130 4 : ASSERT_NO_BIT_RANGE_FROM_MASK (0x101);
4131 4 : ASSERT_NO_BIT_RANGE_FROM_MASK (0xf0f0f0f0);
4132 4 : }
4133 :
4134 : /* Implementation detail of ASSERT_OVERLAP. */
4135 :
4136 : static void
4137 56 : assert_overlap (const location &loc,
4138 : const concrete_binding *b1,
4139 : const concrete_binding *b2)
4140 : {
4141 56 : ASSERT_TRUE_AT (loc, b1->overlaps_p (*b2));
4142 56 : ASSERT_TRUE_AT (loc, b2->overlaps_p (*b1));
4143 56 : }
4144 :
4145 : /* Implementation detail of ASSERT_DISJOINT. */
4146 :
4147 : static void
4148 20 : assert_disjoint (const location &loc,
4149 : const concrete_binding *b1,
4150 : const concrete_binding *b2)
4151 : {
4152 20 : ASSERT_FALSE_AT (loc, b1->overlaps_p (*b2));
4153 20 : ASSERT_FALSE_AT (loc, b2->overlaps_p (*b1));
4154 20 : }
4155 :
4156 : /* Assert that B1 and B2 overlap, checking both ways. */
4157 :
4158 : #define ASSERT_OVERLAP(B1, B2) \
4159 : SELFTEST_BEGIN_STMT \
4160 : assert_overlap (SELFTEST_LOCATION, B1, B2); \
4161 : SELFTEST_END_STMT
4162 :
4163 : /* Assert that B1 and B2 do not overlap, checking both ways. */
4164 :
4165 : #define ASSERT_DISJOINT(B1, B2) \
4166 : SELFTEST_BEGIN_STMT \
4167 : assert_disjoint (SELFTEST_LOCATION, B1, B2); \
4168 : SELFTEST_END_STMT
4169 :
4170 : /* Verify that concrete_binding::overlaps_p works as expected. */
4171 :
4172 : static void
4173 4 : test_binding_key_overlap ()
4174 : {
4175 4 : store_manager mgr (nullptr);
4176 :
4177 : /* Various 8-bit bindings. */
4178 4 : const concrete_binding *cb_0_7 = mgr.get_concrete_binding (0, 8);
4179 4 : const concrete_binding *cb_8_15 = mgr.get_concrete_binding (8, 8);
4180 4 : const concrete_binding *cb_16_23 = mgr.get_concrete_binding (16, 8);
4181 4 : const concrete_binding *cb_24_31 = mgr.get_concrete_binding (24, 8);
4182 :
4183 : /* 16-bit bindings. */
4184 4 : const concrete_binding *cb_0_15 = mgr.get_concrete_binding (0, 16);
4185 4 : const concrete_binding *cb_8_23 = mgr.get_concrete_binding (8, 16);
4186 4 : const concrete_binding *cb_16_31 = mgr.get_concrete_binding (16, 16);
4187 :
4188 : /* 32-bit binding. */
4189 4 : const concrete_binding *cb_0_31 = mgr.get_concrete_binding (0, 32);
4190 :
4191 : /* Everything should self-overlap. */
4192 4 : ASSERT_OVERLAP (cb_0_7, cb_0_7);
4193 4 : ASSERT_OVERLAP (cb_8_15, cb_8_15);
4194 4 : ASSERT_OVERLAP (cb_16_23, cb_16_23);
4195 4 : ASSERT_OVERLAP (cb_24_31, cb_24_31);
4196 4 : ASSERT_OVERLAP (cb_0_15, cb_0_15);
4197 4 : ASSERT_OVERLAP (cb_8_23, cb_8_23);
4198 4 : ASSERT_OVERLAP (cb_16_31, cb_16_31);
4199 4 : ASSERT_OVERLAP (cb_0_31, cb_0_31);
4200 :
4201 : /* Verify the 8-bit bindings that don't overlap each other. */
4202 4 : ASSERT_DISJOINT (cb_0_7, cb_8_15);
4203 4 : ASSERT_DISJOINT (cb_8_15, cb_16_23);
4204 :
4205 : /* Check for overlap of differently-sized bindings. */
4206 4 : ASSERT_OVERLAP (cb_0_7, cb_0_31);
4207 : /* ...and with differing start points. */
4208 4 : ASSERT_OVERLAP (cb_8_15, cb_0_31);
4209 4 : ASSERT_DISJOINT (cb_8_15, cb_16_31);
4210 4 : ASSERT_OVERLAP (cb_16_23, cb_0_31);
4211 4 : ASSERT_OVERLAP (cb_16_31, cb_0_31);
4212 :
4213 4 : ASSERT_DISJOINT (cb_0_7, cb_8_23);
4214 4 : ASSERT_OVERLAP (cb_8_23, cb_16_23);
4215 4 : ASSERT_OVERLAP (cb_8_23, cb_16_31);
4216 4 : ASSERT_DISJOINT (cb_8_23, cb_24_31);
4217 4 : }
4218 :
4219 : static void
4220 4 : test_binding_map_ops ()
4221 : {
4222 4 : region_model_manager region_mgr;
4223 4 : store_manager store_mgr (®ion_mgr);
4224 :
4225 : /* Assignment of empty. */
4226 4 : {
4227 4 : binding_map src (store_mgr);
4228 4 : binding_map dst (store_mgr);
4229 4 : dst = src;
4230 :
4231 4 : ASSERT_EQ (src, dst);
4232 4 : }
4233 4 : }
4234 :
4235 : /* Run all of the selftests within this file. */
4236 :
4237 : void
4238 4 : analyzer_store_cc_tests ()
4239 : {
4240 4 : test_bit_range_intersects_p ();
4241 4 : test_bit_range_from_mask ();
4242 4 : test_binding_key_overlap ();
4243 4 : test_binding_map_ops ();
4244 4 : }
4245 :
4246 : } // namespace selftest
4247 :
4248 : #endif /* CHECKING_P */
4249 :
4250 : } // namespace ana
4251 :
4252 : #endif /* #if ENABLE_ANALYZER */
|