Line data Source code
1 : /* Classes for modeling the state of memory.
2 : Copyright (C) 2019-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #define INCLUDE_ALGORITHM
22 : #include "analyzer/common.h"
23 :
24 : #include "ordered-hash-map.h"
25 : #include "options.h"
26 : #include "cgraph.h"
27 : #include "cfg.h"
28 : #include "sbitmap.h"
29 : #include "diagnostics/event-id.h"
30 : #include "stor-layout.h"
31 : #include "stringpool.h"
32 : #include "attribs.h"
33 : #include "tree-object-size.h"
34 : #include "gimple-ssa.h"
35 : #include "tree-phinodes.h"
36 : #include "tree-ssa-operands.h"
37 : #include "ssa-iterators.h"
38 : #include "target.h"
39 : #include "calls.h"
40 : #include "is-a.h"
41 : #include "gcc-rich-location.h"
42 : #include "gcc-urlifier.h"
43 : #include "diagnostics/sarif-sink.h"
44 : #include "tree-pretty-print.h"
45 : #include "fold-const.h"
46 : #include "selftest-tree.h"
47 : #include "context.h"
48 : #include "channels.h"
49 : #include "value-relation.h"
50 : #include "range-op.h"
51 :
52 : #include "text-art/tree-widget.h"
53 :
54 : #include "analyzer/analyzer-logging.h"
55 : #include "analyzer/supergraph.h"
56 : #include "analyzer/call-string.h"
57 : #include "analyzer/program-point.h"
58 : #include "analyzer/store.h"
59 : #include "analyzer/region-model.h"
60 : #include "analyzer/constraint-manager.h"
61 : #include "analyzer/sm.h"
62 : #include "analyzer/pending-diagnostic.h"
63 : #include "analyzer/region-model-reachability.h"
64 : #include "analyzer/analyzer-selftests.h"
65 : #include "analyzer/program-state.h"
66 : #include "analyzer/call-summary.h"
67 : #include "analyzer/checker-event.h"
68 : #include "analyzer/checker-path.h"
69 : #include "analyzer/feasible-graph.h"
70 : #include "analyzer/record-layout.h"
71 : #include "analyzer/function-set.h"
72 :
73 : #if ENABLE_ANALYZER
74 :
75 : namespace ana {
76 :
77 : /* Dump T to PP in language-independent form, for debugging/logging/dumping
78 : purposes. */
79 :
80 : void
81 41489 : dump_tree (pretty_printer *pp, tree t)
82 : {
83 41489 : dump_generic_node (pp, t, 0, TDF_SLIM, 0);
84 41489 : }
85 :
86 : /* Dump T to PP in language-independent form in quotes, for
87 : debugging/logging/dumping purposes. */
88 :
89 : void
90 1366 : dump_quoted_tree (pretty_printer *pp, tree t)
91 : {
92 1366 : pp_begin_quote (pp, pp_show_color (pp));
93 1366 : dump_tree (pp, t);
94 1366 : pp_end_quote (pp, pp_show_color (pp));
95 1366 : }
96 :
97 : /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
98 : calls within other pp_printf calls.
99 :
100 : default_tree_printer handles 'T' and some other codes by calling
101 : dump_generic_node (pp, t, 0, TDF_SLIM, 0);
102 : dump_generic_node calls pp_printf in various places, leading to
103 : garbled output.
104 :
105 : Ideally pp_printf could be made to be reentrant, but in the meantime
106 : this function provides a workaround. */
107 :
108 : void
109 4434 : print_quoted_type (pretty_printer *pp, tree t)
110 : {
111 4434 : if (!t)
112 : return;
113 4341 : pp_begin_quote (pp, pp_show_color (pp));
114 4341 : dump_generic_node (pp, t, 0, TDF_SLIM, 0);
115 4341 : pp_end_quote (pp, pp_show_color (pp));
116 : }
117 :
118 : /* Print EXPR to PP, without quotes.
119 : For use within svalue::maybe_print_for_user
120 : and region::maybe_print_for_user. */
121 :
122 : void
123 38 : print_expr_for_user (pretty_printer *pp, tree expr)
124 : {
125 : /* Workaround for C++'s lang_hooks.decl_printable_name,
126 : which unhelpfully (for us) prefixes the decl with its
127 : type. */
128 38 : if (DECL_P (expr))
129 38 : dump_generic_node (pp, expr, 0, TDF_SLIM, 0);
130 : else
131 0 : pp_printf (pp, "%E", expr);
132 38 : }
133 :
134 : /* class region_to_value_map. */
135 :
136 : /* Assignment operator for region_to_value_map. */
137 :
138 : region_to_value_map &
139 58676 : region_to_value_map::operator= (const region_to_value_map &other)
140 : {
141 58676 : m_hash_map.empty ();
142 71699 : for (auto iter : other.m_hash_map)
143 : {
144 13023 : const region *reg = iter.first;
145 13023 : const svalue *sval = iter.second;
146 13023 : m_hash_map.put (reg, sval);
147 : }
148 58676 : return *this;
149 : }
150 :
151 : /* Equality operator for region_to_value_map. */
152 :
153 : bool
154 443550 : region_to_value_map::operator== (const region_to_value_map &other) const
155 : {
156 443550 : if (m_hash_map.elements () != other.m_hash_map.elements ())
157 : return false;
158 :
159 706801 : for (auto iter : *this)
160 : {
161 132929 : const region *reg = iter.first;
162 132929 : const svalue *sval = iter.second;
163 132929 : const svalue * const *other_slot = other.get (reg);
164 132929 : if (other_slot == nullptr)
165 58 : return false;
166 132899 : if (sval != *other_slot)
167 : return false;
168 : }
169 :
170 441001 : return true;
171 : }
172 :
173 : /* Dump this object to PP. */
174 :
void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  /* Sort the regions, so that the dump output is deterministic
     (hash-map iteration order isn't).  */
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  /* Emit one "REGION: SVALUE" entry per mapping: one per line when
     MULTILINE, comma-separated within braces otherwise.  */
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, " ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
205 :
206 : /* Dump this object to stderr. */
207 :
208 : DEBUG_FUNCTION void
209 0 : region_to_value_map::dump (bool simple) const
210 : {
211 0 : tree_dump_pretty_printer pp (stderr);
212 0 : dump_to_pp (&pp, simple, true);
213 0 : pp_newline (&pp);
214 0 : }
215 :
216 : /* Generate a JSON value for this region_to_value_map.
217 : This is intended for debugging the analyzer rather than
218 : serialization. */
219 :
220 : std::unique_ptr<json::object>
221 4 : region_to_value_map::to_json () const
222 : {
223 4 : auto map_obj = std::make_unique<json::object> ();
224 :
225 4 : auto_vec<const region *> regs;
226 4 : for (iterator iter = begin (); iter != end (); ++iter)
227 0 : regs.safe_push ((*iter).first);
228 4 : regs.qsort (region::cmp_ptr_ptr);
229 :
230 : unsigned i;
231 : const region *reg;
232 4 : FOR_EACH_VEC_ELT (regs, i, reg)
233 : {
234 0 : label_text reg_desc = reg->get_desc ();
235 0 : const svalue *sval = *get (reg);
236 0 : map_obj->set (reg_desc.get (), sval->to_json ());
237 0 : }
238 :
239 4 : return map_obj;
240 4 : }
241 :
std::unique_ptr<text_art::tree_widget>
region_to_value_map::
make_dump_widget (const text_art::dump_widget_info &dwi) const
{
  /* Omit the "Dynamic Extents" subtree entirely if there's nothing
     to show.  */
  if (is_empty ())
    return nullptr;

  std::unique_ptr<text_art::tree_widget> w
    (text_art::tree_widget::make (dwi, "Dynamic Extents"));

  /* Sort the regions, for deterministic output.  */
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);

  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      /* Print "REGION: SVALUE" into a local pp, then wrap that text
	 in a child widget.  */
      pretty_printer the_pp;
      pretty_printer * const pp = &the_pp;
      pp_format_decoder (pp) = default_tree_printer;
      const bool simple = true;

      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      w->add_child (text_art::tree_widget::make (dwi, pp));
    }
  return w;
}
274 :
275 : /* Attempt to merge THIS with OTHER, writing the result
276 : to OUT.
277 :
278 : For now, write (region, value) mappings that are in common between THIS
279 : and OTHER to OUT, effectively taking the intersection.
280 :
281 : Reject merger of different values. */
282 :
283 : bool
284 42209 : region_to_value_map::can_merge_with_p (const region_to_value_map &other,
285 : region_to_value_map *out) const
286 : {
287 57227 : for (auto iter : *this)
288 : {
289 9475 : const region *iter_reg = iter.first;
290 9475 : const svalue *iter_sval = iter.second;
291 9475 : const svalue * const * other_slot = other.get (iter_reg);
292 9475 : if (other_slot)
293 : {
294 9191 : if (iter_sval == *other_slot)
295 7225 : out->put (iter_reg, iter_sval);
296 : else
297 1966 : return false;
298 : }
299 : }
300 40243 : return true;
301 : }
302 :
303 : /* Purge any state involving SVAL. */
304 :
305 : void
306 26833 : region_to_value_map::purge_state_involving (const svalue *sval)
307 : {
308 26833 : auto_vec<const region *> to_purge;
309 68103 : for (auto iter : *this)
310 : {
311 20635 : const region *iter_reg = iter.first;
312 20635 : const svalue *iter_sval = iter.second;
313 20635 : if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
314 26 : to_purge.safe_push (iter_reg);
315 : }
316 26911 : for (auto iter : to_purge)
317 26 : m_hash_map.remove (iter);
318 26833 : }
319 :
320 : // struct exception_node
321 :
322 : bool
323 10276 : exception_node::operator== (const exception_node &other) const
324 : {
325 10276 : return (m_exception_sval == other.m_exception_sval
326 10276 : && m_typeinfo_sval == other.m_typeinfo_sval
327 20552 : && m_destructor_sval == other.m_destructor_sval);
328 : }
329 :
330 : void
331 6 : exception_node::dump_to_pp (pretty_printer *pp,
332 : bool simple) const
333 : {
334 6 : pp_printf (pp, "{exception: ");
335 6 : m_exception_sval->dump_to_pp (pp, simple);
336 6 : pp_string (pp, ", typeinfo: ");
337 6 : m_typeinfo_sval->dump_to_pp (pp, simple);
338 6 : pp_string (pp, ", destructor: ");
339 6 : m_destructor_sval->dump_to_pp (pp, simple);
340 6 : pp_string (pp, "}");
341 6 : }
342 :
void
exception_node::dump (FILE *fp, bool simple) const
{
  /* Dump this node to FP, followed by a newline.  */
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
}
350 :
351 : /* Dump a multiline representation of this model to stderr. */
352 :
DEBUG_FUNCTION void
exception_node::dump (bool simple) const
{
  /* Convenience overload: dump to stderr.  */
  dump (stderr, simple);
}
358 :
DEBUG_FUNCTION void
exception_node::dump () const
{
  /* Dump via the text_art widget system (using make_dump_widget).  */
  text_art::dump (*this);
}
364 :
std::unique_ptr<json::object>
exception_node::to_json () const
{
  /* Serialize each of the three svalues under its own key.
     This is for debugging the analyzer rather than serialization.  */
  auto obj = std::make_unique<json::object> ();
  obj->set ("exception", m_exception_sval->to_json ());
  obj->set ("typeinfo", m_typeinfo_sval->to_json ());
  obj->set ("destructor", m_destructor_sval->to_json ());
  return obj;
}
374 :
375 : std::unique_ptr<text_art::tree_widget>
376 0 : exception_node::make_dump_widget (const text_art::dump_widget_info &dwi) const
377 : {
378 0 : using text_art::tree_widget;
379 0 : std::unique_ptr<tree_widget> w
380 0 : (tree_widget::from_fmt (dwi, nullptr, "Exception Node"));
381 :
382 0 : w->add_child (m_exception_sval->make_dump_widget (dwi, "exception"));
383 0 : w->add_child (m_typeinfo_sval->make_dump_widget (dwi, "typeinfo"));
384 0 : w->add_child (m_destructor_sval->make_dump_widget (dwi, "destructor"));
385 :
386 0 : return w;
387 : }
388 :
tree
exception_node::maybe_get_type () const
{
  /* Attempt to get the exception's type from its typeinfo svalue;
     the "maybe_" prefix indicates this can fail (presumably yielding
     NULL_TREE — see maybe_get_type_from_typeinfo).  */
  return m_typeinfo_sval->maybe_get_type_from_typeinfo ();
}
394 :
void
exception_node::add_to_reachable_regions (reachable_regions &regs) const
{
  /* Mark everything reachable from each of the three svalues.
     (The "&regs" in the signature repairs an "&reg;" HTML-entity
     garble in the extracted source.)  */
  regs.handle_sval (m_exception_sval);
  regs.handle_sval (m_typeinfo_sval);
  regs.handle_sval (m_destructor_sval);
}
402 :
403 : /* class region_model. */
404 :
405 : /* Ctor for region_model: construct an "empty" model. */
406 :
407 370385 : region_model::region_model (region_model_manager *mgr)
408 370385 : : m_mgr (mgr), m_store (), m_current_frame (nullptr),
409 370385 : m_thrown_exceptions_stack (),
410 370385 : m_caught_exceptions_stack (),
411 370385 : m_dynamic_extents ()
412 : {
413 370385 : m_constraints = new constraint_manager (mgr);
414 370385 : }
415 :
416 : /* region_model's copy ctor. */
417 :
region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  /* Deep-copy the constraint_manager: each model owns its own.  */
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_thrown_exceptions_stack (other.m_thrown_exceptions_stack),
  m_caught_exceptions_stack (other.m_caught_exceptions_stack),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
427 :
428 : /* region_model's dtor. */
429 :
region_model::~region_model ()
{
  /* m_constraints is owned by this model (allocated in the ctors
     and in operator=).  */
  delete m_constraints;
}
434 :
435 : /* region_model's assignment operator. */
436 :
437 : region_model &
438 58676 : region_model::operator= (const region_model &other)
439 : {
440 : /* m_mgr is const. */
441 58676 : gcc_assert (m_mgr == other.m_mgr);
442 :
443 58676 : m_store = other.m_store;
444 :
445 58676 : delete m_constraints;
446 58676 : m_constraints = new constraint_manager (*other.m_constraints);
447 :
448 58676 : m_current_frame = other.m_current_frame;
449 :
450 58676 : m_thrown_exceptions_stack = other.m_thrown_exceptions_stack;
451 58676 : m_caught_exceptions_stack = other.m_caught_exceptions_stack;
452 :
453 58676 : m_dynamic_extents = other.m_dynamic_extents;
454 :
455 58676 : return *this;
456 : }
457 :
458 : /* Equality operator for region_model.
459 :
460 : Amongst other things this directly compares the stores and the constraint
461 : managers, so for this to be meaningful both this and OTHER should
462 : have been canonicalized. */
463 :
bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  /* Compare each component in turn, bailing out at the first
     mismatch.  */
  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_thrown_exceptions_stack != other.m_thrown_exceptions_stack)
    return false;
  if (m_caught_exceptions_stack != other.m_caught_exceptions_stack)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  /* All components matched; in checking builds, verify that equal
     models hash equally.  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
491 :
492 : /* Generate a hash value for this region_model. */
493 :
494 : hashval_t
495 1241652 : region_model::hash () const
496 : {
497 1241652 : hashval_t result = m_store.hash ();
498 1241652 : result ^= m_constraints->hash ();
499 1241652 : return result;
500 : }
501 :
502 : /* Dump a representation of this model to PP, showing the
503 : stack, the store, and any constraints.
504 : Use SIMPLE to control how svalues and regions are printed. */
505 :
void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump frame stack, from innermost (m_current_frame) outwards.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, " ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump exception stacks (only when non-empty).  */
  if (m_thrown_exceptions_stack.size () > 0)
    {
      pp_printf (pp, "thrown exceptions: %i", (int)m_thrown_exceptions_stack.size ());
      if (multiline)
	pp_newline (pp);
      else
	pp_string (pp, " {");
      for (size_t idx = 0; idx < m_thrown_exceptions_stack.size (); ++idx)
	{
	  if (multiline)
	    pp_string (pp, " ");
	  else if (idx > 0)
	    pp_string (pp, ", ");
	  pp_printf (pp, "exception (index %i): ", (int)idx);
	  m_thrown_exceptions_stack[idx].dump_to_pp (pp, simple);
	  if (multiline)
	    pp_newline (pp);
	}
      if (!multiline)
	pp_string (pp, "}");
    }
  if (m_caught_exceptions_stack.size () > 0)
    {
      pp_printf (pp, "caught exceptions: %i", (int)m_caught_exceptions_stack.size ());
      if (multiline)
	pp_newline (pp);
      else
	pp_string (pp, " {");
      for (size_t idx = 0; idx < m_caught_exceptions_stack.size (); ++idx)
	{
	  if (multiline)
	    pp_string (pp, " ");
	  else if (idx > 0)
	    pp_string (pp, ", ");
	  pp_printf (pp, "exception (index %i): ", (int)idx);
	  m_caught_exceptions_stack[idx].dump_to_pp (pp, simple);
	  if (multiline)
	    pp_newline (pp);
	}
      if (!multiline)
	pp_string (pp, "}");
    }

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
600 :
601 : /* Dump a representation of this model to FILE. */
602 :
void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  /* Dump this model to FP, followed by a newline.  */
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
}
610 :
611 : /* Dump a multiline representation of this model to stderr. */
612 :
DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  /* Convenience overload: multiline dump to stderr.  */
  dump (stderr, simple, true);
}
618 :
619 : /* Dump a tree-like representation of this state to stderr. */
620 :
DEBUG_FUNCTION void
region_model::dump () const
{
  /* Tree-widget dump (via make_dump_widget).  */
  text_art::dump (*this);
}
626 :
627 : /* Dump a multiline representation of this model to stderr. */
628 :
DEBUG_FUNCTION void
region_model::debug () const
{
  /* Alias for dump (true): simple, multiline, to stderr.  */
  dump (true);
}
634 :
635 : /* Generate a JSON value for this region_model.
636 : This is intended for debugging the analyzer rather than
637 : serialization. */
638 :
639 : std::unique_ptr<json::object>
640 4 : region_model::to_json () const
641 : {
642 4 : auto model_obj = std::make_unique<json::object> ();
643 4 : model_obj->set ("store", m_store.to_json ());
644 4 : model_obj->set ("constraints", m_constraints->to_json ());
645 4 : if (m_current_frame)
646 4 : model_obj->set ("current_frame", m_current_frame->to_json ());
647 :
648 4 : auto thrown_exceptions_arr = std::make_unique<json::array> ();
649 4 : for (auto &node : m_thrown_exceptions_stack)
650 0 : thrown_exceptions_arr->append (node.to_json ());
651 4 : model_obj->set ("thrown_exception_stack", std::move (thrown_exceptions_arr));
652 :
653 4 : auto caught_exceptions_arr = std::make_unique<json::array> ();
654 4 : for (auto &node : m_caught_exceptions_stack)
655 0 : caught_exceptions_arr->append (node.to_json ());
656 4 : model_obj->set ("caught_exception_stack", std::move (caught_exceptions_arr));
657 :
658 4 : model_obj->set ("dynamic_extents", m_dynamic_extents.to_json ());
659 8 : return model_obj;
660 4 : }
661 :
std::unique_ptr<text_art::tree_widget>
region_model::make_dump_widget (const text_art::dump_widget_info &dwi) const
{
  using text_art::tree_widget;
  std::unique_ptr<tree_widget> model_widget
    (tree_widget::from_fmt (dwi, nullptr, "Region Model"));

  if (m_current_frame)
    {
      /* Build a one-line "Current Frame: ..." child via a local pp.  */
      pretty_printer the_pp;
      pretty_printer * const pp = &the_pp;
      pp_format_decoder (pp) = default_tree_printer;
      pp_show_color (pp) = true;
      const bool simple = true;

      pp_string (pp, "Current Frame: ");
      m_current_frame->dump_to_pp (pp, simple);
      model_widget->add_child (tree_widget::make (dwi, pp));
    }

  /* Only show exception-stack subtrees when non-empty.  */
  if (m_thrown_exceptions_stack.size () > 0)
    {
      auto thrown_exceptions_widget
	= tree_widget::make (dwi, "Thrown Exceptions");
      for (auto &thrown_exception : m_thrown_exceptions_stack)
	thrown_exceptions_widget->add_child
	  (thrown_exception.make_dump_widget (dwi));
      model_widget->add_child (std::move (thrown_exceptions_widget));
    }
  if (m_caught_exceptions_stack.size () > 0)
    {
      auto caught_exceptions_widget
	= tree_widget::make (dwi, "Caught Exceptions");
      for (auto &caught_exception : m_caught_exceptions_stack)
	caught_exceptions_widget->add_child
	  (caught_exception.make_dump_widget (dwi));
      model_widget->add_child (std::move (caught_exceptions_widget));
    }

  /* Store, constraints and dynamic extents each provide their own
     subtree.  */
  model_widget->add_child
    (m_store.make_dump_widget (dwi,
			       m_mgr->get_store_manager ()));
  model_widget->add_child (m_constraints->make_dump_widget (dwi));
  model_widget->add_child (m_dynamic_extents.make_dump_widget (dwi));
  return model_widget;
}
708 :
709 : /* Assert that this object is valid. */
710 :
void
region_model::validate () const
{
  /* Delegate to the store's self-checks.  */
  m_store.validate ();
}
716 :
717 : /* Canonicalize the store and constraints, to maximize the chance of
718 : equality between region_model instances. */
719 :
void
region_model::canonicalize ()
{
  /* Canonicalize the two components that have canonical forms.  */
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
726 :
727 : /* Return true if this region_model is in canonical form. */
728 :
729 : bool
730 389786 : region_model::canonicalized_p () const
731 : {
732 389786 : region_model copy (*this);
733 389786 : copy.canonicalize ();
734 389786 : return *this == copy;
735 389786 : }
736 :
737 : /* See the comment for store::loop_replay_fixup. */
738 :
void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  /* Delegate to the store; see store::loop_replay_fixup for the
     rationale.  */
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
744 :
745 : /* A subclass of pending_diagnostic for complaining about uses of
746 : poisoned values. */
747 :
class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR is the expression whose value is poisoned; PKIND says how
     (uninit/freed/deleted/popped_stack).
     SRC_REGION, if non-null, is highlighted via a region-creation
     event (see mark_interesting_stuff).
     CHECK_EXPR, if non-null, is used to reject infeasible paths
     (see check_valid_fpath_p).  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region,
			     tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == poison_kind::uninit;
  }

  /* Deduplication: same expr, same poison kind, same source region.  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  /* Map the poison kind to the -Wanalyzer-* option controlling the
     warning.  */
  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case poison_kind::uninit:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case poison_kind::freed:
      case poison_kind::deleted:
	return OPT_Wanalyzer_use_after_free;
      case poison_kind::popped_stack:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool terminate_path_p () const final override { return true; }

  /* Emit the warning itself, with wording and CWE id per poison
     kind.  */
  bool emit (diagnostic_emission_context &ctxt) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case poison_kind::uninit:
	{
	  ctxt.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return ctxt.warn ("use of uninitialized value %qE",
			    m_expr);
	}
	break;
      case poison_kind::freed:
	{
	  ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return ctxt.warn ("use after %<free%> of %qE",
			    m_expr);
	}
	break;
      case poison_kind::deleted:
	{
	  ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return ctxt.warn ("use after %<delete%> of %qE",
			    m_expr);
	}
	break;
      case poison_kind::popped_stack:
	{
	  /* TODO: which CWE?  */
	  return ctxt.warn
	    ("dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  /* Wording for the final event in the diagnostic path, mirroring the
     warning text per poison kind.  */
  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case poison_kind::uninit:
	{
	  pp_printf (&pp,
		     "use of uninitialized value %qE here",
		     m_expr);
	  return true;
	}
      case poison_kind::freed:
	{
	  pp_printf (&pp,
		     "use after %<free%> of %qE here",
		     m_expr);
	  return true;
	}
      case poison_kind::deleted:
	{
	  pp_printf (&pp,
		     "use after %<delete%> of %qE here",
		     m_expr);
	  return true;
	}
      case poison_kind::popped_stack:
	{
	  pp_printf (&pp,
		     "dereferencing pointer %qE to within stale stack frame",
		     m_expr);
	  return true;
	}
      }
  }

  /* Highlight the creation of the poisoned value's region, if known.  */
  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode)
    const final override
  {
    if (!m_check_expr)
      return true;
    const svalue *fsval = fnode.get_model ().get_rvalue (m_check_expr, nullptr);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue * fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

  /* Record machine-readable details of this diagnostic in SARIF
     output, under a "gcc/analyzer/..." property prefix.  */
  void
  maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
    const final override
  {
    auto &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/poisoned_value_diagnostic/"
    props.set (PROPERTY_PREFIX "expr", tree_to_json (m_expr));
    props.set_string (PROPERTY_PREFIX "kind", poison_kind_to_str (m_pkind));
    if (m_src_region)
      props.set (PROPERTY_PREFIX "src_region", m_src_region->to_json ());
    props.set (PROPERTY_PREFIX "check_expr", tree_to_json (m_check_expr));
#undef PROPERTY_PREFIX
  }

private:
  /* The poisoned expression being used.  */
  tree m_expr;
  /* How the value was poisoned.  */
  enum poison_kind m_pkind;
  /* Region the poison originated from, or null.  */
  const region *m_src_region;
  /* Expression to re-check during feasibility analysis, or null.  */
  tree m_check_expr;
};
922 :
923 : /* A subclass of pending_diagnostic for complaining about shifts
924 : by negative counts. */
925 :
class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  /* ASSIGN is the shift statement; COUNT_CST is the (negative)
     constant shift count.  */
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  /* Deduplication: same statement and same count constant.  */
  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by negative count (%qE)", m_count_cst);
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    pp_printf (&pp,
	       "shift by negative amount here (%qE)",
	       m_count_cst);
    return true;
  }

private:
  /* The assignment containing the offending shift.  */
  const gassign *m_assign;
  /* The constant shift count.  */
  tree m_count_cst;
};
969 :
970 : /* A subclass of pending_diagnostic for complaining about shifts
971 : by counts >= the width of the operand type. */
972 :
class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  /* ASSIGN is the shift statement; OPERAND_PRECISION is the bit
     precision of the shifted operand's type; COUNT_CST is the
     constant shift count (>= that precision).  */
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  /* Deduplication: same statement, precision and count constant.  */
  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by count (%qE) >= precision of type (%qi)",
		      m_count_cst, m_operand_precision);
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    pp_printf (&pp,
	       "shift by count %qE here",
	       m_count_cst);
    return true;
  }

private:
  /* The assignment containing the offending shift.  */
  const gassign *m_assign;
  /* Bit precision of the shifted operand's type.  */
  int m_operand_precision;
  /* The constant shift count.  */
  tree m_count_cst;
};
1022 :
1023 : /* A subclass of pending_diagnostic for complaining about pointer
1024 : subtractions involving unrelated buffers. */
1025 :
1026 : class undefined_ptrdiff_diagnostic
1027 : : public pending_diagnostic_subclass<undefined_ptrdiff_diagnostic>
1028 : {
1029 : public:
1030 : /* Region_creation_event subclass to give a custom wording when
1031 : talking about creation of buffers for LHS and RHS of the
1032 : subtraction. */
1033 : class ptrdiff_region_creation_event : public region_creation_event
1034 : {
1035 : public:
1036 56 : ptrdiff_region_creation_event (const event_loc_info &loc_info,
1037 : bool is_lhs)
1038 56 : : region_creation_event (loc_info),
1039 56 : m_is_lhs (is_lhs)
1040 : {
1041 : }
1042 :
1043 112 : void print_desc (pretty_printer &pp) const final override
1044 : {
1045 112 : if (m_is_lhs)
1046 56 : pp_string (&pp,
1047 : "underlying object for left-hand side"
1048 : " of subtraction created here");
1049 : else
1050 56 : pp_string (&pp,
1051 : "underlying object for right-hand side"
1052 : " of subtraction created here");
1053 112 : }
1054 :
1055 : private:
           : /* True for the LHS pointer of the subtraction, false for the RHS.  */
1056 : bool m_is_lhs;
1057 : };
1058 :
1059 64 : undefined_ptrdiff_diagnostic (const gassign *assign,
1060 : const svalue *sval_a,
1061 : const svalue *sval_b,
1062 : const region *base_reg_a,
1063 : const region *base_reg_b)
1064 64 : : m_assign (assign),
1065 64 : m_sval_a (sval_a),
1066 64 : m_sval_b (sval_b),
1067 64 : m_base_reg_a (base_reg_a),
1068 64 : m_base_reg_b (base_reg_b)
1069 : {
           : /* The diagnostic only makes sense for pointers into two
           :    different underlying objects.  */
1070 64 : gcc_assert (m_base_reg_a != m_base_reg_b);
1071 : }
1072 :
1073 380 : const char *get_kind () const final override
1074 : {
1075 380 : return "undefined_ptrdiff_diagnostic";
1076 : }
1077 :
           : /* Equality for deduplication: same stmt, same operand svalues,
           :    same pair of base regions.  */
1078 56 : bool operator== (const undefined_ptrdiff_diagnostic &other) const
1079 : {
1080 56 : return (m_assign == other.m_assign
1081 56 : && m_sval_a == other.m_sval_a
1082 56 : && m_sval_b == other.m_sval_b
1083 56 : && m_base_reg_a == other.m_base_reg_a
1084 112 : && m_base_reg_b == other.m_base_reg_b);
1085 : }
1086 :
1087 84 : int get_controlling_option () const final override
1088 : {
1089 84 : return OPT_Wanalyzer_undefined_behavior_ptrdiff;
1090 : }
1091 :
1092 28 : bool emit (diagnostic_emission_context &ctxt) final override
1093 : {
1094 : /* CWE-469: Use of Pointer Subtraction to Determine Size. */
1095 28 : ctxt.add_cwe (469);
1096 28 : return ctxt.warn ("undefined behavior when subtracting pointers");
1097 : }
1098 :
           : /* Add a custom-worded creation event for whichever of the two
           :    base regions REG corresponds to (LHS vs RHS of the
           :    subtraction); other regions get no event.  */
1099 56 : void add_region_creation_events (const region *reg,
1100 : tree /*capacity*/,
1101 : const event_loc_info &loc_info,
1102 : checker_path &emission_path) final override
1103 : {
1104 56 : if (reg == m_base_reg_a)
1105 28 : emission_path.add_event
1106 28 : (std::make_unique<ptrdiff_region_creation_event> (loc_info, true));
1107 28 : else if (reg == m_base_reg_b)
1108 28 : emission_path.add_event
1109 28 : (std::make_unique<ptrdiff_region_creation_event> (loc_info, false));
1110 56 : }
1111 :
1112 : bool
1113 56 : describe_final_event (pretty_printer &pp,
1114 : const evdesc::final_event &) final override
1115 : {
1116 56 : pp_string (&pp,
1117 : "subtraction of pointers has undefined behavior if"
1118 : " they do not point into the same array object");
1119 56 : return true;
1120 : }
1121 :
           : /* Mark both base regions as interesting so their creation
           :    events are retained in the emitted path.  */
1122 28 : void mark_interesting_stuff (interesting_t *interesting) final override
1123 : {
1124 28 : interesting->add_region_creation (m_base_reg_a);
1125 28 : interesting->add_region_creation (m_base_reg_b);
1126 28 : }
1127 :
1128 : private:
1129 : const gassign *m_assign;
1130 : const svalue *m_sval_a;
1131 : const svalue *m_sval_b;
1132 : const region *m_base_reg_a;
1133 : const region *m_base_reg_b;
1134 : };
1135 :
1136 : /* Check the pointer subtraction SVAL_A - SVAL_B at ASSIGN and add
1137 : a warning to CTXT if they're not within the same base region. */
1138 :
1139 : static void
1140 582 : check_for_invalid_ptrdiff (const gassign *assign,
1141 : region_model_context &ctxt,
1142 : const svalue *sval_a, const svalue *sval_b)
1143 : {
           : /* Bail if we can't determine the underlying object for either
           :    operand.  */
1144 582 : const region *base_reg_a = sval_a->maybe_get_deref_base_region ();
1145 582 : if (!base_reg_a)
1146 518 : return;
1147 102 : const region *base_reg_b = sval_b->maybe_get_deref_base_region ();
1148 102 : if (!base_reg_b)
1149 : return;
1150 :
           : /* Same base region: the subtraction is well-defined.  */
1151 78 : if (base_reg_a == base_reg_b)
1152 : return;
1153 :
           : /* Don't warn when either base is symbolic: distinct symbolic
           :    regions could still alias the same object (presumably this
           :    avoids false positives — NOTE(review): confirm intent).  */
1154 64 : if (base_reg_a->get_kind () == RK_SYMBOLIC)
1155 : return;
1156 64 : if (base_reg_b->get_kind () == RK_SYMBOLIC)
1157 : return;
1158 :
1159 64 : ctxt.warn
1160 64 : (std::make_unique<undefined_ptrdiff_diagnostic> (assign,
1161 : sval_a,
1162 : sval_b,
1163 : base_reg_a,
1164 : base_reg_b));
1165 : }
1166 :
1167 : /* If ASSIGN is a stmt that can be modelled via
1168 : set_value (lhs_reg, SVALUE, CTXT)
1169 : for some SVALUE, get the SVALUE.
1170 : Otherwise return nullptr. */
1171 :
1172 : const svalue *
1173 403592 : region_model::get_gassign_result (const gassign *assign,
1174 : region_model_context *ctxt)
1175 : {
1176 403592 : tree lhs = gimple_assign_lhs (assign);
1177 :
           : /* Volatile ops (other than clobbers) can't be modelled as a pure
           :    value: hand back a fresh conjured svalue for the LHS.  */
1178 403592 : if (gimple_has_volatile_ops (assign)
1179 403592 : && !gimple_clobber_p (assign))
1180 : {
1181 116 : conjured_purge p (this, ctxt);
1182 116 : return m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs),
1183 : assign,
1184 : get_lvalue (lhs, ctxt),
1185 : p);
1186 : }
1187 :
1188 403476 : tree rhs1 = gimple_assign_rhs1 (assign);
1189 403476 : enum tree_code op = gimple_assign_rhs_code (assign);
1190 403476 : switch (op)
1191 : {
1192 : default:
1193 : return nullptr;
1194 :
1195 38156 : case POINTER_PLUS_EXPR:
1196 38156 : {
1197 : /* e.g. "_1 = a_10(D) + 12;" */
1198 38156 : tree ptr = rhs1;
1199 38156 : tree offset = gimple_assign_rhs2 (assign);
1200 :
1201 38156 : const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1202 38156 : const svalue *offset_sval = get_rvalue (offset, ctxt);
1203 : /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
1204 : is an integer of type sizetype". */
1205 38156 : offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
1206 :
1207 38156 : const svalue *sval_binop
1208 38156 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1209 : ptr_sval, offset_sval);
1210 38156 : return sval_binop;
1211 : }
1212 722 : break;
1213 :
1214 722 : case POINTER_DIFF_EXPR:
1215 722 : {
1216 : /* e.g. "_1 = p_2(D) - q_3(D);". */
1217 722 : tree rhs2 = gimple_assign_rhs2 (assign);
1218 722 : const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
1219 722 : const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
1220 :
1221 : // TODO: perhaps fold to zero if they're known to be equal?
1222 :
           : /* Warn about subtracting pointers into different objects
           :    (only when we have a context to report into).  */
1223 722 : if (ctxt)
1224 582 : check_for_invalid_ptrdiff (assign, *ctxt, rhs1_sval, rhs2_sval);
1225 :
1226 722 : const svalue *sval_binop
1227 722 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1228 : rhs1_sval, rhs2_sval);
1229 722 : return sval_binop;
1230 : }
1231 198197 : break;
1232 :
1233 : /* Assignments of the form
1234 : set_value (lvalue (LHS), rvalue (EXPR))
1235 : for various EXPR.
1236 : We already have the lvalue for the LHS above, as "lhs_reg". */
1237 198197 : case ADDR_EXPR: /* LHS = &RHS; */
1238 198197 : case BIT_FIELD_REF:
1239 198197 : case COMPONENT_REF: /* LHS = op0.op1; */
1240 198197 : case MEM_REF:
1241 198197 : case REAL_CST:
1242 198197 : case COMPLEX_CST:
1243 198197 : case VECTOR_CST:
1244 198197 : case INTEGER_CST:
1245 198197 : case ARRAY_REF:
1246 198197 : case SSA_NAME: /* LHS = VAR; */
1247 198197 : case VAR_DECL: /* LHS = VAR; */
1248 198197 : case PARM_DECL:/* LHS = VAR; */
1249 198197 : case REALPART_EXPR:
1250 198197 : case IMAGPART_EXPR:
1251 198197 : return get_rvalue (rhs1, ctxt);
1252 :
1253 63593 : case ABS_EXPR:
1254 63593 : case ABSU_EXPR:
1255 63593 : case CONJ_EXPR:
1256 63593 : case BIT_NOT_EXPR:
1257 63593 : case FIX_TRUNC_EXPR:
1258 63593 : case FLOAT_EXPR:
1259 63593 : case NEGATE_EXPR:
1260 63593 : case NOP_EXPR:
1261 63593 : case VIEW_CONVERT_EXPR:
1262 63593 : {
1263 : /* Unary ops. */
1264 63593 : const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
1265 63593 : const svalue *sval_unaryop
1266 63593 : = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
1267 63593 : return sval_unaryop;
1268 : }
1269 :
1270 14464 : case EQ_EXPR:
1271 14464 : case GE_EXPR:
1272 14464 : case LE_EXPR:
1273 14464 : case NE_EXPR:
1274 14464 : case GT_EXPR:
1275 14464 : case LT_EXPR:
1276 14464 : case UNORDERED_EXPR:
1277 14464 : case ORDERED_EXPR:
1278 14464 : {
1279 14464 : tree rhs2 = gimple_assign_rhs2 (assign);
1280 :
1281 14464 : const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
1282 14464 : const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
1283 :
           : /* For boolean results, try to fold the comparison to a
           :    constant using the model's known constraints.  */
1284 14464 : if (TREE_TYPE (lhs) == boolean_type_node)
1285 : {
1286 : /* Consider constraints between svalues. */
1287 14327 : tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
1288 14327 : if (t.is_known ())
1289 7979 : return m_mgr->get_or_create_constant_svalue
1290 7979 : (t.is_true () ? boolean_true_node : boolean_false_node);
1291 : }
1292 :
1293 : /* Otherwise, generate a symbolic binary op. */
1294 6485 : const svalue *sval_binop
1295 6485 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1296 : rhs1_sval, rhs2_sval);
1297 6485 : return sval_binop;
1298 : }
1299 76419 : break;
1300 :
1301 76419 : case PLUS_EXPR:
1302 76419 : case MINUS_EXPR:
1303 76419 : case MULT_EXPR:
1304 76419 : case MULT_HIGHPART_EXPR:
1305 76419 : case TRUNC_DIV_EXPR:
1306 76419 : case CEIL_DIV_EXPR:
1307 76419 : case FLOOR_DIV_EXPR:
1308 76419 : case ROUND_DIV_EXPR:
1309 76419 : case TRUNC_MOD_EXPR:
1310 76419 : case CEIL_MOD_EXPR:
1311 76419 : case FLOOR_MOD_EXPR:
1312 76419 : case ROUND_MOD_EXPR:
1313 76419 : case RDIV_EXPR:
1314 76419 : case EXACT_DIV_EXPR:
1315 76419 : case LSHIFT_EXPR:
1316 76419 : case RSHIFT_EXPR:
1317 76419 : case LROTATE_EXPR:
1318 76419 : case RROTATE_EXPR:
1319 76419 : case BIT_IOR_EXPR:
1320 76419 : case BIT_XOR_EXPR:
1321 76419 : case BIT_AND_EXPR:
1322 76419 : case MIN_EXPR:
1323 76419 : case MAX_EXPR:
1324 76419 : case COMPLEX_EXPR:
1325 76419 : {
1326 : /* Binary ops. */
1327 76419 : tree rhs2 = gimple_assign_rhs2 (assign);
1328 :
1329 76419 : const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
1330 76419 : const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
1331 :
1332 76419 : if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
1333 : {
1334 : /* "INT34-C. Do not shift an expression by a negative number of bits
1335 : or by greater than or equal to the number of bits that exist in
1336 : the operand." */
1337 13709 : if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
1338 13437 : if (TREE_CODE (rhs2_cst) == INTEGER_CST
1339 13437 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
1340 : {
1341 13435 : if (tree_int_cst_sgn (rhs2_cst) < 0)
1342 16 : ctxt->warn
1343 16 : (std::make_unique<shift_count_negative_diagnostic>
1344 16 : (assign, rhs2_cst));
1345 13419 : else if (compare_tree_int (rhs2_cst,
1346 13419 : TYPE_PRECISION (TREE_TYPE (rhs1)))
1347 : >= 0)
1348 8 : ctxt->warn
1349 8 : (std::make_unique<shift_count_overflow_diagnostic>
1350 8 : (assign,
1351 16 : int (TYPE_PRECISION (TREE_TYPE (rhs1))),
1352 : rhs2_cst));
1353 : }
1354 : }
1355 :
           : /* Division/modulus by a constant zero: give up on modelling
           :    the result rather than computing a bogus value.  */
1356 24 : if (ctxt
1357 66459 : && (op == TRUNC_DIV_EXPR
1358 : || op == CEIL_DIV_EXPR
1359 : || op == FLOOR_DIV_EXPR
1360 : || op == ROUND_DIV_EXPR
1361 : || op == TRUNC_MOD_EXPR
1362 : || op == CEIL_MOD_EXPR
1363 : || op == FLOOR_MOD_EXPR
1364 : || op == ROUND_MOD_EXPR
1365 : || op == RDIV_EXPR
1366 66459 : || op == EXACT_DIV_EXPR))
1367 : {
1368 903 : if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
1369 725 : if (zerop (rhs2_cst))
1370 : {
1371 : /* Ideally we should issue a warning here;
1372 : see PR analyzer/124217. */
1373 : return nullptr;
1374 : }
1375 : }
1376 :
1377 76389 : const svalue *sval_binop
1378 76389 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1379 : rhs1_sval, rhs2_sval);
1380 76389 : return sval_binop;
1381 : }
1382 :
1383 : /* Vector expressions. In theory we could implement these elementwise,
1384 : but for now, simply return unknown values. */
1385 0 : case VEC_DUPLICATE_EXPR:
1386 0 : case VEC_SERIES_EXPR:
1387 0 : case VEC_COND_EXPR:
1388 0 : case VEC_PERM_EXPR:
1389 0 : case VEC_WIDEN_MULT_HI_EXPR:
1390 0 : case VEC_WIDEN_MULT_LO_EXPR:
1391 0 : case VEC_WIDEN_MULT_EVEN_EXPR:
1392 0 : case VEC_WIDEN_MULT_ODD_EXPR:
1393 0 : case VEC_UNPACK_HI_EXPR:
1394 0 : case VEC_UNPACK_LO_EXPR:
1395 0 : case VEC_UNPACK_FLOAT_HI_EXPR:
1396 0 : case VEC_UNPACK_FLOAT_LO_EXPR:
1397 0 : case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1398 0 : case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1399 0 : case VEC_PACK_TRUNC_EXPR:
1400 0 : case VEC_PACK_SAT_EXPR:
1401 0 : case VEC_PACK_FIX_TRUNC_EXPR:
1402 0 : case VEC_PACK_FLOAT_EXPR:
1403 0 : case VEC_WIDEN_LSHIFT_HI_EXPR:
1404 0 : case VEC_WIDEN_LSHIFT_LO_EXPR:
1405 0 : return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
1406 : }
1407 : }
1408 :
1409 : /* Workaround for discarding certain false positives from
1410 : -Wanalyzer-use-of-uninitialized-value
1411 : of the form:
1412 : ((A OR-IF B) OR-IF C)
1413 : and:
1414 : ((A AND-IF B) AND-IF C)
1415 : where evaluating B is redundant, but could involve simple accesses of
1416 : uninitialized locals.
1417 :
1418 : When optimization is turned on the FE can immediately fold compound
1419 : conditionals. Specifically, c_parser_condition parses this condition:
1420 : ((A OR-IF B) OR-IF C)
1421 : and calls c_fully_fold on the condition.
1422 : Within c_fully_fold, fold_truth_andor is called, which bails when
1423 : optimization is off, but if any optimization is turned on can convert the
1424 : ((A OR-IF B) OR-IF C)
1425 : into:
1426 : ((A OR B) OR_IF C)
1427 : for sufficiently simple B
1428 : i.e. the inner OR-IF becomes an OR.
1429 : At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
1430 : giving this for the inner condition:
1431 : tmp = A | B;
1432 : if (tmp)
1433 : thus effectively synthesizing a redundant access of B when optimization
1434 : is turned on, when compared to:
1435 : if (A) goto L1; else goto L4;
1436 : L1: if (B) goto L2; else goto L4;
1437 : L2: if (C) goto L3; else goto L4;
1438 : for the unoptimized case.
1439 :
1440 : Return true if CTXT appears to be handling such a short-circuitable stmt,
1441 : such as the def-stmt for B for the:
1442 : tmp = A | B;
1443 : case above, for the case where A is true and thus B would have been
1444 : short-circuited without optimization, using MODEL for the value of A. */
1445 :
1446 : static bool
1447 1158 : within_short_circuited_stmt_p (const region_model *model,
1448 : const gassign *assign_stmt)
1449 : {
1450 : /* We must have an assignment to a temporary of _Bool type. */
1451 1158 : tree lhs = gimple_assign_lhs (assign_stmt);
1452 1158 : if (TREE_TYPE (lhs) != boolean_type_node)
1453 : return false;
1454 40 : if (TREE_CODE (lhs) != SSA_NAME)
1455 : return false;
           : /* An SSA name with no underlying var decl i.e. a compiler
           :    temporary, not a user variable.  */
1456 40 : if (SSA_NAME_VAR (lhs) != NULL_TREE)
1457 : return false;
1458 :
1459 : /* The temporary bool must be used exactly once: as the second arg of
1460 : a BIT_IOR_EXPR or BIT_AND_EXPR. */
1461 40 : use_operand_p use_op;
1462 40 : gimple *use_stmt;
1463 40 : if (!single_imm_use (lhs, &use_op, &use_stmt))
1464 : return false;
1465 1186 : const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
1466 40 : if (!use_assign)
1467 : return false;
1468 40 : enum tree_code op = gimple_assign_rhs_code (use_assign);
1469 40 : if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
1470 : return false;
1471 28 : if (!(gimple_assign_rhs1 (use_assign) != lhs
1472 28 : && gimple_assign_rhs2 (use_assign) == lhs))
1473 : return false;
1474 :
1475 : /* The first arg of the bitwise stmt must have a known value in MODEL
1476 : that implies that the value of the second arg doesn't matter, i.e.
1477 : 1 for bitwise or, 0 for bitwise and. */
1478 28 : tree other_arg = gimple_assign_rhs1 (use_assign);
1479 : /* Use a nullptr ctxt here to avoid generating warnings. */
1480 28 : const svalue *other_arg_sval = model->get_rvalue (other_arg, nullptr);
1481 28 : tree other_arg_cst = other_arg_sval->maybe_get_constant ();
1482 28 : if (!other_arg_cst)
1483 : return false;
1484 12 : switch (op)
1485 : {
1486 0 : default:
1487 0 : gcc_unreachable ();
1488 12 : case BIT_IOR_EXPR:
           : /* x | 0: the other operand still matters.  */
1489 12 : if (zerop (other_arg_cst))
1490 : return false;
1491 : break;
1492 0 : case BIT_AND_EXPR:
           : /* x & nonzero: the other operand still matters.  */
1493 0 : if (!zerop (other_arg_cst))
1494 : return false;
1495 : break;
1496 : }
1497 :
1498 : /* All tests passed. We appear to be in a stmt that generates a boolean
1499 : temporary with a value that won't matter. */
1500 : return true;
1501 : }
1502 :
1503 : /* Workaround for discarding certain false positives from
1504 : -Wanalyzer-use-of-uninitialized-value
1505 : seen with -ftrivial-auto-var-init=.
1506 :
1507 : -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
1508 :
1509 : If the address of the var is taken, gimplification will give us
1510 : something like:
1511 :
1512 : _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
1513 : len = _1;
1514 :
1515 : The result of DEFERRED_INIT will be an uninit value; we don't
1516 : want to emit a false positive for "len = _1;"
1517 :
1518 : Return true if ASSIGN_STMT is such a stmt. */
1519 :
1520 : static bool
1521 1146 : due_to_ifn_deferred_init_p (const gassign *assign_stmt)
1522 :
1523 : {
1524 : /* We must have an assignment to a decl from an SSA name that's the
1525 : result of a IFN_DEFERRED_INIT call. */
1526 2122 : if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
1527 : return false;
1528 287 : tree lhs = gimple_assign_lhs (assign_stmt);
1529 287 : if (TREE_CODE (lhs) != VAR_DECL)
1530 : return false;
1531 222 : tree rhs = gimple_assign_rhs1 (assign_stmt);
1532 222 : if (TREE_CODE (rhs) != SSA_NAME)
1533 : return false;
           : /* Walk back to the stmt that defined the SSA name, and check
           :    that it is a call to the internal fn IFN_DEFERRED_INIT.  */
1534 222 : const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1535 222 : const gcall *call = dyn_cast <const gcall *> (def_stmt);
1536 222 : if (!call)
1537 : return false;
1538 222 : if (gimple_call_internal_p (call)
1539 222 : && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1540 210 : return true;
1541 : return false;
1542 : }
1543 :
1544 : /* Check for SVAL being poisoned, adding a warning to CTXT.
1545 : Return SVAL, or, if a warning is added, another value, to avoid
1546 : repeatedly complaining about the same poisoned value in followup code.
1547 : SRC_REGION is a hint about where SVAL came from, and can be nullptr. */
1548 :
1549 : const svalue *
1550 3285276 : region_model::check_for_poison (const svalue *sval,
1551 : tree expr,
1552 : const region *src_region,
1553 : region_model_context *ctxt) const
1554 : {
           : /* Without a context there's nowhere to report; return SVAL
           :    unchanged.  */
1555 3285276 : if (!ctxt)
1556 : return sval;
1557 :
1558 1708399 : if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
1559 : {
1560 2713 : enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1561 :
1562 : /* Ignore uninitialized uses of empty types; there's nothing
1563 : to initialize. */
1564 2713 : if (pkind == poison_kind::uninit
1565 2672 : && sval->get_type ()
1566 5275 : && is_empty_type (sval->get_type ()))
1567 : return sval;
1568 :
1569 2474 : if (pkind == poison_kind::uninit)
1570 2433 : if (const gimple *curr_stmt = ctxt->get_stmt ())
1571 1503 : if (const gassign *assign_stmt
1572 3410 : = dyn_cast <const gassign *> (curr_stmt))
1573 : {
1574 : /* Special case to avoid certain false positives. */
1575 1158 : if (within_short_circuited_stmt_p (this, assign_stmt))
1576 : return sval;
1577 :
1578 : /* Special case to avoid false positive on
1579 : -ftrivial-auto-var-init=. */
1580 1146 : if (due_to_ifn_deferred_init_p (assign_stmt))
1581 : return sval;
1582 : }
1583 :
1584 : /* If we have an SSA name for a temporary, we don't want to print
1585 : '<unknown>'.
1586 : Poisoned values are shared by type, and so we can't reconstruct
1587 : the tree other than via the def stmts, using
1588 : fixup_tree_for_diagnostic. */
1589 2252 : tree diag_arg = fixup_tree_for_diagnostic (expr);
1590 2252 : if (src_region == nullptr && pkind == poison_kind::uninit)
1591 2160 : src_region = get_region_for_poisoned_expr (expr);
1592 :
1593 : /* Can we reliably get the poisoned value from "expr"?
1594 : This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
1595 : Unfortunately, we might not have a reliable value for EXPR.
1596 : Hence we only query its value now, and only use it if we get the
1597 : poisoned value back again. */
1598 2252 : tree check_expr = expr;
1599 2252 : const svalue *foo_sval = get_rvalue (expr, nullptr);
1600 2252 : if (foo_sval == sval)
1601 : check_expr = expr;
1602 : else
1603 110 : check_expr = nullptr;
1604 4504 : if (ctxt->warn
1605 2252 : (std::make_unique<poisoned_value_diagnostic> (diag_arg,
1606 : pkind,
1607 : src_region,
1608 : check_expr)))
1609 : {
1610 : /* We only want to report use of a poisoned value at the first
1611 : place it gets used; return an unknown value to avoid generating
1612 : a chain of followup warnings. */
1613 1364 : sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
1614 : }
1615 :
1616 2252 : return sval;
1617 : }
1618 :
1619 : return sval;
1620 : }
1621 :
1622 : /* Attempt to get a region for describing EXPR, the source of region of
1623 : a poisoned_svalue for use in a poisoned_value_diagnostic.
1624 : Return nullptr if there is no good region to use. */
1625 :
1626 : const region *
1627 2160 : region_model::get_region_for_poisoned_expr (tree expr) const
1628 : {
           : /* For SSA names, fall back to the underlying user decl, if any;
           :    pure compiler temporaries give no useful region.  */
1629 2160 : if (TREE_CODE (expr) == SSA_NAME)
1630 : {
1631 1401 : tree decl = SSA_NAME_VAR (expr);
1632 1362 : if (decl && DECL_P (decl))
1633 : expr = decl;
1634 : else
1635 : return nullptr;
1636 : }
           : /* Note: nullptr ctxt, so no diagnostics from this lookup.  */
1637 2121 : return get_lvalue (expr, nullptr);
1638 : }
1639 :
1640 : /* Update this model for the ASSIGN stmt, using CTXT to report any
1641 : diagnostics. */
1642 :
1643 : void
1644 237297 : region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
1645 : {
1646 237297 : tree lhs = gimple_assign_lhs (assign);
1647 237297 : tree rhs1 = gimple_assign_rhs1 (assign);
1648 :
1649 237297 : const region *lhs_reg = get_lvalue (lhs, ctxt);
1650 :
1651 : /* Any writes other than to the stack are treated
1652 : as externally visible. */
1653 237297 : if (ctxt)
1654 : {
1655 191099 : enum memory_space memspace = lhs_reg->get_memory_space ();
1656 191099 : if (memspace != MEMSPACE_STACK)
1657 11185 : ctxt->maybe_did_work ();
1658 : }
1659 :
1660 : /* Most assignments are handled by:
1661 : set_value (lhs_reg, SVALUE, CTXT)
1662 : for some SVALUE. */
1663 237297 : if (const svalue *sval = get_gassign_result (assign, ctxt))
1664 : {
1665 230764 : tree expr = get_diagnostic_tree_for_gassign (assign);
1666 230764 : check_for_poison (sval, expr, nullptr, ctxt);
1667 230764 : set_value (lhs_reg, sval, ctxt);
1668 230764 : return;
1669 : }
1670 :
           : /* Fallback for rhs codes that get_gassign_result doesn't model.  */
1671 6533 : enum tree_code op = gimple_assign_rhs_code (assign);
1672 6533 : switch (op)
1673 : {
1674 15 : default:
1675 15 : {
           : /* The sorry is compiled out ("if (0)") but kept for
           :    debugging; unknown ops conservatively set the LHS to an
           :    unknown value.  */
1676 15 : if (0)
1677 : sorry_at (assign->location, "unhandled assignment op: %qs",
1678 : get_tree_code_name (op));
1679 15 : const svalue *unknown_sval
1680 15 : = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
1681 15 : set_value (lhs_reg, unknown_sval, ctxt);
1682 : }
1683 15 : break;
1684 :
1685 6265 : case CONSTRUCTOR:
1686 6265 : {
1687 6265 : if (TREE_CLOBBER_P (rhs1))
1688 : {
1689 : /* e.g. "x ={v} {CLOBBER};" */
1690 6130 : clobber_region (lhs_reg);
1691 : }
1692 : else
1693 : {
1694 : /* Any CONSTRUCTOR that survives to this point is either
1695 : just a zero-init of everything, or a vector. */
1696 135 : if (!CONSTRUCTOR_NO_CLEARING (rhs1))
1697 135 : zero_fill_region (lhs_reg, ctxt);
1698 : unsigned ix;
1699 : tree index;
1700 : tree val;
1701 297 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
1702 : {
1703 162 : gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
           : /* A missing index means "use the element's position".  */
1704 162 : if (!index)
1705 22 : index = build_int_cst (integer_type_node, ix);
1706 162 : gcc_assert (TREE_CODE (index) == INTEGER_CST);
1707 162 : const svalue *index_sval
1708 162 : = m_mgr->get_or_create_constant_svalue (index);
1709 162 : gcc_assert (index_sval);
1710 162 : const region *sub_reg
1711 162 : = m_mgr->get_element_region (lhs_reg,
1712 162 : TREE_TYPE (val),
1713 : index_sval);
1714 162 : const svalue *val_sval = get_rvalue (val, ctxt);
1715 162 : set_value (sub_reg, val_sval, ctxt);
1716 : }
1717 : }
1718 : }
1719 : break;
1720 :
1721 253 : case STRING_CST:
1722 253 : {
1723 : /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
1724 253 : const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
1725 387 : m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
1726 134 : ctxt ? ctxt->get_uncertainty () : nullptr);
1727 : }
1728 253 : break;
1729 : }
1730 : }
1731 :
1732 : /* Handle the pre-sm-state part of STMT, modifying this object in-place.
1733 : Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1734 : side effects. */
1735 :
1736 : void
1737 311760 : region_model::on_stmt_pre (const gimple *stmt,
1738 : bool *out_unknown_side_effects,
1739 : region_model_context *ctxt)
1740 : {
           : /* Dispatch on the gimple stmt kind.  */
1741 311760 : switch (gimple_code (stmt))
1742 : {
1743 : case GIMPLE_COND:
1744 : case GIMPLE_EH_DISPATCH:
1745 : case GIMPLE_GOTO:
1746 : case GIMPLE_LABEL:
1747 : case GIMPLE_NOP:
1748 : case GIMPLE_PREDICT:
1749 : case GIMPLE_RESX:
1750 : case GIMPLE_SWITCH:
1751 : /* No-ops here. */
1752 : break;
1753 :
1754 237297 : case GIMPLE_ASSIGN:
1755 237297 : {
1756 237297 : const gassign *assign = as_a <const gassign *> (stmt);
1757 237297 : on_assignment (assign, ctxt);
1758 : }
1759 237297 : break;
1760 :
1761 384 : case GIMPLE_ASM:
1762 384 : {
1763 384 : const gasm *asm_stmt = as_a <const gasm *> (stmt);
1764 384 : on_asm_stmt (asm_stmt, ctxt);
1765 384 : if (ctxt)
1766 342 : ctxt->maybe_did_work ();
1767 : }
1768 : break;
1769 :
1770 70729 : case GIMPLE_CALL:
1771 70729 : {
1772 : /* Track whether we have a gcall to a function that's not recognized by
1773 : anything, for which we don't have a function body, or for which we
1774 : don't know the fndecl. */
1775 70729 : const gcall *call = as_a <const gcall *> (stmt);
1776 70729 : *out_unknown_side_effects = on_call_pre (*call, ctxt);
1777 : }
1778 70729 : break;
1779 :
1780 0 : case GIMPLE_RETURN:
1781 0 : {
1782 0 : const greturn *return_ = as_a <const greturn *> (stmt);
1783 0 : on_return (return_, ctxt);
1784 : }
1785 0 : break;
1786 :
1787 : /* We don't expect to see any other statement kinds in the analyzer. */
1788 0 : case GIMPLE_DEBUG: // should have stripped these out when building the supergraph
1789 0 : default:
1790 0 : internal_error ("unexpected gimple stmt code: %qs",
1791 0 : gimple_code_name[gimple_code (stmt)]);
1792 311760 : break;
1793 : }
1794 311760 : }
1795 :
1796 : /* Given a call CD with function attribute FORMAT_ATTR, check that the
1797 : format arg to the call is a valid null-terminated string. */
1798 :
1799 : void
1800 1054 : region_model::check_call_format_attr (const call_details &cd,
1801 : tree format_attr) const
1802 : {
1803 : /* We assume that FORMAT_ATTR has already been validated. */
1804 :
1805 : /* arg0 of the attribute should be kind of format strings
1806 : that this function expects (e.g. "printf"). */
1807 1054 : const tree arg0_tree_list = TREE_VALUE (format_attr);
1808 1054 : if (!arg0_tree_list)
1809 0 : return;
1810 :
1811 : /* arg1 of the attribute should be the 1-based parameter index
1812 : to treat as the format string. */
1813 1054 : const tree arg1_tree_list = TREE_CHAIN (arg0_tree_list);
1814 1054 : if (!arg1_tree_list)
1815 : return;
1816 1054 : const tree arg1_value = TREE_VALUE (arg1_tree_list);
1817 1054 : if (!arg1_value)
1818 : return;
1819 :
           : /* Convert from the attribute's 1-based index to a 0-based arg
           :    index, and bail if the call doesn't supply that many args.  */
1820 1054 : unsigned format_arg_idx = TREE_INT_CST_LOW (arg1_value) - 1;
1821 1054 : if (cd.num_args () <= format_arg_idx)
1822 : return;
1823 :
1824 : /* Subclass of annotating_context that
1825 : adds a note about the format attr to any saved diagnostics. */
1826 1054 : class annotating_ctxt : public annotating_context
1827 : {
1828 : public:
1829 1054 : annotating_ctxt (const call_details &cd,
1830 : unsigned fmt_param_idx)
1831 1054 : : annotating_context (cd.get_ctxt ()),
1832 1054 : m_cd (cd),
1833 1054 : m_fmt_param_idx (fmt_param_idx)
1834 : {
1835 : }
1836 13 : void add_annotations () final override
1837 : {
           : /* Pending note explaining why the arg is treated as a
           :    format string (points at the "format" attribute).  */
1838 0 : class reason_format_attr
1839 : : public pending_note_subclass<reason_format_attr>
1840 : {
1841 : public:
1842 13 : reason_format_attr (const call_arg_details &arg_details)
1843 13 : : m_arg_details (arg_details)
1844 : {
1845 : }
1846 :
1847 74 : const char *get_kind () const final override
1848 : {
1849 74 : return "reason_format_attr";
1850 : }
1851 :
1852 13 : void emit () const final override
1853 : {
1854 13 : inform (DECL_SOURCE_LOCATION (m_arg_details.m_called_fndecl),
1855 : "parameter %i of %qD marked as a format string"
1856 : " via %qs attribute",
1857 13 : m_arg_details.m_arg_idx + 1, m_arg_details.m_called_fndecl,
1858 : "format");
1859 13 : }
1860 :
1861 37 : bool operator== (const reason_format_attr &other) const
1862 : {
1863 37 : return m_arg_details == other.m_arg_details;
1864 : }
1865 :
1866 : private:
1867 : call_arg_details m_arg_details;
1868 : };
1869 :
1870 13 : call_arg_details arg_details (m_cd, m_fmt_param_idx);
1871 13 : add_note (std::make_unique<reason_format_attr> (arg_details));
1872 13 : }
1873 : private:
1874 : const call_details &m_cd;
1875 : unsigned m_fmt_param_idx;
1876 : };
1877 :
           : /* Re-run the null-terminated-string check on the format arg,
           :    with the annotating context attached.  */
1878 1054 : annotating_ctxt my_ctxt (cd, format_arg_idx);
1879 1054 : call_details my_cd (cd, &my_ctxt);
1880 1054 : my_cd.check_for_null_terminated_string_arg (format_arg_idx);
1881 : }
1882 :
1883 : /* Ensure that all arguments at the call described by CD are checked
1884 : for poisoned values, by calling get_rvalue on each argument.
1885 :
1886 : Check that calls to functions with "format" attribute have valid
1887 : null-terminated strings for their format argument. */
1888 :
1889 : void
1890 49104 : region_model::check_call_args (const call_details &cd) const
1891 : {
           : /* get_rvalue on each arg triggers poison checking as a side
           :    effect; the returned svalues are deliberately discarded.  */
1892 114381 : for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1893 65277 : cd.get_arg_svalue (arg_idx);
1894 :
1895 : /* Handle attribute "format". */
1896 49104 : if (tree format_attr = cd.lookup_function_attribute ("format"))
1897 1054 : check_call_format_attr (cd, format_attr);
1898 49104 : }
1899 :
1900 : /* Update this model for an outcome of a call that returns a specific
1901 : integer constant.
1902 : If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1903 : the state-merger code from merging success and failure outcomes. */
1904 :
1905 : void
1906 845 : region_model::update_for_int_cst_return (const call_details &cd,
1907 : int retval,
1908 : bool unmergeable)
1909 : {
           : /* Nothing to do if the call's return value is unused, or if the
           :    LHS isn't of integer type.  */
1910 845 : if (!cd.get_lhs_type ())
1911 : return;
1912 591 : if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1913 : return;
1914 585 : const svalue *result
1915 585 : = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
1916 585 : if (unmergeable)
1917 585 : result = m_mgr->get_or_create_unmergeable (result);
1918 585 : set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1919 : }
1920 :
1921 : /* Update this model for an outcome of a call that returns zero.
1922 : If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1923 : the state-merger code from merging success and failure outcomes. */
1924 :
1925 : void
           : /* Convenience wrapper: a zero return value.  */
1926 286 : region_model::update_for_zero_return (const call_details &cd,
1927 : bool unmergeable)
1928 : {
1929 286 : update_for_int_cst_return (cd, 0, unmergeable);
1930 286 : }
1931 :
1932 : /* Update this model for an outcome of a call that returns non-zero.
1933 : Specifically, assign an svalue to the LHS, and add a constraint that
1934 : that svalue is non-zero. */
1935 :
1936 : void
1937 133 : region_model::update_for_nonzero_return (const call_details &cd)
1938 : {
1939 133 : if (!cd.get_lhs_type ())
1940 : return; /* Result discarded: nothing to constrain.  */
1941 97 : if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1942 : return; /* Only handle integer-typed lhs here.  */
1943 97 : cd.set_any_lhs_with_defaults (); /* Give the lhs a default value first.  */
1944 97 : const svalue *zero
1945 97 : = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1946 97 : const svalue *result
1947 97 : = get_store_value (cd.get_lhs_region (), cd.get_ctxt ())
1948 97 : add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
1949 : }
1950 :
1951 : /* Subroutine of region_model::maybe_get_copy_bounds.
1952 : The Linux kernel commonly uses
1953 : min_t([unsigned] long, VAR, sizeof(T));
1954 : to set an upper bound on the size of a copy_to_user.
1955 : Attempt to simplify such sizes by trying to get the upper bound as a
1956 : constant.
1957 : Return the simplified svalue if possible, or nullptr otherwise. */
1958 :
1959 : static const svalue *
1960 53 : maybe_simplify_upper_bound (const svalue *num_bytes_sval,
1961 : region_model_manager *mgr)
1962 : {
1963 53 : tree type = num_bytes_sval->get_type ();
1964 70 : while (const svalue *raw = num_bytes_sval->maybe_undo_cast ()) /* Strip casts.  */
1965 : num_bytes_sval = raw;
1966 53 : if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
1967 38 : if (binop_sval->get_op () == MIN_EXPR)
1968 8 : if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
1969 : {
1970 8 : /* MIN_EXPR with constant arg1: that constant bounds the size.  */
1971 8 : return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
1972 : /* TODO: we might want to also capture the constraint
1973 : when recording the diagnostic, or note that we're using
1974 : the upper bound. */
1975 : }
1976 : return nullptr;
1977 : }
1977 :
1978 : /* Attempt to get an upper bound for the size of a copy when simulating a
1979 : copy function.
1980 :
1981 : NUM_BYTES_SVAL is the symbolic value for the size of the copy.
1982 : Use it if it's constant, otherwise try to simplify it. Failing
1983 : that, use the size of SRC_REG if constant.
1984 :
1985 : Return a symbolic value for an upper limit on the number of bytes
1986 : copied, or nullptr if no such value could be determined. */
1987 :
1988 : const svalue *
1989 144 : region_model::maybe_get_copy_bounds (const region *src_reg,
1990 : const svalue *num_bytes_sval)
1991 : {
1992 144 : if (num_bytes_sval->maybe_get_constant ()) /* Already constant: done.  */
1993 : return num_bytes_sval;
1994 :
1995 106 : if (const svalue *simplified
1996 53 : = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
1997 8 : num_bytes_sval = simplified; /* e.g. undo the min_t() idiom.  */
1998 :
1999 53 : if (num_bytes_sval->maybe_get_constant ())
2000 : return num_bytes_sval;
2001 :
2002 : /* For now, try just guessing the size as the capacity of the
2003 : base region of the src.
2004 : This is a hack; we might get too large a value. */
2005 45 : const region *src_base_reg = src_reg->get_base_region ();
2006 45 : num_bytes_sval = get_capacity (src_base_reg);
2007 :
2008 45 : if (num_bytes_sval->maybe_get_constant ())
2009 11 : return num_bytes_sval;
2010 :
2011 : /* Non-constant: give up. */
2012 : return nullptr;
2013 : }
2014 :
2015 : /* Get any known_function for FNDECL for call CD.
2016 :
2017 : The call must match all assumptions made by the known_function (such as
2018 : e.g. "argument 1's type must be a pointer type").
2019 :
2020 : Return nullptr if no known_function is found, or it does not match the
2021 : assumption(s). */
2022 :
2023 : const known_function *
2024 292286 : region_model::get_known_function (tree fndecl, const call_details &cd) const
2025 : {
2026 292286 : known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2027 292286 : return known_fn_mgr->get_match (fndecl, cd); /* Checks kf assumptions vs CD.  */
2028 : }
2029 :
2030 : /* Get any known_function for IFN, or nullptr. */
2031 :
2032 : const known_function *
2033 1383 : region_model::get_known_function (enum internal_fn ifn) const
2034 : {
2035 1383 : known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2036 1383 : return known_fn_mgr->get_internal_fn (ifn); /* nullptr if IFN is unhandled.  */
2037 : }
2038 :
2039 : /* Get any builtin_known_function for CALL and emit any warning to CTXT
2040 : if not nullptr.
2041 :
2042 : The call must match all assumptions made by the known_function (such as
2043 : e.g. "argument 1's type must be a pointer type").
2044 :
2045 : Return nullptr if no builtin_known_function is found, or it does
2046 : not match the assumption(s).
2047 :
2048 : Internally calls get_known_function to find a known_function and cast it
2049 : to a builtin_known_function.
2050 :
2051 : For instance, calloc is a C builtin, defined in gcc/builtins.def
2052 : by the DEF_LIB_BUILTIN macro. Such builtins are recognized by the
2053 : analyzer by their name, so that even in C++ or if the user redeclares
2054 : them but mismatch their signature, they are still recognized as builtins.
2055 :
2056 : Cases when a supposed builtin is not flagged as one by the FE:
2057 :
2058 : The C++ FE does not recognize calloc as a builtin if it has not been
2059 : included from a standard header, but the C FE does. Hence in C++ if
2060 : CALL comes from a calloc and stdlib is not included,
2061 : gcc/tree.h:fndecl_built_in_p (CALL) would be false.
2062 :
2063 : In C code, a __SIZE_TYPE__ calloc (__SIZE_TYPE__, __SIZE_TYPE__) user
2064 : declaration has obviously a mismatching signature from the standard, and
2065 : its function_decl tree won't be unified by
2066 : gcc/c-decl.cc:match_builtin_function_types.
2067 :
2068 : Yet in both cases the analyzer should treat the calls as a builtin calloc
2069 : so that extra attributes unspecified by the standard but added by GCC
2070 : (e.g. sprintf attributes in gcc/builtins.def), useful for the detection of
2071 : dangerous behavior, are indeed processed.
2072 :
2073 : Therefore for those cases when a "builtin flag" is not added by the FE,
2074 : builtins' kf are derived from builtin_known_function, whose method
2075 : builtin_known_function::builtin_decl returns the builtin's
2076 : function_decl tree as defined in gcc/builtins.def, with all the extra
2077 : attributes. */
2078 :
2079 : const builtin_known_function *
2080 163200 : region_model::get_builtin_kf (const gcall &call,
2081 : region_model_context *ctxt /* = nullptr */) const
2082 : {
2083 163200 : region_model *mut_this = const_cast <region_model *> (this);
2084 163200 : tree callee_fndecl = mut_this->get_fndecl_for_call (call, ctxt);
2085 163200 : if (! callee_fndecl)
2086 : return nullptr; /* Can't determine the callee.  */
2087 :
2088 163200 : call_details cd (call, mut_this, ctxt);
2089 163200 : if (const known_function *kf = get_known_function (callee_fndecl, cd))
2090 111622 : return kf->dyn_cast_builtin_kf (); /* nullptr if kf isn't builtin-derived.  */
2091 :
2092 : return nullptr;
2093 : }
2094 :
2095 : /* Subclass of custom_edge_info for use by exploded_edges that represent
2096 : an exception being thrown from a call we don't have the code for. */
2097 :
2098 : class exception_thrown_from_unrecognized_call : public custom_edge_info
2099 : {
2100 : public:
2101 5459 : exception_thrown_from_unrecognized_call (const gcall &call,
2102 : tree fndecl)
2103 5459 : : m_call (call),
2104 5459 : m_fndecl (fndecl)
2105 : {
2106 : }
2107 :
2108 12 : void print (pretty_printer *pp) const final override
2109 : {
2110 12 : if (m_fndecl)
2111 12 : pp_printf (pp, "if %qD throws an exception...", m_fndecl);
2112 : else
2113 0 : pp_printf (pp, "if the called function throws an exception...");
2114 12 : };
2115 :
2116 : bool
2117 5455 : update_model (region_model *model,
2118 : const exploded_edge *,
2119 : region_model_context *ctxt) const final override
2120 : {
2121 : /* Allocate an exception and set it as the current exception. */
2122 5455 : const region *exception_reg
2123 : = model->get_or_create_region_for_heap_alloc
2124 5455 : (nullptr, /* We don't know the size of the region. */
2125 : ctxt);
2126 :
2127 5455 : region_model_manager *mgr = model->get_manager ();
2128 5455 : conjured_purge p (model, ctxt);
2129 :
2130 : /* The contents of the region are some conjured svalue. */
2131 5455 : const svalue *exception_sval
2132 10910 : = mgr->get_or_create_conjured_svalue (NULL_TREE,
2133 5455 : &m_call,
2134 : exception_reg, p, 0);
2135 5455 : model->set_value (exception_reg, exception_sval, ctxt);
2136 5455 : const svalue *exception_ptr_sval
2137 5455 : = mgr->get_ptr_svalue (ptr_type_node, exception_reg);
2138 : /* Distinct conjured values (idx 1 and 2) for the typeinfo and
2139 : destructor of the unknown exception object.  */
2140 5455 : const svalue *tinfo_sval
2141 10910 : = mgr->get_or_create_conjured_svalue (ptr_type_node,
2142 5455 : &m_call,
2143 : exception_reg, p, 1);
2144 5455 : const svalue *destructor_sval
2145 10910 : = mgr->get_or_create_conjured_svalue (ptr_type_node,
2146 5455 : &m_call,
2147 : exception_reg, p, 2);
2148 :
2149 : /* Push a new exception_node on the model's thrown exception stack. */
2150 5455 : exception_node eh_node (exception_ptr_sval, tinfo_sval, destructor_sval);
2151 5455 : model->push_thrown_exception (eh_node);
2152 :
2153 5455 : return true;
2154 : }
2155 :
2156 : void
2157 23 : add_events_to_path (checker_path *emission_path,
2158 : const exploded_edge &eedge,
2159 : pending_diagnostic &) const final override
2160 : {
2161 23 : const exploded_node *dst_node = eedge.m_dest;
2162 23 : const program_point &dst_point = dst_node->get_point ();
2163 23 : const int dst_stack_depth = dst_point.get_stack_depth ();
2164 :
2165 23 : emission_path->add_event
2166 23 : (std::make_unique<throw_from_call_to_external_fn_event>
2167 23 : (event_loc_info (m_call.location,
2168 : dst_point.get_fndecl (),
2169 23 : dst_stack_depth),
2170 : dst_node,
2171 : m_call,
2172 23 : m_fndecl));
2173 23 : }
2174 :
2175 : exploded_node *
2176 5346 : create_enode (exploded_graph &eg,
2177 : const program_point &point,
2178 : program_state &&state,
2179 : exploded_node *enode_for_diag,
2180 : region_model_context *ctxt) const final override
2181 : {
2182 5346 : exploded_node *thrown_enode
2183 5346 : = eg.get_or_create_node (point, state, enode_for_diag,
2184 : /* Don't add to worklist. */
2185 : false);
2186 5346 : if (!thrown_enode)
2187 : return nullptr;
2188 :
2189 : /* Add successor edges for thrown_enode "by hand" for the exception. */
2190 5261 : eg.unwind_from_exception (*thrown_enode,
2191 5261 : &m_call,
2192 : ctxt);
2193 5261 : return thrown_enode;
2194 : }
2195 :
2196 : private:
2197 : const gcall &m_call;
2198 : tree m_fndecl; // could be null
2199 : };
2198 :
2199 : /* Get a set of functions that are assumed to not throw exceptions. */
2200 :
2201 : static function_set
2202 5326 : get_fns_assumed_not_to_throw ()
2203 : {
2204 : // TODO: populate this list more fully
2205 5326 : static const char * const fn_names[] = {
2206 : /* This array must be kept sorted. */
2207 :
2208 : "fclose"
2209 : };
2210 5326 : const size_t count = ARRAY_SIZE (fn_names);
2211 5326 : function_set fs (fn_names, count); /* function_set wraps the sorted array.  */
2212 5326 : return fs;
2213 : }
2214 :
2215 : /* Return true if CALL could throw an exception.
2216 : FNDECL could be NULL_TREE. */
2217 :
2218 : static bool
2219 12786 : can_throw_p (const gcall &call, tree fndecl)
2220 : {
2221 12786 : if (!flag_exceptions)
2222 : return false; /* -fno-exceptions: nothing can throw.  */
2223 :
2224 : /* Compatibility flag to allow the user to assume external functions
2225 : never throw exceptions. This may be useful when using the analyzer
2226 : on C code that is compiled with -fexceptions, but for which the headers
2227 : haven't yet had "nothrow" attributes systematically added. */
2228 6013 : if (flag_analyzer_assume_nothrow)
2229 : return false;
2230 :
2231 6011 : if (gimple_call_nothrow_p (&call))
2232 : return false; /* Call site is known not to throw.  */
2233 :
2234 5466 : if (fndecl)
2235 : {
2236 5326 : const function_set fs = get_fns_assumed_not_to_throw ();
2237 5326 : if (fs.contains_decl_p (fndecl))
2238 7 : return false; /* Named in our hardcoded nothrow list.  */
2239 : }
2240 :
2241 : return true;
2242 : }
2243 :
2244 : /* Given CALL where we don't know what code is being called
2245 : (by not having the body of FNDECL, or having NULL_TREE for FNDECL),
2246 : potentially bifurcate control flow to simulate the call throwing
2247 : an exception. */
2248 :
2249 : void
2250 17553 : region_model::check_for_throw_inside_call (const gcall &call,
2251 : tree fndecl,
2252 : region_model_context *ctxt)
2253 : {
2254 17553 : if (!ctxt)
2255 12094 : return; /* Bifurcation below needs a context.  */
2256 :
2257 : /* Could this function throw an exception?
2258 : If so, add an extra e-edge for that. */
2259 12786 : if (!can_throw_p (call, fndecl))
2260 : return;
2261 :
2262 5459 : auto throws_exception
2263 5459 : = std::make_unique<exception_thrown_from_unrecognized_call> (call, fndecl);
2264 5459 : ctxt->bifurcate (std::move (throws_exception));
2265 5459 : }
2266 :
2267 : /* A subclass of pending_diagnostic for complaining about jumps through NULL
2268 : function pointers. */
2269 :
2270 : class jump_through_null : public pending_diagnostic_subclass<jump_through_null>
2271 : {
2272 : public:
2273 16 : jump_through_null (const gcall &call)
2274 16 : : m_call (call)
2275 : {}
2276 :
2277 152 : const char *get_kind () const final override
2278 : {
2279 152 : return "jump_through_null";
2280 : }
2281 :
2282 16 : bool operator== (const jump_through_null &other) const
2283 : {
2284 16 : return &m_call == &other.m_call; /* Deduplicate by call stmt identity.  */
2285 : }
2286 :
2287 32 : int get_controlling_option () const final override
2288 : {
2289 32 : return OPT_Wanalyzer_jump_through_null;
2290 : }
2291 :
2292 16 : bool emit (diagnostic_emission_context &ctxt) final override
2293 : {
2294 16 : return ctxt.warn ("jump through null pointer");
2295 : }
2296 :
2297 32 : bool describe_final_event (pretty_printer &pp,
2298 : const evdesc::final_event &) final override
2299 : {
2300 32 : pp_string (&pp, "jump through null pointer here");
2301 32 : return true;
2302 : }
2303 :
2304 : private:
2305 : const gcall &m_call;
2306 : };
2307 : /* Update this model for the CALL stmt, using CTXT to report any
2308 : diagnostics - the first half.
2309 :
2310 : Updates to the region_model that should be made *before* sm-states
2311 : are updated are done here; other updates to the region_model are done
2312 : in region_model::on_call_post.
2313 :
2314 : Return true if the function call has unknown side effects (it wasn't
2315 : recognized and we don't have a body for it, or are unable to tell which
2316 : fndecl it is). */
2317 :
2318 : bool
2319 70729 : region_model::on_call_pre (const gcall &call, region_model_context *ctxt)
2320 : {
2321 70729 : call_details cd (call, this, ctxt);
2322 :
2323 : /* Special-case for IFN_DEFERRED_INIT.
2324 : We want to report uninitialized variables with -fanalyzer (treating
2325 : -ftrivial-auto-var-init= as purely a mitigation feature).
2326 : Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
2327 : lhs of the call, so that it is still uninitialized from the point of
2328 : view of the analyzer. */
2329 70729 : if (gimple_call_internal_p (&call)
2330 70729 : && gimple_call_internal_fn (&call) == IFN_DEFERRED_INIT)
2331 : return false; /* No side effects. */
2332 :
2333 : /* Get svalues for all of the arguments at the callsite, to ensure that we
2334 : complain about any uninitialized arguments. This might lead to
2335 : duplicates if any of the handling below also looks up the svalues,
2336 : but the deduplication code should deal with that. */
2337 66471 : if (ctxt)
2338 49104 : check_call_args (cd);
2339 :
2340 66471 : tree callee_fndecl = get_fndecl_for_call (call, ctxt);
2341 :
2342 : /* Internal fns with a known_function are handled and terminate here.  */
2343 66471 : if (gimple_call_internal_p (&call))
2344 2766 : if (const known_function *kf
2345 1383 : = get_known_function (gimple_call_internal_fn (&call)))
2346 : {
2347 1353 : kf->impl_call_pre (cd);
2348 1353 : return false; /* No further side effects. */
2349 : }
2350 :
2351 65118 : if (!callee_fndecl)
2352 : {
2353 : /* Check for jump through nullptr. */
2354 476 : if (ctxt)
2355 404 : if (tree fn_ptr = gimple_call_fn (&call))
2356 : {
2357 378 : const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
2358 378 : if (fn_ptr_sval->all_zeroes_p ())
2359 : {
2360 16 : ctxt->warn
2361 16 : (std::make_unique<jump_through_null> (call));
2362 16 : ctxt->terminate_path ();
2363 16 : return true;
2364 : }
2365 : }
2366 :
2367 460 : check_for_throw_inside_call (call, NULL_TREE, ctxt);
2368 460 : cd.set_any_lhs_with_defaults ();
2369 460 : return true; /* Unknown side effects. */
2370 : }
2371 :
2372 64642 : if (const known_function *kf = get_known_function (callee_fndecl, cd))
2373 : {
2374 45414 : kf->impl_call_pre (cd);
2375 45414 : return false; /* No further side effects. */
2376 : }
2377 :
2378 19228 : cd.set_any_lhs_with_defaults ();
2379 :
2380 19228 : const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
2381 19228 : if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
2382 : return false; /* No side effects. */
2383 :
2384 17870 : if (fndecl_built_in_p (callee_fndecl))
2385 : return true; /* Unknown side effects. */
2386 :
2387 17093 : if (!fndecl_has_gimple_body_p (callee_fndecl))
2388 : {
2389 : /* No body available: conservatively model a possible throw.  */
2390 17093 : check_for_throw_inside_call (call, callee_fndecl, ctxt);
2391 17093 : return true; /* Unknown side effects. */
2392 : }
2393 :
2394 : return false; /* No side effects. */
2395 : }
2394 :
2395 : /* Update this model for the CALL stmt, using CTXT to report any
2396 : diagnostics - the second half.
2397 :
2398 : Updates to the region_model that should be made *after* sm-states
2399 : are updated are done here; other updates to the region_model are done
2400 : in region_model::on_call_pre.
2401 :
2402 : If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
2403 : to purge state. */
2404 :
2405 : void
2406 70484 : region_model::on_call_post (const gcall &call,
2407 : bool unknown_side_effects,
2408 : region_model_context *ctxt)
2409 : {
2410 70484 : if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2411 : {
2412 64444 : call_details cd (call, this, ctxt);
2413 64444 : if (const known_function *kf = get_known_function (callee_fndecl, cd))
2414 : {
2415 45274 : kf->impl_call_post (cd);
2416 90887 : return;
2417 : }
2418 : /* Was this fndecl referenced by
2419 : __attribute__((malloc(FOO)))? */
2420 19170 : if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
2421 : {
2422 339 : impl_deallocation_call (cd);
2423 339 : return;
2424 : }
2425 : }
2426 :
2427 24871 : if (unknown_side_effects)
2428 : {
2429 : /* Conservatively clobber everything reachable from the call.  */
2430 16848 : handle_unrecognized_call (call, ctxt);
2431 16848 : if (ctxt)
2432 12201 : ctxt->maybe_did_work ();
2433 : }
2434 : }
2434 :
2435 : /* Purge state involving SVAL from this region_model, using CTXT
2436 : (if non-NULL) to purge other state in a program_state.
2437 :
2438 : For example, if we're at the def-stmt of an SSA name, then we need to
2439 : purge any state for svalues that involve that SSA name. This avoids
2440 : false positives in loops, since a symbolic value referring to the
2441 : SSA name will be referring to the previous value of that SSA name.
2442 :
2443 : For example, in:
2444 : while ((e = hashmap_iter_next(&iter))) {
2445 : struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
2446 : free (e_strbuf->value);
2447 : }
2448 : at the def-stmt of e_8:
2449 : e_8 = hashmap_iter_next (&iter);
2450 : we should purge the "freed" state of:
2451 : INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
2452 : which is the "e_strbuf->value" value from the previous iteration,
2453 : or we will erroneously report a double-free - the "e_8" within it
2454 : refers to the previous value. */
2455 :
2456 : void
2457 26833 : region_model::purge_state_involving (const svalue *sval,
2458 : region_model_context *ctxt)
2459 : {
2460 26833 : if (!sval->can_have_associated_state_p ())
2461 : return; /* Nothing could have been recorded against SVAL.  */
2462 26833 : m_store.purge_state_involving (sval, m_mgr);
2463 26833 : m_constraints->purge_state_involving (sval);
2464 26833 : m_dynamic_extents.purge_state_involving (sval);
2465 26833 : if (ctxt)
2466 17816 : ctxt->purge_state_involving (sval); /* Also purge sm-state in the ctxt.  */
2467 : }
2468 :
2469 : /* A pending_note subclass for adding a note about an
2470 : __attribute__((access, ...)) to a diagnostic. */
2471 :
2472 : class reason_attr_access : public pending_note_subclass<reason_attr_access>
2473 : {
2474 : public:
2475 22 : reason_attr_access (tree callee_fndecl, const attr_access &access)
2476 22 : : m_callee_fndecl (callee_fndecl),
2477 22 : m_ptr_argno (access.ptrarg),
2478 22 : m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
2479 : {
2480 22 : }
2481 :
2482 116 : const char *get_kind () const final override { return "reason_attr_access"; }
2483 :
2484 18 : void emit () const final override
2485 : {
2486 18 : auto_urlify_attributes sentinel;
2487 18 : inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
2488 : "parameter %i of %qD marked with attribute %qs",
2489 18 : m_ptr_argno + 1, m_callee_fndecl, m_access_str); /* 1-based for users.  */
2490 18 : }
2491 :
2492 58 : bool operator== (const reason_attr_access &other) const
2493 : {
2494 58 : return (m_callee_fndecl == other.m_callee_fndecl
2495 22 : && m_ptr_argno == other.m_ptr_argno
2496 80 : && !strcmp (m_access_str, other.m_access_str));
2497 : }
2498 :
2499 : private:
2500 : tree m_callee_fndecl;
2501 : unsigned m_ptr_argno;
2502 : const char *m_access_str;
2503 : };
2504 :
2505 : /* Check CALL a call to external function CALLEE_FNDECL based on
2506 : any __attribute__ ((access, ....) on the latter, complaining to
2507 : CTXT about any issues.
2508 :
2509 : Currently we merely call check_region_for_write on any regions
2510 : pointed to by arguments marked with a "write_only" or "read_write"
2511 : attribute. */
2512 :
2513 : void
2514 1256 : region_model::check_function_attr_access (const gcall &call,
2515 : tree callee_fndecl,
2516 : region_model_context *ctxt,
2517 : rdwr_map &rdwr_idx) const
2518 : {
2519 1256 : gcc_assert (callee_fndecl);
2520 1256 : gcc_assert (ctxt);
2521 :
2522 1256 : tree fntype = TREE_TYPE (callee_fndecl);
2523 1256 : gcc_assert (fntype);
2524 :
2525 1256 : unsigned argno = 0;
2526 :
2527 4773 : for (tree iter = TYPE_ARG_TYPES (fntype); iter;
2528 3517 : iter = TREE_CHAIN (iter), ++argno)
2529 : {
2530 3517 : const attr_access* access = rdwr_idx.get (argno);
2531 3517 : if (!access)
2532 3239 : continue; /* No access-attribute entry for this param.  */
2533 :
2534 : /* Ignore any duplicate entry in the map for the size argument. */
2535 278 : if (access->ptrarg != argno)
2536 114 : continue;
2537 :
2538 164 : if (access->mode == access_write_only
2539 164 : || access->mode == access_read_write)
2540 : {
2541 : /* Subclass of annotating_context that
2542 : adds a note about the attr access to any saved diagnostics. */
2543 40 : class annotating_ctxt : public annotating_context
2544 : {
2545 : public:
2546 40 : annotating_ctxt (tree callee_fndecl,
2547 : const attr_access &access,
2548 : region_model_context *ctxt)
2549 40 : : annotating_context (ctxt),
2550 40 : m_callee_fndecl (callee_fndecl),
2551 40 : m_access (access)
2552 : {
2553 : }
2554 22 : void add_annotations () final override
2555 : {
2556 22 : add_note (std::make_unique<reason_attr_access>
2557 22 : (m_callee_fndecl, m_access));
2558 22 : }
2559 : private:
2560 : tree m_callee_fndecl;
2561 : const attr_access &m_access;
2562 : };
2563 :
2564 : /* Use this ctxt below so that any diagnostics get the
2565 : note added to them. */
2566 40 : annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
2567 :
2568 40 : tree ptr_tree = gimple_call_arg (&call, access->ptrarg);
2569 40 : const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
2570 40 : const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
2571 40 : check_region_for_write (reg, nullptr, &my_ctxt);
2572 : /* We don't use the size arg for now. */
2573 : }
2574 : }
2575 1256 : }
2576 :
2577 : /* Subroutine of region_model::check_function_attr_null_terminated_string_arg,
2578 : checking one instance of __attribute__((null_terminated_string_arg)). */
2579 :
2580 : void
2581 200 : region_model::
2582 : check_one_function_attr_null_terminated_string_arg (const gcall &call,
2583 : tree callee_fndecl,
2584 : region_model_context *ctxt,
2585 : rdwr_map &rdwr_idx,
2586 : tree attr)
2587 : {
2588 200 : gcc_assert (callee_fndecl);
2589 200 : gcc_assert (ctxt);
2590 200 : gcc_assert (attr);
2591 :
2592 200 : tree arg = TREE_VALUE (attr);
2593 200 : if (!arg)
2594 76 : return; /* Attribute with no argument: nothing to check.  */
2595 :
2596 : /* Convert from 1-based to 0-based index. */
2597 200 : unsigned int arg_idx = TREE_INT_CST_LOW (TREE_VALUE (arg)) - 1;
2598 :
2599 : /* If there's also an "access" attribute on the ptr param
2600 : for reading with a size param specified, then that size
2601 : limits the size of the possible read from the pointer. */
2602 200 : if (const attr_access* access = rdwr_idx.get (arg_idx))
2603 104 : if ((access->mode == access_read_only
2604 104 : || access->mode == access_read_write)
2605 104 : && access->sizarg != UINT_MAX)
2606 : {
2607 76 : call_details cd_checked (call, this, ctxt);
2608 76 : const svalue *limit_sval
2609 76 : = cd_checked.get_arg_svalue (access->sizarg);
2610 76 : const svalue *ptr_sval
2611 76 : = cd_checked.get_arg_svalue (arg_idx);
2612 : /* Try reading all of the bytes expressed by the size param,
2613 : but without emitting warnings (via a null context). */
2614 76 : const svalue *limited_sval
2615 76 : = read_bytes (deref_rvalue (ptr_sval, NULL_TREE, nullptr),
2616 : NULL_TREE,
2617 : limit_sval,
2618 : nullptr);
2619 76 : if (limited_sval->get_kind () == SK_POISONED)
2620 : {
2621 : /* Reading up to the truncation limit caused issues.
2622 : Assume that the string is meant to be terminated
2623 : before then, so perform a *checked* check for the
2624 : terminator. */
2625 24 : check_for_null_terminated_string_arg (cd_checked,
2626 : arg_idx);
2627 : }
2628 : else
2629 : {
2630 : /* Reading up to the truncation limit seems OK; repeat
2631 : the read, but with checking enabled. */
2632 52 : read_bytes (deref_rvalue (ptr_sval, NULL_TREE, ctxt),
2633 : NULL_TREE,
2634 : limit_sval,
2635 : ctxt);
2636 : }
2637 76 : return;
2638 : }
2639 :
2640 : /* Otherwise, we don't have an access-attribute limiting the read.
2641 : Simulate a read up to the null terminator (if any). */
2642 :
2643 124 : call_details cd (call, this, ctxt);
2644 124 : check_for_null_terminated_string_arg (cd, arg_idx);
2645 : }
2646 :
2647 : /* Check CALL a call to external function CALLEE_FNDECL for any uses
2648 : of __attribute__ ((null_terminated_string_arg)), compaining
2649 : to CTXT about any issues.
2650 :
2651 : Use RDWR_IDX for tracking uses of __attribute__ ((access, ....). */
2652 :
2653 : void
2654 1256 : region_model::
2655 : check_function_attr_null_terminated_string_arg (const gcall &call,
2656 : tree callee_fndecl,
2657 : region_model_context *ctxt,
2658 : rdwr_map &rdwr_idx)
2659 : {
2660 1256 : gcc_assert (callee_fndecl);
2661 1256 : gcc_assert (ctxt);
2662 :
2663 1256 : tree fntype = TREE_TYPE (callee_fndecl);
2664 1256 : gcc_assert (fntype);
2665 :
2666 : /* A function declaration can specify multiple attribute
2667 : null_terminated_string_arg, each with one argument. */
2668 1456 : for (tree attr = TYPE_ATTRIBUTES (fntype); attr; attr = TREE_CHAIN (attr))
2669 : {
2670 : /* Advance ATTR to the next matching attribute (or nullptr).  */
2671 1280 : attr = lookup_attribute ("null_terminated_string_arg", attr);
2672 1280 : if (!attr)
2673 : return;
2674 :
2675 200 : check_one_function_attr_null_terminated_string_arg (call, callee_fndecl,
2676 : ctxt, rdwr_idx,
2677 : attr);
2678 : }
2679 : }
2679 :
2680 : /* Check CALL a call to external function CALLEE_FNDECL for any
2681 : function attributes, complaining to CTXT about any issues. */
2682 :
2683 : void
2684 11818 : region_model::check_function_attrs (const gcall &call,
2685 : tree callee_fndecl,
2686 : region_model_context *ctxt)
2687 : {
2688 11818 : gcc_assert (callee_fndecl);
2689 11818 : gcc_assert (ctxt);
2690 :
2691 11818 : tree fntype = TREE_TYPE (callee_fndecl);
2692 11818 : if (!fntype)
2693 10562 : return;
2694 :
2695 11818 : if (!TYPE_ATTRIBUTES (fntype))
2696 : return; /* Nothing to check without attributes.  */
2697 :
2698 : /* Initialize a map of attribute access specifications for arguments
2699 : to the function call. */
2700 1256 : rdwr_map rdwr_idx;
2701 1256 : init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
2702 :
2703 1256 : check_function_attr_access (call, callee_fndecl, ctxt, rdwr_idx);
2704 1256 : check_function_attr_null_terminated_string_arg (call, callee_fndecl,
2705 : ctxt, rdwr_idx);
2706 1256 : }
2707 :
2708 : /* Handle a call CALL to a function with unknown behavior.
2709 :
2710 : Traverse the regions in this model, determining what regions are
2711 : reachable from pointer arguments to CALL and from global variables,
2712 : recursively.
2713 :
2714 : Set all reachable regions to new unknown values and purge sm-state
2715 : from their values, and from values that point to them. */
2716 :
2717 : void
2718 16848 : region_model::handle_unrecognized_call (const gcall &call,
2719 : region_model_context *ctxt)
2720 : {
2721 16848 : tree fndecl = get_fndecl_for_call (call, ctxt);
2722 :
2723 16848 : if (fndecl && ctxt)
2724 11818 : check_function_attrs (call, fndecl, ctxt);
2725 :
2726 16848 : reachable_regions reachable_regs (this);
2727 :
2728 : /* Determine the reachable regions and their mutability. */
2729 16848 : {
2730 : /* Add globals and regions that already escaped in previous
2731 : unknown calls. */
2732 16848 : m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2733 : &reachable_regs);
2734 :
2735 : /* Params that are pointers. */
2736 16848 : tree iter_param_types = NULL_TREE;
2737 16848 : if (fndecl)
2738 16393 : iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2739 35516 : for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (&call);
2740 : arg_idx++)
2741 : {
2742 : /* Track expected param type, where available. */
2743 18668 : tree param_type = NULL_TREE;
2744 18668 : if (iter_param_types)
2745 : {
2746 17304 : param_type = TREE_VALUE (iter_param_types);
2747 17304 : gcc_assert (param_type);
2748 17304 : iter_param_types = TREE_CHAIN (iter_param_types);
2749 : }
2750 :
2751 18668 : tree parm = gimple_call_arg (&call, arg_idx);
2752 18668 : const svalue *parm_sval = get_rvalue (parm, ctxt);
2753 18668 : reachable_regs.handle_parm (parm_sval, param_type);
2754 : }
2755 : }
2756 :
2757 16848 : uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : nullptr;
2758 :
2759 : /* Purge sm-state for the svalues that were reachable,
2760 : both in non-mutable and mutable form. */
2761 44043 : for (svalue_set::iterator iter
2762 16848 : = reachable_regs.begin_reachable_svals ();
2763 71238 : iter != reachable_regs.end_reachable_svals (); ++iter)
2764 : {
2765 27195 : const svalue *sval = (*iter);
2766 27195 : if (ctxt)
2767 21944 : ctxt->on_unknown_change (sval, false); /* false: not mutable.  */
2768 : }
2769 56179 : for (svalue_set::iterator iter
2770 16848 : = reachable_regs.begin_mutable_svals ();
2771 95510 : iter != reachable_regs.end_mutable_svals (); ++iter)
2772 : {
2773 39331 : const svalue *sval = (*iter);
2774 39331 : if (ctxt)
2775 32224 : ctxt->on_unknown_change (sval, true); /* true: mutable.  */
2776 39331 : if (uncertainty)
2777 31017 : uncertainty->on_mutable_sval_at_unknown_call (sval);
2778 : }
2779 :
2780 : /* Mark any clusters that have escaped. */
2781 16848 : reachable_regs.mark_escaped_clusters (ctxt);
2782 :
2783 : /* Update bindings for all clusters that have escaped, whether above,
2784 : or previously. */
2785 16848 : m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
2786 16848 : conjured_purge (this, ctxt));
2787 :
2788 : /* Purge dynamic extents from any regions that have escaped mutably:
2789 : realloc could have been called on them. */
2790 41543 : for (hash_set<const region *>::iterator
2791 16848 : iter = reachable_regs.begin_mutable_base_regs ();
2792 41543 : iter != reachable_regs.end_mutable_base_regs ();
2793 24695 : ++iter)
2794 : {
2795 24695 : const region *base_reg = (*iter);
2796 24695 : unset_dynamic_extents (base_reg);
2797 : }
2798 16848 : }
2799 :
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Treat each maybe-bound svalue recorded in UNCERTAINTY as a root.  */
  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.
     Only frame-local clusters are added here; globals were seeded
     above via init_cluster_cb.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
2849 :
2850 : /* Update this model for the RETURN_STMT, using CTXT to report any
2851 : diagnostics. */
2852 :
2853 : void
2854 0 : region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2855 : {
2856 0 : tree callee = get_current_function ()->decl;
2857 0 : tree lhs = DECL_RESULT (callee);
2858 0 : tree rhs = gimple_return_retval (return_stmt);
2859 :
2860 0 : if (lhs && rhs)
2861 : {
2862 0 : const svalue *sval = get_rvalue (rhs, ctxt);
2863 0 : const region *ret_reg = get_lvalue (lhs, ctxt);
2864 0 : set_value (ret_reg, sval, ctxt);
2865 : }
2866 0 : }
2867 :
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/sigsetjmp.  */

void
region_model::on_setjmp (const gcall &call,
			 const exploded_node &enode,
			 const superedge &sedge,
			 region_model_context *ctxt)
{
  /* Dereference the jmp_buf argument (arg 0) to find the buffer region.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (&call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (&call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  This records ENODE/SEDGE so a later longjmp can rewind
     to this point.  */
  if (buf_reg)
    {
      setjmp_record r (&enode, &sedge, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (&call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
2903 :
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall &longjmp_call, const gcall &setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (&longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (nullptr, nullptr, ctxt, nullptr, false);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (&setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
2956 :
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unhandled tree code: return a placeholder region rather than
	 asserting, recording the unexpected code.  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	/* ARRAY[INDEX].  */
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* A fixed range of bits within INNER_EXPR; operands 1 and 2
	   are asserted to be INTEGER_CSTs.  */
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	/* *(PTR + OFFSET): dereference PTR, then apply a byte offset.  */
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || VAR_P (expr)
		    || TREE_CODE (expr) == RESULT_DECL);

	/* Locals: use the stack depth recorded in PV to locate the
	   owning frame (not necessarily the innermost one).  */
	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
	/* obj.field */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
3057 :
3058 : /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
3059 :
3060 : static void
3061 5538048 : assert_compat_types (tree src_type, tree dst_type)
3062 : {
3063 5538048 : if (src_type && dst_type && !VOID_TYPE_P (dst_type))
3064 : {
3065 : #if CHECKING_P
3066 5537756 : if (!(useless_type_conversion_p (src_type, dst_type)))
3067 0 : internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
3068 : #endif
3069 : }
3070 5538048 : }
3071 :
3072 : /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
3073 :
3074 : bool
3075 13186 : compat_types_p (tree src_type, tree dst_type)
3076 : {
3077 13186 : if (src_type && dst_type && !VOID_TYPE_P (dst_type))
3078 13186 : if (!(useless_type_conversion_p (src_type, dst_type)))
3079 : return false;
3080 : return true;
3081 : }
3082 :
3083 : /* Get the region for PV within this region_model,
3084 : emitting any diagnostics to CTXT. */
3085 :
3086 : const region *
3087 2476455 : region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
3088 : {
3089 2476455 : if (pv.m_tree == NULL_TREE)
3090 : return nullptr;
3091 :
3092 2476455 : const region *result_reg = get_lvalue_1 (pv, ctxt);
3093 2476455 : assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
3094 2476455 : return result_reg;
3095 : }
3096 :
3097 : /* Get the region for EXPR within this region_model (assuming the most
3098 : recent stack frame if it's a local). */
3099 :
3100 : const region *
3101 1530954 : region_model::get_lvalue (tree expr, region_model_context *ctxt) const
3102 : {
3103 1530954 : return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3104 : }
3105 :
/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      /* Unhandled tree code: conservatively treat as unknown.  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR".  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* Extract a fixed bit range from the value of the underlying
	   region; operands 1 and 2 are asserted to be INTEGER_CSTs.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case VAR_DECL:
      if (DECL_HARD_REGISTER (pv.m_tree))
	{
	  /* If it has a hard register, it doesn't have a memory region
	     and can't be referred to as an lvalue.  */
	  return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
	}
      /* Fall through.  */
    case PARM_DECL:
    case SSA_NAME:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	/* Ordinary lvalues: look up the bound value in the store.  */
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary operations on the operand's value.  */
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
    case RAW_DATA_CST:
      /* Constants map directly to constant svalues.  */
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	/* Pointer arithmetic: PTR + OFFSET.  */
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	/* Field and pointer dereferences: resolve to a region, then
	   read the value from the store.  */
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	/* Virtual function reference: evaluate the underlying expr.  */
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}
3227 :
3228 : /* Get the value of PV within this region_model,
3229 : emitting any diagnostics to CTXT. */
3230 :
3231 : const svalue *
3232 3085138 : region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
3233 : {
3234 3085138 : if (pv.m_tree == NULL_TREE)
3235 : return nullptr;
3236 :
3237 3048823 : const svalue *result_sval = get_rvalue_1 (pv, ctxt);
3238 :
3239 3048823 : assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3240 :
3241 3048823 : result_sval = check_for_poison (result_sval, pv.m_tree, nullptr, ctxt);
3242 :
3243 3048823 : return result_sval;
3244 : }
3245 :
3246 : /* Get the value of EXPR within this region_model (assuming the most
3247 : recent stack frame if it's a local). */
3248 :
3249 : const svalue *
3250 3084644 : region_model::get_rvalue (tree expr, region_model_context *ctxt) const
3251 : {
3252 3084644 : return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3253 : }
3254 :
3255 : /* Return true if this model is on a path with "main" as the entrypoint
3256 : (as opposed to one in which we're merely analyzing a subset of the
3257 : path through the code). */
3258 :
3259 : bool
3260 224102 : region_model::called_from_main_p () const
3261 : {
3262 224102 : if (!m_current_frame)
3263 : return false;
3264 : /* Determine if the oldest stack frame in this model is for "main". */
3265 217739 : const frame_region *frame0 = get_frame_at_index (0);
3266 217739 : gcc_assert (frame0);
3267 217739 : return id_equal (DECL_NAME (frame0->get_function ().decl), "main");
3268 : }
3269 :
/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  Read-only decls can use their initializer regardless
     of the entrypoint.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    return reg->get_initial_value_at_main (m_mgr);

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
3303 :
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If the read itself was flagged (e.g. out-of-bounds), suppress the
     later poisoned-value check to avoid duplicate diagnostics.  */
  bool check_poisoned = true;
  if (check_region_for_read (reg, ctxt))
    check_poisoned = false;

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      /* Cast the bound value to REG's type, if it has one.  */
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_parent_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = integer_zero_node;
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg, check_poisoned);
}
3373 :
3374 : /* Return false if REG does not exist, true if it may do.
3375 : This is for detecting regions within the stack that don't exist anymore
3376 : after frames are popped. */
3377 :
3378 : bool
3379 2643576 : region_model::region_exists_p (const region *reg) const
3380 : {
3381 : /* If within a stack frame, check that the stack frame is live. */
3382 2643576 : if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
3383 : {
3384 : /* Check that the current frame is the enclosing frame, or is called
3385 : by it. */
3386 2611238 : for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3387 560272 : iter_frame = iter_frame->get_calling_frame ())
3388 2595091 : if (iter_frame == enclosing_frame)
3389 : return true;
3390 : return false;
3391 : }
3392 :
3393 : return true;
3394 : }
3395 :
/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt,
			    bool add_nonnull_constraint) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  if (add_nonnull_constraint)
    {
      tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
      const svalue *null_ptr
	= m_mgr->get_or_create_constant_svalue (null_ptr_cst);
      m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
    }

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	/* A pointer to a known region: dereference to that region.  */
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type= TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	/* Dereferencing a poisoned (e.g. uninitialized) pointer:
	   warn, then fall through to a symbolic region.  */
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (std::make_unique<poisoned_value_diagnostic>
			      (ptr, pkind, nullptr, nullptr));
	      }
	  }
      }
      break;
    }

  /* Fallback: a symbolic region, "what PTR_SVAL points to".  */
  return m_mgr->get_symbolic_region (ptr_sval);
}
3481 :
3482 : /* Attempt to get BITS within any value of REG, as TYPE.
3483 : In particular, extract values from compound_svalues for the case
3484 : where there's a concrete binding at BITS.
3485 : Return an unknown svalue if we can't handle the given case.
3486 : Use CTXT to report any warnings associated with reading from REG. */
3487 :
3488 : const svalue *
3489 130 : region_model::get_rvalue_for_bits (tree type,
3490 : const region *reg,
3491 : const bit_range &bits,
3492 : region_model_context *ctxt) const
3493 : {
3494 130 : const svalue *sval = get_store_value (reg, ctxt);
3495 130 : return m_mgr->get_or_create_bits_within (type, bits, sval);
3496 : }
3497 :
/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory.  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  /* Deduplicate by region and decl.  */
  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    bool warned;
    /* Tailor the wording to the kind of region written to.  */
    switch (m_reg->get_kind ())
      {
      default:
	warned = ctxt.warn ("write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = ctxt.warn ("write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = ctxt.warn ("write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	{
	  pp_printf (&pp,
		     "write to %<const%> object %qE here", m_decl);
	  return true;
	}
      case RK_FUNCTION:
	{
	  pp_printf (&pp,
		     "write to function %qE here", m_decl);
	  return true;
	}
      case RK_LABEL:
	{
	  pp_printf (&pp,
		     "write to label %qE here", m_decl);
	  return true;
	}
      }
  }

private:
  const region *m_reg;	/* The region that was written to.  */
  tree m_decl;		/* The decl being reported (const object/fn/label).  */
};
3577 :
/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  /* Deduplicate by region.  */
  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    pp_string (&pp, "write to string literal here");
    return true;
  }

private:
  const region *m_reg;	/* The string-literal region that was written to.  */
};
3622 :
/* Use CTXT to warn If DEST_REG is a region that shouldn't be written to.  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is nullptr.  */
  if (!ctxt)
    return;

  /* Classify by the base region: writes to functions, labels,
     read-only globals, and string literals are all suspect.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn
	  (std::make_unique<write_to_const_diagnostic>
	     (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn
	  (std::make_unique<write_to_const_diagnostic>
	     (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn
	    (std::make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn
	(std::make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
3676 :
/* Get the capacity of REG in bytes.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    /* SSA names: use the size of their type.  */
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, nullptr);
	  }
	else
	  {
	    /* Other decls: use the initializer/decl size, if known.  */
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, nullptr);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    case RK_STRING:
      {
	/* "Capacity" here means "size".  */
	const string_region *string_reg = as_a <const string_region *> (reg);
	tree string_cst = string_reg->get_string_cst ();
	return m_mgr->get_or_create_int_cst (size_type_node,
					     TREE_STRING_LENGTH (string_cst));
      }
      break;
    }

  /* Fall back on any dynamic extent recorded for the region
     (e.g. from malloc/alloca), otherwise "unknown".  */
  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
3724 :
3725 : /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
3726 : using DIR to determine if this access is a read or write.
3727 : Return TRUE if an OOB access was detected.
3728 : If SVAL_HINT is non-NULL, use it as a hint in diagnostics
3729 : about the value that would be written to REG. */
3730 :
3731 : bool
3732 4441957 : region_model::check_region_access (const region *reg,
3733 : enum access_direction dir,
3734 : const svalue *sval_hint,
3735 : region_model_context *ctxt) const
3736 : {
3737 : /* Fail gracefully if CTXT is NULL. */
3738 4441957 : if (!ctxt)
3739 : return false;
3740 :
3741 837030 : bool oob_access_detected = false;
3742 837030 : check_region_for_taint (reg, dir, ctxt);
3743 837030 : if (!check_region_bounds (reg, dir, sval_hint, ctxt))
3744 768 : oob_access_detected = true;
3745 :
3746 837030 : switch (dir)
3747 : {
3748 0 : default:
3749 0 : gcc_unreachable ();
3750 : case access_direction::read:
3751 : /* Currently a no-op. */
3752 : break;
3753 271265 : case access_direction::write:
3754 271265 : check_for_writable_region (reg, ctxt);
3755 271265 : break;
3756 : }
3757 : return oob_access_detected;
3758 : }
3759 :
3760 : /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
3761 :
void
region_model::check_region_for_write (const region *dest_reg,
				      const svalue *sval_hint,
				      region_model_context *ctxt) const
{
  /* Delegate to the common access checker in write mode; SVAL_HINT
     (if non-null) is the value that would be written, for use in
     diagnostics.  */
  check_region_access (dest_reg, access_direction::write, sval_hint, ctxt);
}
3769 :
3770 : /* If CTXT is non-NULL, use it to warn about any problems reading from REG.
3771 : Returns TRUE if an OOB read was detected. */
3772 :
bool
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  /* Delegate to the common access checker in read mode; there is no
     value hint for reads.  Returns true if an OOB read was detected.  */
  return check_region_access (src_reg, access_direction::read, nullptr, ctxt);
}
3779 :
3780 : /* Concrete subclass for casts of pointers that lead to trailing bytes. */
3781 :
class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  /* LHS: the pointer region being assigned to.
     RHS: the buffer region whose capacity is CAPACITY_SVAL bytes.
     EXPR: a tree expressing the capacity, for use in messages
     (may be NULL_TREE).
     STMT: the statement performing the assignment/call.  */
  dubious_allocation_size (const region *lhs, const region *rhs,
			   const svalue *capacity_sval, tree expr,
			   const gimple *stmt)
  : m_lhs (lhs), m_rhs (rhs),
    m_capacity_sval (capacity_sval), m_expr (expr),
    m_stmt (stmt),
    m_has_allocation_event (false)
  {
    gcc_assert (m_capacity_sval);
  }

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  /* Deduplicate diagnostics by statement and capacity expression.  */
  bool operator== (const dubious_allocation_size &other) const
  {
    return (m_stmt == other.m_stmt
	    && pending_diagnostic::same_tree_p (m_expr, other.m_expr));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-131: Incorrect Calculation of Buffer Size.  */
    ctxt.add_cwe (131);

    return ctxt.warn ("allocated buffer size is not a multiple"
		      " of the pointee's size");
  }

  /* Describe the final event of the diagnostic path, quoting the
     pointee's size.  The wording depends on whether an allocation
     event was recorded and on whether the capacity is a constant.  */
  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_has_allocation_event)
      {
	pp_printf (&pp,
		   "assigned to %qT here;"
		   " %<sizeof (%T)%> is %qE",
		   m_lhs->get_type (), pointee_type,
		   size_in_bytes (pointee_type));
	return true;
      }
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  {
	    pp_printf (&pp,
		       "allocated %E bytes and assigned to"
		       " %qT here; %<sizeof (%T)%> is %qE",
		       m_expr, m_lhs->get_type (), pointee_type,
		       size_in_bytes (pointee_type));
	    return true;
	  }
	else
	  {
	    pp_printf (&pp,
		       "allocated %qE bytes and assigned to"
		       " %qT here; %<sizeof (%T)%> is %qE",
		       m_expr, m_lhs->get_type (), pointee_type,
		       size_in_bytes (pointee_type));
	    return true;
	  }
      }

    pp_printf (&pp,
	       "allocated and assigned to %qT here;"
	       " %<sizeof (%T)%> is %qE",
	       m_lhs->get_type (), pointee_type,
	       size_in_bytes (pointee_type));
    return true;
  }

  /* Add an allocation-size event to EMISSION_PATH, and record that we
     did so, so that describe_final_event can use the shorter wording.  */
  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (std::make_unique<region_creation_event_allocation_size>
       (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

  /* Record the regions and capacity in the SARIF output for tooling.  */
  void
  maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
    const final override
  {
    auto &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/dubious_allocation_size/"
    props.set (PROPERTY_PREFIX "lhs", m_lhs->to_json ());
    props.set (PROPERTY_PREFIX "rhs", m_rhs->to_json ());
    props.set (PROPERTY_PREFIX "capacity_sval", m_capacity_sval->to_json ());
#undef PROPERTY_PREFIX
  }

private:
  const region *m_lhs;		/* Pointer region assigned to.  */
  const region *m_rhs;		/* Buffer region assigned from.  */
  const svalue *m_capacity_sval; /* Capacity of the buffer, in bytes.  */
  const tree m_expr;		/* Capacity as a tree, for messages.  */
  const gimple *m_stmt;		/* Statement where the cast happens.  */
  bool m_has_allocation_event;	/* Whether an allocation event was added.  */
};
3905 :
3906 : /* Return true on dubious allocation sizes for constant sizes. */
3907 :
3908 : static bool
3909 1859 : capacity_compatible_with_type (tree cst, tree pointee_size_tree,
3910 : bool is_struct)
3911 : {
3912 1859 : gcc_assert (TREE_CODE (cst) == INTEGER_CST);
3913 1859 : gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
3914 :
3915 1859 : unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
3916 1859 : unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
3917 :
3918 1859 : if (is_struct)
3919 619 : return alloc_size == 0 || alloc_size >= pointee_size;
3920 1240 : return alloc_size % pointee_size == 0;
3921 : }
3922 :
static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  /* Non-struct convenience overload: require CST to be an exact
     multiple of POINTEE_SIZE_TREE.  */
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}
3928 :
3929 : /* Checks whether SVAL could be a multiple of SIZE_CST.
3930 :
3931 : It works by visiting all svalues inside SVAL until it reaches
3932 : atomic nodes. From those, it goes back up again and adds each
3933 : node that is not a multiple of SIZE_CST to the RESULT_SET. */
3934 :
class size_visitor : public visitor
{
public:
  /* Visit all svalues within ROOT_SVAL, flagging in result_set those
     that could fail to be a multiple of SIZE_CST, consulting CM for
     constraints on symbolic values.  */
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  /* True if the root svalue itself was flagged as a possible
     non-multiple of the size constant.  */
  bool is_dubious_capacity ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  /* A conversion is flagged iff its operand was flagged.  */
  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    if (CONVERT_EXPR_CODE_P (sval->get_op ())
	&& result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    switch (sval->get_op ())
      {
      case MULT_EXPR:
	/* A product is a multiple if either factor is, so only flag it
	   when both factors were flagged.  */
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
	break;
      case PLUS_EXPR:
      case MINUS_EXPR:
	/* A sum/difference is a multiple only if both operands are, so
	   flag it when either operand was flagged.  */
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
	break;
      default:
	break;
      }
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  /* A widening svalue (loop iteration summary) is flagged if either its
     base or iterated value was flagged.  */
  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) || result_set.contains (iter))
      result_set.add (sval);
  }

  /* For an initial value, consult the constraint manager: if it's known
     equal to a constant, check that constant; if it's entirely
     unconstrained, flag it as a possible non-multiple.  */
  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
      }
    else if (!m_cm->sval_constrained_p (sval))
      {
	result_set.add (sval);
      }
  }

  /* A conjured value is only checkable when constrained to equal
     a constant.  */
  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	check_constant (cst, sval);
  }

private:
  /* Flag SVAL if the constant CST is not compatible with m_size_cst.  */
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	break;
      case INTEGER_CST:
	if (!capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;		/* Size of the pointee type, in bytes.  */
  const svalue *m_root_sval;	/* Capacity svalue being checked.  */
  constraint_manager *m_cm;	/* Constraints on symbolic values.  */
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};
4039 :
4040 : /* Return true if SIZE_CST is a power of 2, and we have
4041 : CAPACITY_SVAL == ((X | (Y - 1) ) + 1), since it is then a multiple
4042 : of SIZE_CST, as used by Linux kernel's round_up macro. */
4043 :
4044 : static bool
4045 1135 : is_round_up (tree size_cst,
4046 : const svalue *capacity_sval)
4047 : {
4048 1135 : if (!integer_pow2p (size_cst))
4049 : return false;
4050 1135 : const binop_svalue *binop_sval = capacity_sval->dyn_cast_binop_svalue ();
4051 1135 : if (!binop_sval)
4052 : return false;
4053 272 : if (binop_sval->get_op () != PLUS_EXPR)
4054 : return false;
4055 70 : tree rhs_cst = binop_sval->get_arg1 ()->maybe_get_constant ();
4056 70 : if (!rhs_cst)
4057 : return false;
4058 70 : if (!integer_onep (rhs_cst))
4059 : return false;
4060 :
4061 : /* We have CAPACITY_SVAL == (LHS + 1) for some LHS expression. */
4062 :
4063 4 : const binop_svalue *lhs_binop_sval
4064 4 : = binop_sval->get_arg0 ()->dyn_cast_binop_svalue ();
4065 4 : if (!lhs_binop_sval)
4066 : return false;
4067 4 : if (lhs_binop_sval->get_op () != BIT_IOR_EXPR)
4068 : return false;
4069 :
4070 4 : tree inner_rhs_cst = lhs_binop_sval->get_arg1 ()->maybe_get_constant ();
4071 4 : if (!inner_rhs_cst)
4072 : return false;
4073 :
4074 4 : if (wi::to_widest (inner_rhs_cst) + 1 != wi::to_widest (size_cst))
4075 : return false;
4076 : return true;
4077 : }
4078 :
4079 : /* Return true if CAPACITY_SVAL is known to be a multiple of SIZE_CST. */
4080 :
4081 : static bool
4082 1135 : is_multiple_p (tree size_cst,
4083 : const svalue *capacity_sval)
4084 : {
4085 1191 : if (const svalue *sval = capacity_sval->maybe_undo_cast ())
4086 : return is_multiple_p (size_cst, sval);
4087 :
4088 1135 : if (is_round_up (size_cst, capacity_sval))
4089 : return true;
4090 :
4091 : return false;
4092 : }
4093 :
4094 : /* Return true if we should emit a dubious_allocation_size warning
4095 : on assigning a region of capacity CAPACITY_SVAL bytes to a pointer
4096 : of type with size SIZE_CST, where CM expresses known constraints. */
4097 :
4098 : static bool
4099 1135 : is_dubious_capacity (tree size_cst,
4100 : const svalue *capacity_sval,
4101 : constraint_manager *cm)
4102 : {
4103 1135 : if (is_multiple_p (size_cst, capacity_sval))
4104 : return false;
4105 1131 : size_visitor v (size_cst, capacity_sval, cm);
4106 1131 : return v.is_dubious_capacity ();
4107 1131 : }
4108 :
4109 :
4110 : /* Return true if a struct or union either uses the inheritance pattern,
4111 : where the first field is a base struct, or the flexible array member
4112 : pattern, where the last field is an array without a specified size. */
4113 :
4114 : static bool
4115 3562 : struct_or_union_with_inheritance_p (tree struc)
4116 : {
4117 3562 : tree iter = TYPE_FIELDS (struc);
4118 3562 : if (iter == NULL_TREE)
4119 : return false;
4120 3554 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
4121 : return true;
4122 :
4123 : tree last_field;
4124 51893 : while (iter != NULL_TREE)
4125 : {
4126 48586 : last_field = iter;
4127 48586 : iter = DECL_CHAIN (iter);
4128 : }
4129 :
4130 3307 : if (last_field != NULL_TREE
4131 3307 : && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
4132 : return true;
4133 :
4134 : return false;
4135 : }
4136 :
4137 : /* Return true if the lhs and rhs of an assignment have different types. */
4138 :
4139 : static bool
4140 187669 : is_any_cast_p (const gimple *stmt)
4141 : {
4142 187669 : if (const gassign *assign = dyn_cast <const gassign *> (stmt))
4143 147993 : return gimple_assign_cast_p (assign)
4144 271088 : || !pending_diagnostic::same_tree_p (
4145 123095 : TREE_TYPE (gimple_assign_lhs (assign)),
4146 123095 : TREE_TYPE (gimple_assign_rhs1 (assign)));
4147 39676 : else if (const gcall *call = dyn_cast <const gcall *> (stmt))
4148 : {
4149 39268 : tree lhs = gimple_call_lhs (call);
4150 68877 : return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
4151 29609 : TREE_TYPE (gimple_call_lhs (call)),
4152 : gimple_call_return_type (call));
4153 : }
4154 :
4155 : return false;
4156 : }
4157 :
4158 : /* On pointer assignments, check whether the buffer size of
4159 : RHS_SVAL is compatible with the type of the LHS_REG.
4160 : Use a non-null CTXT to report allocation size warnings. */
4161 :
void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  /* Fail gracefully without a context or an associated statement.  */
  if (!ctxt || ctxt->get_stmt () == nullptr)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  /* Only pointer-typed destinations are of interest.  */
  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand size actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on function pointers.  */
  if (TREE_CODE (pointee_type) == FUNCTION_TYPE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  /* Compare the capacity of the pointed-to buffer against the
     pointee's size.  */
  const region *rhs_reg = deref_rvalue (rhs_sval, NULL_TREE, ctxt, false);
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	/* Constant capacity: check it directly.  */
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn
	    (std::make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							 capacity, cst_cap,
							 ctxt->get_stmt ()));
      }
      break;
    default:
      {
	/* Symbolic capacity: only check non-struct pointees, using the
	   constraint manager's knowledge of the capacity svalue.  */
	if (!is_struct)
	  {
	    if (is_dubious_capacity (pointee_size_tree,
				     capacity,
				     m_constraints))
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn
		  (std::make_unique <dubious_allocation_size> (lhs_reg,
							       rhs_reg,
							       capacity, expr,
							       ctxt->get_stmt ()));
	      }
	  }
	break;
      }
    }
}
4238 :
4239 : /* Set the value of the region given by LHS_REG to the value given
4240 : by RHS_SVAL.
4241 : Use CTXT to report any warnings associated with writing to LHS_REG. */
4242 :
4243 : void
4244 346258 : region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
4245 : region_model_context *ctxt)
4246 : {
4247 346258 : gcc_assert (lhs_reg);
4248 346258 : gcc_assert (rhs_sval);
4249 :
4250 : /* Setting the value of an empty region is a no-op. */
4251 346258 : if (lhs_reg->empty_p ())
4252 : return;
4253 :
4254 346238 : check_region_size (lhs_reg, rhs_sval, ctxt);
4255 :
4256 346238 : check_region_for_write (lhs_reg, rhs_sval, ctxt);
4257 :
4258 616552 : m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
4259 270314 : ctxt ? ctxt->get_uncertainty () : nullptr);
4260 : }
4261 :
4262 : /* Set the value of the region given by LHS to the value given by RHS. */
4263 :
4264 : void
4265 84 : region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
4266 : {
4267 84 : const region *lhs_reg = get_lvalue (lhs, ctxt);
4268 84 : const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4269 84 : gcc_assert (lhs_reg);
4270 84 : gcc_assert (rhs_sval);
4271 84 : set_value (lhs_reg, rhs_sval, ctxt);
4272 84 : }
4273 :
4274 : /* Issue a note specifying that a particular function parameter is expected
4275 : to be a valid null-terminated string. */
4276 :
static void
inform_about_expected_null_terminated_string_arg (const call_arg_details &ad)
{
  // TODO: ideally we'd underline the param here
  /* m_arg_idx is 0-based; the message uses a 1-based argument number.  */
  inform (DECL_SOURCE_LOCATION (ad.m_called_fndecl),
	  "argument %d of %qD must be a pointer to a null-terminated string",
	  ad.m_arg_idx + 1, ad.m_called_fndecl);
}
4285 :
4286 : /* A binding of a specific svalue at a concrete byte range. */
4287 :
struct fragment
{
  /* Default: an empty byte range with no bound value.  */
  fragment ()
  : m_byte_range (0, 0), m_sval (nullptr)
  {
  }

  fragment (const byte_range &bytes, const svalue *sval)
  : m_byte_range (bytes), m_sval (sval)
  {
  }

  /* qsort comparator: order fragments by their byte ranges.  */
  static int cmp_ptrs (const void *p1, const void *p2)
  {
    const fragment *f1 = (const fragment *)p1;
    const fragment *f2 = (const fragment *)p2;
    return byte_range::cmp (f1->m_byte_range, f2->m_byte_range);
  }

  /* Debug dump of this fragment to PP.  */
  void
  dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "fragment(");
    m_byte_range.dump_to_pp (pp);
    pp_string (pp, ", sval: ");
    if (m_sval)
      m_sval->dump_to_pp (pp, true);
    else
      pp_string (pp, "nullptr");
    pp_string (pp, ")");
  }

  byte_range m_byte_range;	/* Where within the cluster.  */
  const svalue *m_sval;		/* The value bound there.  */
};
4323 :
4324 : /* Determine if there is a zero terminator somewhere in the
4325 : part of STRING_CST covered by BYTES (where BYTES is relative to the
4326 : start of the constant).
4327 :
4328 : Return a tristate:
4329 : - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
4330 : the number of bytes from that would be read, including the zero byte.
4331 : - false if there definitely isn't a zero byte
4332 : - unknown if we don't know. */
4333 :
4334 : static tristate
4335 284 : string_cst_has_null_terminator (tree string_cst,
4336 : const byte_range &bytes,
4337 : byte_offset_t *out_bytes_read)
4338 : {
4339 284 : gcc_assert (bytes.m_start_byte_offset >= 0);
4340 :
4341 : /* If we're beyond the string_cst, reads are unsuccessful. */
4342 284 : if (tree cst_size = get_string_cst_size (string_cst))
4343 284 : if (TREE_CODE (cst_size) == INTEGER_CST)
4344 284 : if (bytes.m_start_byte_offset >= TREE_INT_CST_LOW (cst_size))
4345 0 : return tristate::unknown ();
4346 :
4347 : /* Assume all bytes after TREE_STRING_LENGTH are zero. This handles
4348 : the case where an array is initialized with a string_cst that isn't
4349 : as long as the array, where the remaining elements are
4350 : empty-initialized and thus zeroed. */
4351 284 : if (bytes.m_start_byte_offset >= TREE_STRING_LENGTH (string_cst))
4352 : {
4353 2 : *out_bytes_read = 1;
4354 2 : return tristate (true);
4355 : }
4356 :
4357 : /* Look for the first 0 byte within STRING_CST
4358 : from START_READ_OFFSET onwards. */
4359 282 : const byte_offset_t num_bytes_to_search
4360 564 : = std::min<byte_offset_t> ((TREE_STRING_LENGTH (string_cst)
4361 282 : - bytes.m_start_byte_offset),
4362 282 : bytes.m_size_in_bytes);
4363 282 : const char *start = (TREE_STRING_POINTER (string_cst)
4364 282 : + bytes.m_start_byte_offset.slow ());
4365 282 : if (num_bytes_to_search >= 0)
4366 282 : if (const void *p = memchr (start, 0, bytes.m_size_in_bytes.slow ()))
4367 : {
4368 162 : *out_bytes_read = (const char *)p - start + 1;
4369 162 : return tristate (true);
4370 : }
4371 :
4372 120 : *out_bytes_read = bytes.m_size_in_bytes;
4373 120 : return tristate (false);
4374 : }
4375 :
4376 : static tristate
4377 : svalue_byte_range_has_null_terminator (const svalue *sval,
4378 : const byte_range &bytes,
4379 : byte_offset_t *out_bytes_read,
4380 : logger *logger);
4381 :
4382 : /* Determine if there is a zero terminator somewhere in the
4383 : part of SVAL covered by BYTES (where BYTES is relative to the svalue).
4384 :
4385 : Return a tristate:
4386 : - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
4387 : the number of bytes from that would be read, including the zero byte.
4388 : - false if there definitely isn't a zero byte
4389 : - unknown if we don't know.
4390 :
4391 : Use LOGGER (if non-null) for any logging. */
4392 :
static tristate
svalue_byte_range_has_null_terminator_1 (const svalue *sval,
					 const byte_range &bytes,
					 byte_offset_t *out_bytes_read,
					 logger *logger)
{
  if (bytes.m_start_byte_offset == 0
      && sval->all_zeroes_p ())
    {
      /* The initial byte of an all-zeroes SVAL is a zero byte.  */
      *out_bytes_read = 1;
      return tristate (true);
    }

  switch (sval->get_kind ())
    {
    case SK_CONSTANT:
      {
	tree cst
	  = as_a <const constant_svalue *> (sval)->get_constant ();
	switch (TREE_CODE (cst))
	  {
	  case STRING_CST:
	    /* Scan the string constant's bytes directly.  */
	    return string_cst_has_null_terminator (cst, bytes, out_bytes_read);
	  case INTEGER_CST:
	    if (bytes.m_start_byte_offset == 0
		&& integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (cst))))
	      {
		/* Model accesses to the initial byte of a 1-byte
		   INTEGER_CST.  */
		*out_bytes_read = 1;
		if (zerop (cst))
		  return tristate (true);
		else
		  return tristate (false);
	      }
	    /* Treat any other access to an INTEGER_CST as unknown.  */
	    return tristate::TS_UNKNOWN;

	  default:
	    break;
	  }
      }
      break;

    case SK_INITIAL:
      {
	/* The initial value of a string literal's region is the
	   literal itself; scan it.  */
	const initial_svalue *initial_sval = (const initial_svalue *)sval;
	const region *reg = initial_sval->get_region ();
	if (const string_region *string_reg = reg->dyn_cast_string_region ())
	  {
	    tree string_cst = string_reg->get_string_cst ();
	    return string_cst_has_null_terminator (string_cst,
						   bytes,
						   out_bytes_read);
	  }
	return tristate::TS_UNKNOWN;
      }
      break;

    case SK_BITS_WITHIN:
      {
	const bits_within_svalue *bits_within_sval
	  = (const bits_within_svalue *)sval;
	byte_range bytes_within_inner (0, 0);
	if (bits_within_sval->get_bits ().as_byte_range (&bytes_within_inner))
	  {
	    /* Consider e.g. looking for null terminator of
	       bytes 2-4 of BITS_WITHIN(bytes 10-15 of inner_sval)

	       This is equivalent to looking within bytes 12-14 of
	       inner_sval.  */
	    const byte_offset_t start_byte_relative_to_inner
	      = (bytes.m_start_byte_offset
		 + bytes_within_inner.m_start_byte_offset);
	    const byte_offset_t next_byte_relative_to_inner
	      = (bytes.get_next_byte_offset ()
		 + bytes_within_inner.m_start_byte_offset);
	    if (next_byte_relative_to_inner > start_byte_relative_to_inner)
	      {
		const byte_range relative_to_inner
		  (start_byte_relative_to_inner,
		   next_byte_relative_to_inner - start_byte_relative_to_inner);
		const svalue *inner_sval
		  = bits_within_sval->get_inner_svalue ();
		/* Recurse (via the logging wrapper) on the inner svalue
		   with the re-based byte range.  */
		return svalue_byte_range_has_null_terminator (inner_sval,
							      relative_to_inner,
							      out_bytes_read,
							      logger);
	      }
	  }
      }
      break;

    default:
      // TODO: it may be possible to handle other cases here.
      break;
    }
  return tristate::TS_UNKNOWN;
}
4493 :
4494 : /* Like svalue_byte_range_has_null_terminator_1, but add logging. */
4495 :
static tristate
svalue_byte_range_has_null_terminator (const svalue *sval,
				       const byte_range &bytes,
				       byte_offset_t *out_bytes_read,
				       logger *logger)
{
  LOG_SCOPE (logger);
  /* Log the query: which byte range of which svalue.  */
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      bytes.dump_to_pp (pp);
      logger->log_partial (" of sval: ");
      sval->dump_to_pp (pp, true);
      logger->end_log_line ();
    }
  tristate ts
    = svalue_byte_range_has_null_terminator_1 (sval, bytes,
					       out_bytes_read, logger);
  /* Log the verdict (and, when found, the number of bytes read).  */
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      pp_printf (pp, "has null terminator: %s", ts.as_string ());
      if (ts.is_true ())
	{
	  pp_string (pp, "; bytes read: ");
	  pp_wide_int (pp, *out_bytes_read, SIGNED);
	}
      logger->end_log_line ();
    }
  return ts;
}
4529 :
4530 : /* A frozen copy of a single base region's binding_cluster within a store,
4531 : optimized for traversal of the concrete parts in byte order.
4532 : This only captures concrete bindings, and is an implementation detail
4533 : of region_model::scan_for_null_terminator. */
4534 :
class iterable_cluster
{
public:
  /* Snapshot CLUSTER's concrete bindings as byte-range fragments,
     sorted by byte range, and remember its symbolic bindings.
     A null CLUSTER yields an empty snapshot.  */
  iterable_cluster (const binding_cluster *cluster)
  {
    if (!cluster)
      return;
    for (auto iter : cluster->get_map ().get_concrete_bindings ())
      {
	const bit_range &bits = iter.first;
	const svalue *sval = iter.second;

	/* Only byte-aligned bindings are captured; sub-byte bindings
	   are dropped.  */
	byte_range fragment_bytes (0, 0);
	if (bits.as_byte_range (&fragment_bytes))
	  m_fragments.safe_push (fragment (fragment_bytes, sval));
      }
    for (auto iter : cluster->get_map ().get_symbolic_bindings ())
      m_symbolic_bindings.safe_push (iter);
    m_fragments.qsort (fragment::cmp_ptrs);
  }

  /* Find the fragment (if any) whose byte range contains BYTE, writing
     it to *OUT_FRAG and returning true; otherwise return false.  */
  bool
  get_fragment_for_byte (byte_offset_t byte, fragment *out_frag) const
  {
    /* TODO: binary search rather than linear.  */
    unsigned iter_idx;
    for (iter_idx = 0; iter_idx < m_fragments.length (); iter_idx++)
      {
	if (m_fragments[iter_idx].m_byte_range.contains_p (byte))
	  {
	    *out_frag = m_fragments[iter_idx];
	    return true;
	  }
      }
    return false;
  }

  bool has_symbolic_bindings_p () const
  {
    return !m_symbolic_bindings.is_empty ();
  }

  /* Debug dump of the fragments and symbolic bindings to PP.  */
  void dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "iterable_cluster (fragments: [");
    for (auto const &iter : &m_fragments)
      {
	if (&iter != m_fragments.begin ())
	  pp_string (pp, ", ");
	iter.dump_to_pp (pp);
      }
    pp_printf (pp, "], symbolic bindings: [");
    for (auto const &iter : m_symbolic_bindings)
      {
	if (&iter != m_symbolic_bindings.begin ())
	  pp_string (pp, ", ");
	iter.m_region->dump_to_pp (pp, true);
      }
    pp_string (pp, "])");
  }

private:
  auto_vec<fragment> m_fragments;	/* Concrete bindings, sorted.  */
  auto_vec<binding_map::symbolic_binding> m_symbolic_bindings;
};
4600 :
4601 : /* Simulate reading the bytes at BYTES from BASE_REG.
4602 : Complain to CTXT about any issues with the read e.g. out-of-bounds. */
4603 :
const svalue *
region_model::get_store_bytes (const region *base_reg,
			       const byte_range &bytes,
			       region_model_context *ctxt) const
{
  /* Shortcut reading all of a string_region.  */
  if (bytes.get_start_byte_offset () == 0)
    if (const string_region *string_reg = base_reg->dyn_cast_string_region ())
      if (bytes.m_size_in_bytes
	  == TREE_STRING_LENGTH (string_reg->get_string_cst ()))
	return m_mgr->get_or_create_initial_value (base_reg);

  /* Otherwise, express the read as a sized region at the given offset
     within BASE_REG.  */
  const svalue *index_sval
    = m_mgr->get_or_create_int_cst (size_type_node,
				    bytes.get_start_byte_offset ());
  const region *offset_reg = m_mgr->get_offset_region (base_reg,
						       NULL_TREE,
						       index_sval);
  const svalue *byte_size_sval
    = m_mgr->get_or_create_int_cst (size_type_node, bytes.m_size_in_bytes);
  const region *read_reg = m_mgr->get_sized_region (offset_reg,
						    NULL_TREE,
						    byte_size_sval);

  /* Simulate reading those bytes from the store.  */
  const svalue *sval = get_store_value (read_reg, ctxt);
  return sval;
}
4632 :
           : /* Build a tree of the form "*((char *)PTR_EXPR + BYTE_OFFSET)":
           :    a MEM_REF of char type at a concrete byte offset from PTR_EXPR,
           :    for use when describing a read of an individual byte
           :    (e.g. in diagnostics about that byte).  */
4633 : static tree
4634 2417 : get_tree_for_byte_offset (tree ptr_expr, byte_offset_t byte_offset)
4635 : {
4636 2417 : gcc_assert (ptr_expr);
4637 2417 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
4638 2417 : return fold_build2 (MEM_REF,
4639 : char_type_node,
4640 : ptr_expr, wide_int_to_tree (ptype, byte_offset));
4641 : }
4642 :
4643 : /* Simulate a series of reads of REG until we find a 0 byte
4644 : (equivalent to calling strlen).
4645 :
4646 : Complain to CTXT and return NULL if:
4647 : - the buffer pointed to isn't null-terminated
4648 : - the buffer pointed to has any uninitialized bytes before any 0-terminator
4649 : - any of the reads aren't within the bounds of the underlying base region
4650 :
4651 : Otherwise, return a svalue for the number of bytes read (strlen + 1),
4652 : and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
4653 : representing the content of REG up to and including the terminator.
4654 :
4655 : Algorithm
4656 : =========
4657 :
4658 : Get offset for first byte to read.
4659 : Find the binding (if any) that contains it.
4660 : Find the size in bits of that binding.
4661 : Round to the nearest byte (which way???)
4662 : Or maybe give up if we have a partial binding there.
4663 : Get the svalue from the binding.
4664 : Determine the strlen (if any) of that svalue.
4665 : Does it have a 0-terminator within it?
4666 : If so, we have a partial read up to and including that terminator
4667 : Read those bytes from the store; add to the result in the correct place.
4668 : Finish
4669 : If not, we have a full read of that svalue
4670 : Read those bytes from the store; add to the result in the correct place.
4671 : Update read/write offsets
4672 : Continue
4673 : If unknown:
4674 : Result is unknown
4675 : Finish
4676 : */
4677 :
4678 : const svalue *
4679 7648 : region_model::scan_for_null_terminator_1 (const region *reg,
4680 : tree expr,
4681 : const svalue **out_sval,
4682 : region_model_context *ctxt) const
4683 : {
4684 7648 : logger *logger = ctxt ? ctxt->get_logger () : nullptr;
4685 7648 : store_manager *store_mgr = m_mgr->get_store_manager ();
4686 :
           : /* We need a concrete starting byte offset within the base region;
           :    bail out with "unknown" if the offset is symbolic.  */
4687 7648 : region_offset offset = reg->get_offset (m_mgr);
4688 7648 : if (offset.symbolic_p ())
4689 : {
4690 115 : if (out_sval)
4691 0 : *out_sval = get_store_value (reg, nullptr);
4692 115 : if (logger)
4693 0 : logger->log ("offset is symbolic");
4694 115 : return m_mgr->get_or_create_unknown_svalue (size_type_node);
4695 : }
4696 7533 : byte_offset_t src_byte_offset;
4697 7533 : if (!offset.get_concrete_byte_offset (&src_byte_offset))
4698 : {
4699 0 : if (out_sval)
4700 0 : *out_sval = get_store_value (reg, nullptr);
4701 0 : if (logger)
4702 0 : logger->log ("can't get concrete byte offset");
4703 0 : return m_mgr->get_or_create_unknown_svalue (size_type_node);
4704 : }
4705 7533 : const byte_offset_t initial_src_byte_offset = src_byte_offset;
4706 7533 : byte_offset_t dst_byte_offset = 0;
4707 :
4708 7533 : const region *base_reg = reg->get_base_region ();
4709 :
           : /* Fast path: the base region is a string literal; find the
           :    terminator directly within the STRING_CST's bytes via memchr.  */
4710 7533 : if (const string_region *str_reg = base_reg->dyn_cast_string_region ())
4711 : {
4712 4474 : tree string_cst = str_reg->get_string_cst ();
4713 4476 : if (src_byte_offset >= 0
4714 4473 : && src_byte_offset < TREE_STRING_LENGTH (string_cst)
4715 8946 : && wi::fits_shwi_p (src_byte_offset))
4716 : {
4717 4472 : HOST_WIDE_INT str_byte_offset = src_byte_offset.to_shwi ();
4718 4472 : const char *effective_start
4719 4472 : = TREE_STRING_POINTER (string_cst) + str_byte_offset;
4720 4472 : size_t effective_len
4721 4472 : = TREE_STRING_LENGTH (string_cst) - str_byte_offset;
4722 4472 : if (const void *p = memchr (effective_start, 0, effective_len))
4723 : {
4724 4472 : size_t num_bytes_read
4725 4472 : = (const char *)p - effective_start + 1;
4726 : /* Simulate the read. */
4727 4472 : byte_range bytes_to_read (0, num_bytes_read);
4728 4472 : const svalue *sval = get_store_bytes (reg, bytes_to_read, ctxt);
4729 4472 : if (out_sval)
4730 834 : *out_sval = sval;
4731 4472 : if (logger)
4732 0 : logger->log ("using string_cst");
4733 4472 : return m_mgr->get_or_create_int_cst (size_type_node,
4734 4472 : num_bytes_read);
4735 : }
4736 : }
4737 : }
           : /* General case: walk the concrete bindings of the base region's
           :    cluster, fragment by fragment.  */
4738 3061 : const binding_cluster *cluster = m_store.get_cluster (base_reg);
4739 3061 : iterable_cluster c (cluster);
4740 3061 : if (logger)
4741 : {
4742 2 : pretty_printer *pp = logger->get_printer ();
4743 2 : logger->start_log_line ();
4744 2 : c.dump_to_pp (pp);
4745 2 : logger->end_log_line ();
4746 : }
4747 :
4748 3061 : binding_map result (*store_mgr);
4749 :
           : /* Accumulate bytes read from successive fragments until a null
           :    terminator is found, the answer becomes unknown, or we run out
           :    of concrete bindings at SRC_BYTE_OFFSET.  */
4750 133 : while (1)
4751 : {
4752 3194 : fragment f;
4753 3194 : if (c.get_fragment_for_byte (src_byte_offset, &f))
4754 : {
4755 541 : if (logger)
4756 : {
4757 1 : logger->start_log_line ();
4758 1 : pretty_printer *pp = logger->get_printer ();
4759 1 : pp_printf (pp, "src_byte_offset: ");
4760 1 : pp_wide_int (pp, src_byte_offset, SIGNED);
4761 1 : pp_string (pp, ": ");
4762 1 : f.dump_to_pp (pp);
4763 1 : logger->end_log_line ();
4764 : }
4765 541 : gcc_assert (f.m_byte_range.contains_p (src_byte_offset));
4766 : /* src_byte_offset and f.m_byte_range are both expressed relative to
4767 : the base region.
4768 : Convert to a byte_range relative to the svalue. */
4769 541 : const byte_range bytes_relative_to_svalue
4770 541 : (src_byte_offset - f.m_byte_range.get_start_byte_offset (),
4771 541 : f.m_byte_range.get_next_byte_offset () - src_byte_offset);
4772 541 : byte_offset_t fragment_bytes_read;
4773 541 : tristate is_terminated
4774 541 : = svalue_byte_range_has_null_terminator (f.m_sval,
4775 : bytes_relative_to_svalue,
4776 : &fragment_bytes_read,
4777 : logger);
4778 541 : if (is_terminated.is_unknown ())
4779 : {
4780 222 : if (out_sval)
4781 2 : *out_sval = get_store_value (reg, nullptr);
4782 408 : return m_mgr->get_or_create_unknown_svalue (size_type_node);
4783 : }
4784 :
4785 : /* Simulate reading those bytes from the store. */
4786 319 : byte_range bytes_to_read (src_byte_offset, fragment_bytes_read);
4787 319 : const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
4788 319 : check_for_poison (sval, expr, nullptr, ctxt);
4789 :
           : /* If the caller wants the content, record this fragment's bytes
           :    at the corresponding position within RESULT.  */
4790 319 : if (out_sval)
4791 : {
4792 9 : byte_range bytes_to_write (dst_byte_offset, fragment_bytes_read);
4793 9 : const binding_key *key
4794 9 : = store_mgr->get_concrete_binding (bytes_to_write);
4795 9 : result.put (key, sval);
4796 : }
4797 :
4798 319 : src_byte_offset += fragment_bytes_read;
4799 319 : dst_byte_offset += fragment_bytes_read;
4800 :
4801 319 : if (is_terminated.is_true ())
4802 : {
4803 186 : if (out_sval)
4804 6 : *out_sval = m_mgr->get_or_create_compound_svalue (NULL_TREE,
4805 : result);
4806 186 : if (logger)
4807 1 : logger->log ("got terminator");
           : /* DST_BYTE_OFFSET now counts all bytes read, including the
           :    terminator.  */
4808 186 : return m_mgr->get_or_create_int_cst (size_type_node,
4809 186 : dst_byte_offset);
4810 : }
4811 : }
4812 : else
4813 : break;
4814 : }
4815 :
4816 : /* No binding for this base_region, or no binding at src_byte_offset
4817 : (or a symbolic binding). */
4818 :
4819 2653 : if (c.has_symbolic_bindings_p ())
4820 : {
4821 110 : if (out_sval)
4822 33 : *out_sval = get_store_value (reg, nullptr);
4823 110 : if (logger)
4824 0 : logger->log ("got symbolic binding");
4825 110 : return m_mgr->get_or_create_unknown_svalue (size_type_node);
4826 : }
4827 :
4828 : /* TODO: the various special-cases seen in
4829 : region_model::get_store_value. */
4830 :
4831 : /* Simulate reading from this byte, then give up. */
4832 2543 : byte_range bytes_to_read (src_byte_offset, 1);
4833 2543 : const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
4834 2543 : tree byte_expr
4835 : = (expr
4836 4960 : ? get_tree_for_byte_offset (expr,
4837 : src_byte_offset - initial_src_byte_offset)
4838 : : NULL_TREE);
4839 2543 : check_for_poison (sval, byte_expr, nullptr, ctxt);
           : /* If BASE_REG can have an initial value, assume the rest of the
           :    buffer is unknowable; otherwise return nullptr to signal failure
           :    (any poison has already been reported above).  */
4840 2543 : if (base_reg->can_have_initial_svalue_p ())
4841 : {
4842 2342 : if (out_sval)
4843 264 : *out_sval = get_store_value (reg, nullptr);
4844 2342 : return m_mgr->get_or_create_unknown_svalue (size_type_node);
4845 : }
4846 : else
4847 : return nullptr;
4848 6122 : }
4849 :
4850 : /* Like region_model::scan_for_null_terminator_1, but add logging. */
4851 :
4852 : const svalue *
4853 7648 : region_model::scan_for_null_terminator (const region *reg,
4854 : tree expr,
4855 : const svalue **out_sval,
4856 : region_model_context *ctxt) const
4857 : {
4858 7648 : logger *logger = ctxt ? ctxt->get_logger () : nullptr;
4859 7648 : LOG_SCOPE (logger);
4860 7648 : if (logger)
4861 : {
4862 2 : pretty_printer *pp = logger->get_printer ();
4863 2 : logger->start_log_line ();
4864 2 : logger->log_partial ("region: ");
4865 2 : reg->dump_to_pp (pp, true);
4866 2 : logger->end_log_line ();
4867 : }
           : /* Initialize *OUT_SVAL so that the invariant below ("non-null
           :    result implies non-null *OUT_SVAL") can be checked.  */
4868 7648 : if (out_sval)
4869 1146 : *out_sval = nullptr;
4870 7648 : const svalue *sval = scan_for_null_terminator_1 (reg, expr, out_sval, ctxt);
4871 7648 : if (sval && out_sval)
4872 1139 : gcc_assert (*out_sval);
           : /* Log both the length result and (if requested) the content result.  */
4873 7648 : if (logger)
4874 : {
4875 2 : pretty_printer *pp = logger->get_printer ();
4876 2 : logger->start_log_line ();
4877 2 : logger->log_partial ("length result: ");
4878 2 : if (sval)
4879 1 : sval->dump_to_pp (pp, true);
4880 : else
4881 1 : pp_printf (pp, "NULL");
4882 2 : logger->end_log_line ();
4883 2 : if (out_sval)
4884 : {
4885 2 : logger->start_log_line ();
4886 2 : logger->log_partial ("content result: ");
4887 2 : if (*out_sval)
4888 1 : (*out_sval)->dump_to_pp (pp, true);
4889 : else
4890 1 : pp_printf (pp, "NULL");
4891 2 : logger->end_log_line ();
4892 : }
4893 : }
4894 15296 : return sval;
4895 7648 : }
4896 :
4897 : /* Check that argument ARG_IDX (0-based) to the call described by CD
4898 : is a pointer to a valid null-terminated string.
4899 :
4900 : Simulate scanning through the buffer, reading until we find a 0 byte
4901 : (equivalent to calling strlen).
4902 :
4903 : Complain and return nullptr if:
4904 : - the buffer pointed to isn't null-terminated
4905 : - the buffer pointed to has any uninitalized bytes before any 0-terminator
4906 : - any of the reads aren't within the bounds of the underlying base region
4907 :
4908 : Otherwise, return a svalue for strlen of the buffer (*not* including
4909 : the null terminator).
4910 :
4911 : TODO: we should also complain if:
4912 : - the pointer is NULL (or could be). */
4913 :
           : /* Convenience overload: delegate to the four-argument overload,
           :    requesting strlen (without the terminator) and discarding the
           :    buffer's content.  */
4914 : const svalue *
4915 207 : region_model::check_for_null_terminated_string_arg (const call_details &cd,
4916 : unsigned arg_idx) const
4917 : {
4918 207 : return check_for_null_terminated_string_arg (cd,
4919 : arg_idx,
4920 : false, /* include_terminator */
4921 207 : nullptr); // out_sval
4922 : }
4923 :
4924 :
4925 : /* Check that argument ARG_IDX (0-based) to the call described by CD
4926 : is a pointer to a valid null-terminated string.
4927 :
4928 : Simulate scanning through the buffer, reading until we find a 0 byte
4929 : (equivalent to calling strlen).
4930 :
4931 : Complain and return nullptr if:
4932 : - the buffer pointed to isn't null-terminated
4933 : - the buffer pointed to has any uninitalized bytes before any 0-terminator
4934 : - any of the reads aren't within the bounds of the underlying base region
4935 :
4936 : Otherwise, return a svalue. This will be the number of bytes read
4937 : (including the null terminator) if INCLUDE_TERMINATOR is true, or strlen
4938 : of the buffer (not including the null terminator) if it is false.
4939 :
4940 : Also, when returning an svalue, if OUT_SVAL is non-nullptr, write to
4941 : *OUT_SVAL with an svalue representing the content of the buffer up to
4942 : and including the terminator.
4943 :
4944 : TODO: we should also complain if:
4945 : - the pointer is NULL (or could be). */
4946 :
4947 : const svalue *
4948 7177 : region_model::check_for_null_terminated_string_arg (const call_details &cd,
4949 : unsigned arg_idx,
4950 : bool include_terminator,
4951 : const svalue **out_sval) const
4952 : {
           : /* Local diagnostic-path event: "while looking for null terminator
           :    for argument N (EXPR) of FN...".  */
4953 0 : class null_terminator_check_event : public custom_event
4954 : {
4955 : public:
4956 159 : null_terminator_check_event (const event_loc_info &loc_info,
4957 : const call_arg_details &arg_details)
4958 159 : : custom_event (loc_info),
4959 159 : m_arg_details (arg_details)
4960 : {
4961 : }
4962 :
4963 300 : void print_desc (pretty_printer &pp) const final override
4964 : {
           : /* Argument indices are reported 1-based to the user.  */
4965 300 : if (m_arg_details.m_arg_expr)
4966 300 : pp_printf (&pp,
4967 : "while looking for null terminator"
4968 : " for argument %i (%qE) of %qD...",
4969 300 : m_arg_details.m_arg_idx + 1,
4970 : m_arg_details.m_arg_expr,
4971 300 : m_arg_details.m_called_fndecl);
4972 : else
4973 0 : pp_printf (&pp,
4974 : "while looking for null terminator"
4975 : " for argument %i of %qD...",
4976 0 : m_arg_details.m_arg_idx + 1,
4977 0 : m_arg_details.m_called_fndecl);
4978 300 : }
4979 :
4980 : private:
4981 : const call_arg_details m_arg_details;
4982 : };
4983 :
           : /* Local pending_note, deduplicated via operator==, that emits a
           :    note about the expected null-terminated string argument (via
           :    inform_about_expected_null_terminated_string_arg).  */
4984 0 : class null_terminator_check_decl_note
4985 : : public pending_note_subclass<null_terminator_check_decl_note>
4986 : {
4987 : public:
4988 159 : null_terminator_check_decl_note (const call_arg_details &arg_details)
4989 159 : : m_arg_details (arg_details)
4990 : {
4991 : }
4992 :
4993 1278 : const char *get_kind () const final override
4994 : {
4995 1278 : return "null_terminator_check_decl_note";
4996 : }
4997 :
4998 147 : void emit () const final override
4999 : {
5000 147 : inform_about_expected_null_terminated_string_arg (m_arg_details);
5001 : }
5002 :
5003 639 : bool operator== (const null_terminator_check_decl_note &other) const
5004 : {
5005 639 : return m_arg_details == other.m_arg_details;
5006 : }
5007 :
5008 : private:
5009 : const call_arg_details m_arg_details;
5010 : };
5011 :
5012 : /* Subclass of decorated_region_model_context that
5013 : adds the above event and note to any saved diagnostics. */
5014 7177 : class annotating_ctxt : public annotating_context
5015 : {
5016 : public:
5017 7177 : annotating_ctxt (const call_details &cd,
5018 : unsigned arg_idx)
5019 7177 : : annotating_context (cd.get_ctxt ()),
5020 7177 : m_cd (cd),
5021 7177 : m_arg_idx (arg_idx)
5022 : {
5023 : }
5024 159 : void add_annotations () final override
5025 : {
5026 159 : call_arg_details arg_details (m_cd, m_arg_idx);
5027 318 : event_loc_info loc_info (m_cd.get_location (),
5028 159 : m_cd.get_model ()->get_current_function ()->decl,
5029 318 : m_cd.get_model ()->get_stack_depth ());
5030 :
5031 159 : add_event
5032 159 : (std::make_unique<null_terminator_check_event> (loc_info,
5033 : arg_details));
5034 159 : add_note
5035 159 : (std::make_unique <null_terminator_check_decl_note> (arg_details));
5036 159 : }
5037 : private:
5038 : const call_details &m_cd;
5039 : unsigned m_arg_idx;
5040 : };
5041 :
5042 : /* Use this ctxt below so that any diagnostics that get added
5043 : get annotated. */
5044 7177 : annotating_ctxt my_ctxt (cd, arg_idx);
5045 :
           : /* Dereference the argument to get the buffer region to scan.  */
5046 7177 : const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
5047 7177 : const region *buf_reg
5048 7177 : = deref_rvalue (arg_sval, cd.get_arg_tree (arg_idx), &my_ctxt);
5049 :
5050 14354 : if (const svalue *num_bytes_read_sval
5051 7177 : = scan_for_null_terminator (buf_reg,
5052 : cd.get_arg_tree (arg_idx),
5053 : out_sval,
5054 : &my_ctxt))
5055 : {
5056 7014 : if (out_sval)
5057 1139 : gcc_assert (*out_sval);
5058 7014 : if (include_terminator)
5059 : return num_bytes_read_sval;
5060 : else
5061 : {
5062 : /* strlen is (bytes_read - 1). */
5063 5875 : const svalue *one = m_mgr->get_or_create_int_cst (size_type_node, 1);
5064 5875 : return m_mgr->get_or_create_binop (size_type_node,
5065 : MINUS_EXPR,
5066 : num_bytes_read_sval,
5067 5875 : one);
5068 : }
5069 : }
5070 : else
5071 : return nullptr;
5072 : }
5073 :
5074 : /* Remove all bindings overlapping REG within the store. */
5075 :
5076 : void
5077 6130 : region_model::clobber_region (const region *reg)
5078 : {
           : /* Note: no region_model_context is taken, so no diagnostics
           :    (e.g. out-of-bounds) are issued for this operation.  */
5079 6130 : m_store.clobber_region (m_mgr->get_store_manager(), reg);
5080 6130 : }
5081 :
5082 : /* Remove any bindings for REG within the store. */
5083 :
5084 : void
5085 216579 : region_model::purge_region (const region *reg)
5086 : {
           : /* Delegates to the store; no context, so no diagnostics issued.  */
5087 216579 : m_store.purge_region (m_mgr->get_store_manager(), reg);
5088 216579 : }
5089 :
5090 : /* Fill REG with SVAL.
5091 : Use CTXT to report any warnings associated with the write
5092 : (e.g. out-of-bounds). */
5093 :
5094 : void
5095 640 : region_model::fill_region (const region *reg,
5096 : const svalue *sval,
5097 : region_model_context *ctxt)
5098 : {
           : /* Validate the write (reporting to CTXT) before updating the store.  */
5099 640 : check_region_for_write (reg, nullptr, ctxt);
5100 640 : m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
5101 640 : }
5102 :
5103 : /* Zero-fill REG.
5104 : Use CTXT to report any warnings associated with the write
5105 : (e.g. out-of-bounds). */
5106 :
5107 : void
5108 705 : region_model::zero_fill_region (const region *reg,
5109 : region_model_context *ctxt)
5110 : {
           : /* Validate the write (reporting to CTXT) before updating the store.  */
5111 705 : check_region_for_write (reg, nullptr, ctxt);
5112 705 : m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
5113 705 : }
5114 :
5115 : /* Copy NUM_BYTES_SVAL of SVAL to DEST_REG.
5116 : Use CTXT to report any warnings associated with the copy
5117 : (e.g. out-of-bounds writes). */
5118 :
5119 : void
5120 2103 : region_model::write_bytes (const region *dest_reg,
5121 : const svalue *num_bytes_sval,
5122 : const svalue *sval,
5123 : region_model_context *ctxt)
5124 : {
           : /* Model the write as binding SVAL to a NUM_BYTES_SVAL-sized
           :    subregion of DEST_REG; set_value handles the write checks.  */
5125 2103 : const region *sized_dest_reg
5126 2103 : = m_mgr->get_sized_region (dest_reg, NULL_TREE, num_bytes_sval);
5127 2103 : set_value (sized_dest_reg, sval, ctxt);
5128 2103 : }
5129 :
5130 : /* Read NUM_BYTES_SVAL from SRC_REG.
5131 : Use CTXT to report any warnings associated with the copy
5132 : (e.g. out-of-bounds reads, copying of uninitialized values, etc). */
5133 :
5134 : const svalue *
5135 1092 : region_model::read_bytes (const region *src_reg,
5136 : tree src_ptr_expr,
5137 : const svalue *num_bytes_sval,
5138 : region_model_context *ctxt) const
5139 : {
           : /* An unknown size means an unknown result; skip the region
           :    machinery (and its checks) entirely in that case.  */
5140 1092 : if (num_bytes_sval->get_kind () == SK_UNKNOWN)
5141 187 : return m_mgr->get_or_create_unknown_svalue (NULL_TREE);
5142 905 : const region *sized_src_reg
5143 905 : = m_mgr->get_sized_region (src_reg, NULL_TREE, num_bytes_sval);
5144 905 : const svalue *src_contents_sval = get_store_value (sized_src_reg, ctxt);
5145 905 : check_for_poison (src_contents_sval, src_ptr_expr,
5146 : sized_src_reg, ctxt);
5147 905 : return src_contents_sval;
5148 : }
5149 :
5150 : /* Copy NUM_BYTES_SVAL bytes from SRC_REG to DEST_REG.
5151 : Use CTXT to report any warnings associated with the copy
5152 : (e.g. out-of-bounds reads/writes, copying of uninitialized values,
5153 : etc). */
5154 :
5155 : void
5156 506 : region_model::copy_bytes (const region *dest_reg,
5157 : const region *src_reg,
5158 : tree src_ptr_expr,
5159 : const svalue *num_bytes_sval,
5160 : region_model_context *ctxt)
5161 : {
           : /* Compose the read and write primitives; each performs its own
           :    checks against CTXT.  */
5162 506 : const svalue *data_sval
5163 506 : = read_bytes (src_reg, src_ptr_expr, num_bytes_sval, ctxt);
5164 506 : write_bytes (dest_reg, num_bytes_sval, data_sval, ctxt);
5165 : }
5166 :
5167 : /* Mark REG as having unknown content. */
5168 :
5169 : void
5170 253 : region_model::mark_region_as_unknown (const region *reg,
5171 : uncertainty_t *uncertainty)
5172 : {
           : /* Collect the svalues that might still be live after the wipe,
           :    and notify the store about them afterwards.  */
5173 253 : svalue_set maybe_live_values;
5174 253 : m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
5175 : uncertainty, &maybe_live_values);
5176 253 : m_store.on_maybe_live_values (*m_mgr->get_store_manager (),
5177 : maybe_live_values);
5178 253 : }
5179 :
5180 : /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
5181 : this model. */
5182 :
5183 : tristate
5184 210015 : region_model::eval_condition (const svalue *lhs,
5185 : enum tree_code op,
5186 : const svalue *rhs) const
5187 : {
           : /* Returns TS_TRUE/TS_FALSE when the condition provably holds/fails
           :    within this model, TS_UNKNOWN otherwise.  A cascade of cheap
           :    value-based tests is tried first; the constraint manager is the
           :    final fall-back.  */
5188 210015 : gcc_assert (lhs);
5189 210015 : gcc_assert (rhs);
5190 :
5191 : /* For now, make no attempt to capture constraints on floating-point
5192 : values. */
5193 210015 : if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
5194 363148 : || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
5195 72 : return tristate::unknown ();
5196 :
5197 : /* See what we know based on the values. */
5198 :
5199 : /* Unwrap any unmergeable values. */
5200 209943 : lhs = lhs->unwrap_any_unmergeable ();
5201 209943 : rhs = rhs->unwrap_any_unmergeable ();
5202 :
5203 209943 : if (lhs == rhs)
5204 : {
5205 : /* If we have the same svalue, then we have equality
5206 : (apart from NaN-handling).
5207 : TODO: should this definitely be the case for poisoned values? */
5208 : /* Poisoned and unknown values are "unknowable". */
5209 21209 : if (lhs->get_kind () == SK_POISONED
5210 21209 : || lhs->get_kind () == SK_UNKNOWN)
5211 9564 : return tristate::TS_UNKNOWN;
5212 :
5213 11645 : switch (op)
5214 : {
5215 8622 : case EQ_EXPR:
5216 8622 : case GE_EXPR:
5217 8622 : case LE_EXPR:
5218 8622 : return tristate::TS_TRUE;
5219 :
5220 3023 : case NE_EXPR:
5221 3023 : case GT_EXPR:
5222 3023 : case LT_EXPR:
5223 3023 : return tristate::TS_FALSE;
5224 :
5225 : default:
5226 : /* For other ops, use the logic below. */
5227 : break;
5228 : }
5229 : }
5230 :
5231 : /* If we have a pair of region_svalues, compare them. */
5232 188734 : if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
5233 20688 : if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
5234 : {
5235 304 : tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
5236 304 : if (res.is_known ())
5237 296 : return res;
5238 : /* Otherwise, only known through constraints. */
5239 : }
5240 :
5241 188438 : if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
5242 : {
5243 : /* If we have a pair of constants, compare them. */
5244 47688 : if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
5245 14681 : return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
5246 : else
5247 : {
5248 : /* When we have one constant, put it on the RHS. */
5249 33007 : std::swap (lhs, rhs);
5250 33007 : op = swap_tree_comparison (op);
5251 : }
5252 : }
           : /* From here on, any constant operand is on the RHS.  */
5253 173757 : gcc_assert (lhs->get_kind () != SK_CONSTANT);
5254 :
5255 : /* Handle comparison against zero. */
5256 173757 : if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
5257 145190 : if (zerop (cst_rhs->get_constant ()))
5258 : {
5259 92187 : if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
5260 : {
5261 : /* A region_svalue is a non-NULL pointer, except in certain
5262 : special cases (see the comment for region::non_null_p). */
5263 20181 : const region *pointee = ptr->get_pointee ();
5264 20181 : if (pointee->non_null_p ())
5265 : {
5266 10426 : switch (op)
5267 : {
5268 0 : default:
5269 0 : gcc_unreachable ();
5270 :
5271 206 : case EQ_EXPR:
5272 206 : case GE_EXPR:
5273 206 : case LE_EXPR:
5274 206 : return tristate::TS_FALSE;
5275 :
5276 10220 : case NE_EXPR:
5277 10220 : case GT_EXPR:
5278 10220 : case LT_EXPR:
5279 10220 : return tristate::TS_TRUE;
5280 : }
5281 : }
5282 : }
5283 72006 : else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
5284 : {
5285 : /* Treat offsets from a non-NULL pointer as being non-NULL. This
5286 : isn't strictly true, in that eventually ptr++ will wrap
5287 : around and be NULL, but it won't occur in practise and thus
5288 : can be used to suppress effectively false positives that we
5289 : shouldn't warn for. */
5290 19696 : if (binop->get_op () == POINTER_PLUS_EXPR)
5291 : {
5292 12341 : tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
5293 12341 : if (lhs_ts.is_known ())
5294 11683 : return lhs_ts;
5295 : }
5296 : }
5297 104620 : else if (const unaryop_svalue *unaryop
5298 52310 : = lhs->dyn_cast_unaryop_svalue ())
5299 : {
5300 2801 : if (unaryop->get_op () == NEGATE_EXPR)
5301 : {
5302 : /* e.g. "-X <= 0" is equivalent to X >= 0". */
5303 51 : tristate lhs_ts = eval_condition (unaryop->get_arg (),
5304 : swap_tree_comparison (op),
5305 : rhs);
5306 51 : if (lhs_ts.is_known ())
5307 48 : return lhs_ts;
5308 : }
5309 : }
5310 : }
5311 :
5312 : /* Handle rejection of equality for comparisons of the initial values of
5313 : "external" values (such as params) with the address of locals. */
5314 151600 : if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
5315 36243 : if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
5316 : {
5317 89 : tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
5318 89 : if (res.is_known ())
5319 32 : return res;
5320 : }
5321 151568 : if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
5322 5126 : if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
5323 : {
5324 161 : tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
5325 161 : if (res.is_known ())
5326 0 : return res;
5327 : }
5328 :
           : /* Widening svalues (from loop iteration) vs a constant.  */
5329 151568 : if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
5330 5218 : if (tree rhs_cst = rhs->maybe_get_constant ())
5331 : {
5332 2838 : tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
5333 2838 : if (res.is_known ())
5334 65 : return res;
5335 : }
5336 :
5337 : /* Handle comparisons between two svalues with more than one operand. */
5338 151503 : if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
5339 : {
5340 26356 : switch (op)
5341 : {
5342 : default:
5343 : break;
5344 3578 : case EQ_EXPR:
5345 3578 : {
5346 : /* TODO: binops can be equal even if they are not structurally
5347 : equal in case of commutative operators. */
5348 3578 : tristate res = structural_equality (lhs, rhs);
5349 3578 : if (res.is_true ())
5350 44 : return res;
5351 : }
5352 3534 : break;
5353 1073 : case LE_EXPR:
5354 1073 : {
5355 1073 : tristate res = structural_equality (lhs, rhs);
5356 1073 : if (res.is_true ())
5357 0 : return res;
5358 : }
5359 1073 : break;
5360 7162 : case GE_EXPR:
5361 7162 : {
5362 7162 : tristate res = structural_equality (lhs, rhs);
5363 7162 : if (res.is_true ())
5364 39 : return res;
5365 7123 : res = symbolic_greater_than (binop, rhs);
5366 7123 : if (res.is_true ())
5367 36 : return res;
5368 : }
5369 : break;
5370 8568 : case GT_EXPR:
5371 8568 : {
5372 8568 : tristate res = symbolic_greater_than (binop, rhs);
5373 8568 : if (res.is_true ())
5374 158 : return res;
5375 : }
5376 8410 : break;
5377 : }
5378 : }
5379 :
5380 : /* Try range_op, but avoid cases where we have been sloppy about types. */
5381 151226 : if (lhs->get_type ()
5382 104926 : && rhs->get_type ()
5383 250617 : && range_compatible_p (lhs->get_type (), rhs->get_type ()))
5384 : {
5385 93189 : value_range lhs_vr, rhs_vr;
5386 93189 : if (lhs->maybe_get_value_range (lhs_vr))
5387 43378 : if (rhs->maybe_get_value_range (rhs_vr))
5388 : {
5389 42808 : range_op_handler handler (op);
5390 42808 : if (handler)
5391 : {
5392 42808 : int_range_max out;
5393 42808 : if (handler.fold_range (out, boolean_type_node, lhs_vr, rhs_vr))
5394 : {
           : /* The folded boolean range decides the condition only if
           :    it excludes one of the two outcomes.  */
5395 42808 : if (out.zero_p ())
5396 187 : return tristate::TS_FALSE;
5397 42621 : if (out.nonzero_p ())
5398 155 : return tristate::TS_TRUE;
5399 : }
5400 42808 : }
5401 : }
5402 93189 : }
5403 :
5404 : /* Attempt to unwrap cast if there is one, and the types match. */
5405 150884 : tree lhs_type = lhs->get_type ();
5406 150884 : tree rhs_type = rhs->get_type ();
5407 150884 : if (lhs_type && rhs_type)
5408 : {
5409 99049 : const unaryop_svalue *lhs_un_op = dyn_cast <const unaryop_svalue *> (lhs);
5410 99049 : const unaryop_svalue *rhs_un_op = dyn_cast <const unaryop_svalue *> (rhs);
5411 3512 : if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
5412 3362 : && rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
5413 99166 : && lhs_type == rhs_type)
5414 : {
5415 117 : tristate res = eval_condition (lhs_un_op->get_arg (),
5416 : op,
5417 : rhs_un_op->get_arg ());
5418 117 : if (res.is_known ())
5419 0 : return res;
5420 : }
5421 3395 : else if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
5422 102177 : && lhs_type == rhs_type)
5423 : {
5424 2665 : tristate res = eval_condition (lhs_un_op->get_arg (), op, rhs);
5425 2665 : if (res.is_known ())
5426 35 : return res;
5427 : }
5428 2419 : else if (rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
5429 98686 : && lhs_type == rhs_type)
5430 : {
5431 1556 : tristate res = eval_condition (lhs, op, rhs_un_op->get_arg ());
5432 1556 : if (res.is_known ())
5433 0 : return res;
5434 : }
5435 : }
5436 :
5437 : /* Otherwise, try constraints.
5438 : Cast to const to ensure we don't change the constraint_manager as we
5439 : do this (e.g. by creating equivalence classes). */
5440 150849 : const constraint_manager *constraints = m_constraints;
5441 150849 : return constraints->eval_condition (lhs, op, rhs);
5442 : }
5443 :
5444 : /* Subroutine of region_model::eval_condition, for rejecting
5445 : equality of INIT_VAL(PARM) with &LOCAL. */
5446 :
5447 : tristate
5448 250 : region_model::compare_initial_and_pointer (const initial_svalue *init,
5449 : const region_svalue *ptr) const
5450 : {
5451 250 : const region *pointee = ptr->get_pointee ();
5452 :
5453 : /* If we have a pointer to something within a stack frame, it can't be the
5454 : initial value of a param. */
5455 250 : if (pointee->maybe_get_frame_region ())
5456 32 : if (init->initial_value_of_param_p ())
5457 32 : return tristate::TS_FALSE;
5458 :
           : /* This helper only ever rejects equality; anything else is unknown.  */
5459 218 : return tristate::TS_UNKNOWN;
5460 : }
5461 :
5462 : /* Return true if SVAL is definitely positive. */
5463 :
5464 : static bool
5465 14300 : is_positive_svalue (const svalue *sval)
5466 : {
           : /* A non-zero constant whose range is known to be positive.  */
5467 14300 : if (tree cst = sval->maybe_get_constant ())
5468 14025 : return !zerop (cst) && get_range_pos_neg (cst) == 1;
5469 275 : tree type = sval->get_type ();
5470 275 : if (!type)
5471 : return false;
5472 : /* Consider a binary operation size_t + int. The analyzer wraps the int in
5473 : an unaryop_svalue, converting it to a size_t, but in the dynamic execution
5474 : the result is smaller than the first operand. Thus, we have to look if
5475 : the argument of the unaryop_svalue is also positive. */
5476 216 : if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
5477 10 : return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
5478 18 : && is_positive_svalue (un_op->get_arg ());
           : /* Otherwise fall back on the signedness of the type.  */
5479 206 : return TYPE_UNSIGNED (type);
5480 : }
5481 :
5482 : /* Return true if A is definitely larger than B.
5483 :
5484 : Limitation: does not account for integer overflows and does not try to
5485 : return false, so it can not be used negated. */
5486 :
5487 : tristate
5488 15691 : region_model::symbolic_greater_than (const binop_svalue *bin_a,
5489 : const svalue *b) const
5490 : {
           : /* Only PLUS and MULT are handled; per the comment above, this can
           :    only conclude TS_TRUE (it never proves the negation).  */
5491 15691 : if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
5492 : {
5493 : /* Eliminate the right-hand side of both svalues. */
5494 14332 : if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
5495 2544 : if (bin_a->get_op () == bin_b->get_op ()
5496 1412 : && eval_condition (bin_a->get_arg1 (),
5497 : GT_EXPR,
5498 1412 : bin_b->get_arg1 ()).is_true ()
5499 3956 : && eval_condition (bin_a->get_arg0 (),
5500 : GE_EXPR,
5501 63 : bin_b->get_arg0 ()).is_true ())
5502 40 : return tristate (tristate::TS_TRUE);
5503 :
5504 : /* Otherwise, try to remove a positive offset or factor from BIN_A. */
5505 14292 : if (is_positive_svalue (bin_a->get_arg1 ())
5506 14292 : && eval_condition (bin_a->get_arg0 (),
5507 13581 : GE_EXPR, b).is_true ())
5508 154 : return tristate (tristate::TS_TRUE);
5509 : }
5510 15497 : return tristate::unknown ();
5511 : }
5512 :
5513 : /* Return true if A and B are equal structurally.
5514 :
5515 : Structural equality means that A and B are equal if the svalues A and B have
5516 : the same nodes at the same positions in the tree and the leafs are equal.
5517 : Equality for conjured_svalues and initial_svalues is determined by comparing
5518 : the pointers while constants are compared by value. That behavior is useful
5519 : to check for binaryop_svalues that evaluate to the same concrete value but
5520 : might use one operand with a different type but the same constant value.
5521 :
5522 : For example,
5523 : binop_svalue (mult_expr,
5524 : initial_svalue (‘size_t’, decl_region (..., 'some_var')),
5525 : constant_svalue (‘size_t’, 4))
5526 : and
5527 : binop_svalue (mult_expr,
5528 : initial_svalue (‘size_t’, decl_region (..., 'some_var')),
5529 : constant_svalue (‘sizetype’, 4))
5530 : are structurally equal. A concrete C code example, where this occurs, can
5531 : be found in test7 of out-of-bounds-5.c. */
5532 :
tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  /* Dispatch on the kind of A; each case returns FALSE when B is not of
     the same kind.  */
  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	/* Constants are compared by integer value, so that e.g. a
	   ‘size_t’ 4 and a ‘sizetype’ 4 compare equal.  */
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      /* B has no constant value.  */
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  /* Unary ops are equal if their types and opcodes match and
	     their arguments are themselves structurally equal.  */
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      /* B is not a unary op.  */
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  /* Binary ops are equal if the opcodes match and both pairs of
	     arguments are structurally equal (note: the binops' own types
	     are deliberately not compared here).  */
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      /* B is not a binop.  */
      return tristate (tristate::TS_FALSE);
    }
}
5578 :
5579 : /* Handle various constraints of the form:
5580 : LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
5581 : OP : == or !=
5582 : RHS: zero
5583 : and (with a cast):
5584 : LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
5585 : OP : == or !=
5586 : RHS: zero
5587 : by adding constraints for INNER_LHS INNEROP INNER_RHS.
5588 :
5589 : Return true if this function can fully handle the constraint; if
5590 : so, add the implied constraint(s) and write true to *OUT if they
5591 : are consistent with existing constraints, or write false to *OUT
5592 : if they contradicts existing constraints.
5593 :
5594 : Return false for cases that this function doesn't know how to handle.
5595 :
5596 : For example, if we're checking a stored conditional, we'll have
5597 : something like:
5598 : LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
5599 : OP : NE_EXPR
5600 : RHS: zero
5601 : which this function can turn into an add_constraint of:
5602 : (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
5603 :
5604 : Similarly, optimized && and || conditionals lead to e.g.
5605 : if (p && q)
5606 : becoming gimple like this:
5607 : _1 = p_6 == 0B;
5608 : _2 = q_8 == 0B
5609 : _3 = _1 | _2
5610 : On the "_3 is false" branch we can have constraints of the form:
5611 : ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
5612 : | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
5613 : == 0
5614 : which implies that both _1 and _2 are false,
5615 : which this function can turn into a pair of add_constraints of
5616 : (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
5617 : and:
5618 : (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
5619 :
5620 : bool
5621 53099 : region_model::add_constraints_from_binop (const svalue *outer_lhs,
5622 : enum tree_code outer_op,
5623 : const svalue *outer_rhs,
5624 : bool *out,
5625 : region_model_context *ctxt)
5626 : {
5627 55123 : while (const svalue *cast = outer_lhs->maybe_undo_cast ())
5628 : outer_lhs = cast;
5629 53099 : const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
5630 53099 : if (!binop_sval)
5631 : return false;
5632 7777 : if (!outer_rhs->all_zeroes_p ())
5633 : return false;
5634 :
5635 5411 : const svalue *inner_lhs = binop_sval->get_arg0 ();
5636 5411 : enum tree_code inner_op = binop_sval->get_op ();
5637 5411 : const svalue *inner_rhs = binop_sval->get_arg1 ();
5638 :
5639 5411 : if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
5640 : return false;
5641 :
5642 : /* We have either
5643 : - "OUTER_LHS != false" (i.e. OUTER is true), or
5644 : - "OUTER_LHS == false" (i.e. OUTER is false). */
5645 4757 : bool is_true = outer_op == NE_EXPR;
5646 :
5647 4757 : switch (inner_op)
5648 : {
5649 : default:
5650 : return false;
5651 :
5652 2478 : case EQ_EXPR:
5653 2478 : case NE_EXPR:
5654 2478 : case GE_EXPR:
5655 2478 : case GT_EXPR:
5656 2478 : case LE_EXPR:
5657 2478 : case LT_EXPR:
5658 2478 : {
5659 : /* ...and "(inner_lhs OP inner_rhs) == 0"
5660 : then (inner_lhs OP inner_rhs) must have the same
5661 : logical value as LHS. */
5662 2478 : if (!is_true)
5663 1199 : inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
5664 2478 : *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
5665 2478 : return true;
5666 : }
5667 873 : break;
5668 :
5669 873 : case BIT_AND_EXPR:
5670 873 : if (is_true)
5671 : {
5672 : /* ...and "(inner_lhs & inner_rhs) != 0"
5673 : then both inner_lhs and inner_rhs must be true. */
5674 447 : const svalue *false_sval
5675 447 : = m_mgr->get_or_create_constant_svalue (boolean_false_node);
5676 447 : bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
5677 447 : bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
5678 447 : *out = sat1 && sat2;
5679 447 : return true;
5680 : }
5681 : return false;
5682 :
5683 644 : case BIT_IOR_EXPR:
5684 644 : if (!is_true)
5685 : {
5686 : /* ...and "(inner_lhs | inner_rhs) == 0"
5687 : i.e. "(inner_lhs | inner_rhs)" is false
5688 : then both inner_lhs and inner_rhs must be false. */
5689 362 : const svalue *false_sval
5690 362 : = m_mgr->get_or_create_constant_svalue (boolean_false_node);
5691 362 : bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
5692 362 : bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
5693 362 : *out = sat1 && sat2;
5694 362 : return true;
5695 : }
5696 : return false;
5697 : }
5698 : }
5699 :
5700 : /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5701 : If it is consistent with existing constraints, add it, and return true.
5702 : Return false if it contradicts existing constraints.
5703 : Use CTXT for reporting any diagnostics associated with the accesses. */
5704 :
5705 : bool
5706 73620 : region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5707 : region_model_context *ctxt)
5708 : {
5709 : /* For now, make no attempt to capture constraints on floating-point
5710 : values. */
5711 73620 : if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5712 : return true;
5713 :
5714 73250 : const svalue *lhs_sval = get_rvalue (lhs, ctxt);
5715 73250 : const svalue *rhs_sval = get_rvalue (rhs, ctxt);
5716 :
5717 73250 : return add_constraint (lhs_sval, op, rhs_sval, ctxt);
5718 : }
5719 :
5720 : static bool
5721 17084 : unusable_in_infinite_loop_constraint_p (const svalue *sval)
5722 : {
5723 17084 : if (sval->get_kind () == SK_WIDENING)
5724 0 : return true;
5725 : return false;
5726 : }
5727 :
5728 : /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5729 : If it is consistent with existing constraints, add it, and return true.
5730 : Return false if it contradicts existing constraints.
5731 : Use CTXT for reporting any diagnostics associated with the accesses. */
5732 :
bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  /* When checking for infinite loops, widening svalues can't be used
     in constraints; bail out immediately if either side is one.  */
  const bool checking_for_infinite_loop
    = ctxt ? ctxt->checking_for_infinite_loop_p () : false;

  if (checking_for_infinite_loop)
    {
      if (unusable_in_infinite_loop_constraint_p (lhs)
	  || unusable_in_infinite_loop_constraint_p (rhs))
	{
	  gcc_assert (ctxt);
	  ctxt->on_unusable_in_infinite_loop ();
	  return false;
	}
    }

  /* First see if the condition is already known.  */
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  if (checking_for_infinite_loop)
    {
      /* Here, we don't have a definite true/false value, so bail out
	 when checking for infinite loops.  */
      gcc_assert (ctxt);
      ctxt->on_unusable_in_infinite_loop ();
      return false;
    }

  /* Handle "(A OP B) {==,!=} 0" forms by adding constraints on the
     inner condition(s) instead, where possible.  */
  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
5797 :
5798 : /* As above, but when returning false, if OUT is non-NULL, write a
5799 : new rejected_constraint to *OUT. */
5800 :
5801 : bool
5802 72478 : region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5803 : region_model_context *ctxt,
5804 : std::unique_ptr<rejected_constraint> *out)
5805 : {
5806 72478 : bool sat = add_constraint (lhs, op, rhs, ctxt);
5807 72478 : if (!sat && out)
5808 2213 : *out = std::make_unique <rejected_op_constraint> (*this, lhs, op, rhs);
5809 72478 : return sat;
5810 : }
5811 :
5812 : /* Determine what is known about the condition "LHS OP RHS" within
5813 : this model.
5814 : Use CTXT for reporting any diagnostics associated with the accesses. */
5815 :
5816 : tristate
5817 33285 : region_model::eval_condition (tree lhs,
5818 : enum tree_code op,
5819 : tree rhs,
5820 : region_model_context *ctxt) const
5821 : {
5822 : /* For now, make no attempt to model constraints on floating-point
5823 : values. */
5824 33285 : if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5825 16 : return tristate::unknown ();
5826 :
5827 33269 : return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
5828 : }
5829 :
5830 : /* Implementation of region_model::get_representative_path_var.
5831 : Attempt to return a path_var that represents SVAL, or return NULL_TREE.
5832 : Use VISITED to prevent infinite mutual recursion with the overload for
5833 : regions. */
5834 :
path_var
region_model::get_representative_path_var_1 (const svalue *sval,
					     svalue_set *visited,
					     logger *logger) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    {
      /* A constant can still be represented even if already visited.  */
      if (sval->get_kind () == SK_CONSTANT)
	return path_var (sval->maybe_get_constant (), 0);
      else
	return path_var (NULL_TREE, 0);
    }
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited,
						     logger);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  /* Collect candidate path_vars from the store bindings...  */
  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, logger, &pvs);

  /* ...and the constant value, if there is one.  */
  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers: if the pointee
     is representable, express SVAL as "&POINTEE".  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited, logger))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      /* Only field accesses are handled here: "PARENT.FIELD".  */
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited, logger))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  /* Handle binops: "LHS OP RHS", if both operands are representable.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
	  = get_representative_path_var (binop_sval->get_arg0 (), visited,
					 logger))
      if (path_var rhs_pv
	    = get_representative_path_var (binop_sval->get_arg1 (), visited,
					   logger))
	return path_var (build2 (binop_sval->get_op (),
				 sval->get_type (),
				 lhs_pv.m_tree, rhs_pv.m_tree),
			 lhs_pv.m_stack_depth);

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Return the most readable of the collected candidates.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
5916 :
5917 : /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
5918 : Use VISITED to prevent infinite mutual recursion with the overload for
5919 : regions
5920 :
5921 : This function defers to get_representative_path_var_1 to do the work;
5922 : it adds verification that get_representative_path_var_1 returned a tree
5923 : of the correct type. */
5924 :
5925 : path_var
5926 18032 : region_model::get_representative_path_var (const svalue *sval,
5927 : svalue_set *visited,
5928 : logger *logger) const
5929 : {
5930 18032 : if (sval == nullptr)
5931 5247 : return path_var (NULL_TREE, 0);
5932 :
5933 12785 : LOG_SCOPE (logger);
5934 12785 : if (logger)
5935 : {
5936 0 : logger->start_log_line ();
5937 0 : logger->log_partial ("sval: ");
5938 0 : sval->dump_to_pp (logger->get_printer (), true);
5939 0 : logger->end_log_line ();
5940 : }
5941 :
5942 12785 : tree orig_type = sval->get_type ();
5943 :
5944 12785 : path_var result = get_representative_path_var_1 (sval, visited, logger);
5945 :
5946 : /* Verify that the result has the same type as SVAL, if any. */
5947 12785 : if (result.m_tree && orig_type)
5948 10557 : gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
5949 :
5950 12785 : if (logger)
5951 : {
5952 0 : logger->start_log_line ();
5953 0 : logger->log_partial ("sval: ");
5954 0 : sval->dump_to_pp (logger->get_printer (), true);
5955 0 : logger->end_log_line ();
5956 :
5957 0 : if (result.m_tree)
5958 0 : logger->log ("tree: %qE", result.m_tree);
5959 : else
5960 0 : logger->log ("tree: NULL");
5961 : }
5962 :
5963 12785 : return result;
5964 12785 : }
5965 :
5966 : /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
5967 :
5968 : Strip off any top-level cast, to avoid messages like
5969 : double-free of '(void *)ptr'
5970 : from analyzer diagnostics. */
5971 :
5972 : tree
5973 14250 : region_model::get_representative_tree (const svalue *sval, logger *logger) const
5974 : {
5975 14250 : svalue_set visited;
5976 14250 : tree expr = get_representative_path_var (sval, &visited, logger).m_tree;
5977 :
5978 : /* Strip off any top-level cast. */
5979 14250 : if (expr && TREE_CODE (expr) == NOP_EXPR)
5980 459 : expr = TREE_OPERAND (expr, 0);
5981 :
5982 14250 : return fixup_tree_for_diagnostic (expr);
5983 14250 : }
5984 :
5985 : tree
5986 770 : region_model::get_representative_tree (const region *reg, logger *logger) const
5987 : {
5988 770 : svalue_set visited;
5989 770 : tree expr = get_representative_path_var (reg, &visited, logger).m_tree;
5990 :
5991 : /* Strip off any top-level cast. */
5992 770 : if (expr && TREE_CODE (expr) == NOP_EXPR)
5993 0 : expr = TREE_OPERAND (expr, 0);
5994 :
5995 770 : return fixup_tree_for_diagnostic (expr);
5996 770 : }
5997 :
5998 : /* Implementation of region_model::get_representative_path_var.
5999 :
6000 : Attempt to return a path_var that represents REG, or return
6001 : the NULL path_var.
6002 : For example, a region for a field of a local would be a path_var
6003 : wrapping a COMPONENT_REF.
6004 : Use VISITED to prevent infinite mutual recursion with the overload for
6005 : svalues. */
6006 :
path_var
region_model::get_representative_path_var_1 (const region *reg,
					     svalue_set *visited,
					     logger *logger) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_THREAD_LOCAL:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (reg);
	return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
	/* Express "*PTR" as a MEM_REF of the pointer with a zero offset,
	   provided the pointer itself is representable.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited,
							   logger);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* "PARENT.FIELD", provided the parent region is representable.  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* "PARENT[INDEX]", provided both parent and index are
	   representable.  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited,
					 logger);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited, logger);
	/* Only constant byte offsets are expressible here.  */
	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
	  return path_var (NULL_TREE, 0);
	/* Build a MEM_REF of "&PARENT" plus the constant offset,
	   with the offset converted to a char-pointer type.  */
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode,
						  true);
	return path_var (build2 (MEM_REF, reg->get_type (), addr_parent,
				 fold_convert (ptype, offset_pv.m_tree)),
			 parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      /* Sized regions have no tree representation.  */
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	/* Wrap the parent's representation in a NOP_EXPR cast to the
	   region's type.  */
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_VAR_ARG:
    case RK_ERRNO:
    case RK_UNKNOWN:
    case RK_PRIVATE:
      /* These kinds have no tree representation.  */
      return path_var (NULL_TREE, 0);
    }
}
6155 :
6156 : /* Attempt to return a path_var that represents REG, or return
6157 : the NULL path_var.
6158 : For example, a region for a field of a local would be a path_var
6159 : wrapping a COMPONENT_REF.
6160 : Use VISITED to prevent infinite mutual recursion with the overload for
6161 : svalues.
6162 :
6163 : This function defers to get_representative_path_var_1 to do the work;
6164 : it adds verification that get_representative_path_var_1 returned a tree
6165 : of the correct type. */
6166 :
6167 : path_var
6168 15329 : region_model::get_representative_path_var (const region *reg,
6169 : svalue_set *visited,
6170 : logger *logger) const
6171 : {
6172 15329 : LOG_SCOPE (logger);
6173 15329 : if (logger)
6174 : {
6175 0 : logger->start_log_line ();
6176 0 : logger->log_partial ("reg: ");
6177 0 : reg->dump_to_pp (logger->get_printer (), true);
6178 0 : logger->end_log_line ();
6179 : }
6180 :
6181 15329 : path_var result = get_representative_path_var_1 (reg, visited, logger);
6182 :
6183 : /* Verify that the result has the same type as REG, if any. */
6184 15329 : if (result.m_tree && reg->get_type ())
6185 10736 : gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
6186 :
6187 15329 : if (logger)
6188 : {
6189 0 : logger->start_log_line ();
6190 0 : logger->log_partial ("reg: ");
6191 0 : reg->dump_to_pp (logger->get_printer (), true);
6192 0 : logger->end_log_line ();
6193 :
6194 0 : if (result.m_tree)
6195 0 : logger->log ("tree: %qE", result.m_tree);
6196 : else
6197 0 : logger->log ("tree: NULL");
6198 : }
6199 :
6200 30658 : return result;
6201 15329 : }
6202 :
6203 : /* Push a new frame_region on to the stack region.
6204 : Populate the frame_region with child regions for the function call's
6205 : parameters, using values from the arguments at the callsite in the
6206 : caller's frame. */
6207 :
6208 : void
6209 11361 : region_model::update_for_gcall (const gcall &call_stmt,
6210 : region_model_context *ctxt,
6211 : function *callee)
6212 : {
6213 : /* Build a vec of argument svalues, using the current top
6214 : frame for resolving tree expressions. */
6215 11361 : auto_vec<const svalue *> arg_svals (gimple_call_num_args (&call_stmt));
6216 :
6217 24029 : for (unsigned i = 0; i < gimple_call_num_args (&call_stmt); i++)
6218 : {
6219 12668 : tree arg = gimple_call_arg (&call_stmt, i);
6220 12668 : arg_svals.quick_push (get_rvalue (arg, ctxt));
6221 : }
6222 :
6223 11361 : if(!callee)
6224 : {
6225 : /* Get the function * from the gcall. */
6226 0 : tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
6227 0 : callee = DECL_STRUCT_FUNCTION (fn_decl);
6228 : }
6229 :
6230 0 : gcc_assert (callee);
6231 11361 : push_frame (*callee, &call_stmt, &arg_svals, ctxt);
6232 11361 : }
6233 :
6234 : /* Pop the top-most frame_region from the stack, and copy the return
6235 : region's values (if any) into the region for the lvalue of the LHS of
6236 : the call (if any). */
6237 :
6238 : void
6239 8310 : region_model::update_for_return_gcall (const gcall &call_stmt,
6240 : region_model_context *ctxt)
6241 : {
6242 : /* Get the lvalue for the result of the call, passing it to pop_frame,
6243 : so that pop_frame can determine the region with respect to the
6244 : *caller* frame. */
6245 8310 : tree lhs = gimple_call_lhs (&call_stmt);
6246 8310 : pop_frame (lhs, nullptr, ctxt, &call_stmt);
6247 8310 : }
6248 :
6249 : /* Attempt to use R to replay SUMMARY into this object.
6250 : Return true if it is possible. */
6251 :
bool
region_model::replay_call_summary (call_summary_replay &r,
				   const region_model &summary)
{
  /* A summary is recorded for a single frame.  */
  gcc_assert (summary.get_stack_depth () == 1);

  m_store.replay_call_summary (r, summary.m_store);

  if (r.get_ctxt ())
    r.get_ctxt ()->maybe_did_work ();

  /* Fail if the summary's constraints can't be replayed into this
     model's constraints.  */
  if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
    return false;

  /* Copy the summary's dynamic extents into this model, translating
     each region/svalue from the summary's frame into the caller's;
     entries that can't be translated are skipped.  */
  for (auto kv : summary.m_dynamic_extents)
    {
      const region *summary_reg = kv.first;
      const region *caller_reg = r.convert_region_from_summary (summary_reg);
      if (!caller_reg)
	continue;
      const svalue *summary_sval = kv.second;
      const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
      if (!caller_sval)
	continue;
      m_dynamic_extents.put (caller_reg, caller_sval);
    }

  return true;
}
6281 :
6282 : /* For use with push_frame when handling a top-level call within the analysis.
6283 : PARAM has a defined but unknown initial value.
6284 : Anything it points to has escaped, since the calling context "knows"
6285 : the pointer, and thus calls to unknown functions could read/write into
6286 : the region.
6287 : If NONNULL is true, then assume that PARAM must be non-NULL. */
6288 :
6289 : void
6290 18335 : region_model::on_top_level_param (tree param,
6291 : bool nonnull,
6292 : region_model_context *ctxt)
6293 : {
6294 18335 : if (POINTER_TYPE_P (TREE_TYPE (param)))
6295 : {
6296 8709 : const region *param_reg = get_lvalue (param, ctxt);
6297 8709 : const svalue *init_ptr_sval
6298 8709 : = m_mgr->get_or_create_initial_value (param_reg);
6299 8709 : const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
6300 8709 : store_manager *store_mgr = m_mgr->get_store_manager ();
6301 8709 : m_store.mark_as_escaped (*store_mgr, pointee_reg);
6302 8709 : if (nonnull)
6303 : {
6304 455 : const svalue *null_ptr_sval
6305 455 : = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
6306 455 : add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
6307 : }
6308 : }
6309 18335 : }
6310 :
6311 : /* Update this region_model to reflect pushing a frame onto the stack
6312 : for a call to FUN.
6313 :
6314 : If CALL_STMT is non-NULL, this is for the interprocedural case where
6315 : we already have an execution path into the caller. It can be NULL for
6316 : top-level entrypoints into the analysis, or in selftests.
6317 :
6318 : If ARG_SVALS is non-NULL, use it to populate the parameters
6319 : in the new frame.
6320 : Otherwise, the params have their initial_svalues.
6321 :
6322 : Return the frame_region for the new frame. */
6323 :
6324 : const region *
6325 31011 : region_model::push_frame (const function &fun,
6326 : const gcall *call_stmt,
6327 : const vec<const svalue *> *arg_svals,
6328 : region_model_context *ctxt)
6329 : {
6330 31011 : tree fndecl = fun.decl;
6331 31011 : if (arg_svals)
6332 : {
6333 : /* If the result of the callee is DECL_BY_REFERENCE, then
6334 : we'll need to store a reference to the caller's lhs of
6335 : CALL_STMT within callee's result.
6336 : If so, determine the region of CALL_STMT's lhs within
6337 : the caller's frame before updating m_current_frame. */
6338 11361 : const region *caller_return_by_reference_reg = nullptr;
6339 11361 : if (tree result = DECL_RESULT (fndecl))
6340 11361 : if (DECL_BY_REFERENCE (result))
6341 : {
6342 39 : gcc_assert (call_stmt);
6343 39 : tree lhs = gimple_call_lhs (call_stmt);
6344 39 : gcc_assert (lhs);
6345 39 : caller_return_by_reference_reg = get_lvalue (lhs, ctxt);
6346 : }
6347 :
6348 : /* Update m_current_frame. */
6349 11361 : m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
6350 :
6351 : /* Arguments supplied from a caller frame. */
6352 11361 : unsigned idx = 0;
6353 23611 : for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
6354 12250 : iter_parm = DECL_CHAIN (iter_parm), ++idx)
6355 : {
6356 : /* If there's a mismatching declaration, the call stmt might
6357 : not have enough args. Handle this case by leaving the
6358 : rest of the params as uninitialized. */
6359 12253 : if (idx >= arg_svals->length ())
6360 : break;
6361 12250 : tree parm_lval = iter_parm;
6362 12250 : if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
6363 11262 : parm_lval = parm_default_ssa;
6364 12250 : const region *parm_reg = get_lvalue (parm_lval, ctxt);
6365 12250 : const svalue *arg_sval = (*arg_svals)[idx];
6366 12250 : set_value (parm_reg, arg_sval, ctxt);
6367 : }
6368 :
6369 : /* Handle any variadic args. */
6370 : unsigned va_arg_idx = 0;
6371 11779 : for (; idx < arg_svals->length (); idx++, va_arg_idx++)
6372 : {
6373 418 : const svalue *arg_sval = (*arg_svals)[idx];
6374 418 : const region *var_arg_reg
6375 418 : = m_mgr->get_var_arg_region (m_current_frame,
6376 : va_arg_idx);
6377 418 : set_value (var_arg_reg, arg_sval, ctxt);
6378 : }
6379 :
6380 : /* If the result of the callee is DECL_BY_REFERENCE, then above
6381 : we should have determined the region within the
6382 : caller's frame that the callee will be writing back to.
6383 : Use this now to initialize the reference in callee's frame. */
6384 11361 : if (tree result = DECL_RESULT (fndecl))
6385 11361 : if (DECL_BY_REFERENCE (result))
6386 : {
6387 : /* Get reference to the caller lhs. */
6388 39 : gcc_assert (caller_return_by_reference_reg);
6389 39 : const svalue *ref_sval
6390 39 : = m_mgr->get_ptr_svalue (TREE_TYPE (result),
6391 : caller_return_by_reference_reg);
6392 :
6393 : /* Get region for default val of DECL_RESULT within the
6394 : callee. */
6395 39 : if (tree result_default_ssa = get_ssa_default_def (fun, result))
6396 : {
6397 36 : const region *callee_result_reg
6398 36 : = get_lvalue (result_default_ssa, ctxt);
6399 :
6400 : /* Set the callee's reference to refer to the caller's lhs. */
6401 36 : set_value (callee_result_reg, ref_sval, ctxt);
6402 : }
6403 : }
6404 : }
6405 : else
6406 : {
6407 : /* Otherwise we have a top-level call within the analysis. The params
6408 : have defined but unknown initial values.
6409 : Anything they point to has escaped. */
6410 :
6411 : /* Update m_current_frame. */
6412 19650 : m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
6413 :
6414 : /* Handle "__attribute__((nonnull))". */
6415 19650 : tree fntype = TREE_TYPE (fndecl);
6416 19650 : bitmap nonnull_args = get_nonnull_args (fntype);
6417 :
6418 19650 : unsigned parm_idx = 0;
6419 37985 : for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
6420 18335 : iter_parm = DECL_CHAIN (iter_parm))
6421 : {
6422 18335 : bool non_null = (nonnull_args
6423 18335 : ? (bitmap_empty_p (nonnull_args)
6424 514 : || bitmap_bit_p (nonnull_args, parm_idx))
6425 18335 : : false);
6426 18335 : if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
6427 15455 : on_top_level_param (parm_default_ssa, non_null, ctxt);
6428 : else
6429 2880 : on_top_level_param (iter_parm, non_null, ctxt);
6430 18335 : parm_idx++;
6431 : }
6432 :
6433 19650 : BITMAP_FREE (nonnull_args);
6434 : }
6435 :
6436 31011 : return m_current_frame;
6437 : }
6438 :
6439 : /* Get the function of the top-most frame in this region_model's stack.
6440 : There must be such a frame. */
6441 :
6442 : const function *
6443 184 : region_model::get_current_function () const
6444 : {
6445 184 : const frame_region *frame = get_current_frame ();
6446 184 : gcc_assert (frame);
6447 184 : return &frame->get_function ();
6448 : }
6449 :
6450 : /* Custom region_model_context for the assignment to the result
6451 : at a call statement when popping a frame (PR analyzer/106203). */
6452 :
6453 : class caller_context : public region_model_context_decorator
6454 : {
6455 : public:
6456 4135 : caller_context (region_model_context *inner,
6457 : const gcall *call_stmt,
6458 : const frame_region &caller_frame)
6459 4135 : : region_model_context_decorator (inner),
6460 4135 : m_call_stmt (call_stmt),
6461 4135 : m_caller_frame (caller_frame)
6462 : {}
6463 :
6464 : pending_location
6465 9 : get_pending_location_for_diag () const override
6466 : {
6467 9 : pending_location ploc
6468 9 : = region_model_context_decorator::get_pending_location_for_diag ();
6469 :
6470 9 : ploc.m_event_loc_info
6471 9 : = event_loc_info (m_call_stmt->location,
6472 9 : m_caller_frame.get_fndecl (),
6473 9 : m_caller_frame.get_stack_depth ());
6474 :
6475 9 : return ploc;
6476 : }
6477 :
6478 8279 : const gimple *get_stmt () const override
6479 : {
6480 8279 : return m_call_stmt;
6481 : };
6482 :
6483 : private:
6484 : const gcall *m_call_stmt;
6485 : const frame_region &m_caller_frame;
6486 : };
6487 :
6488 :
6489 : /* Pop the topmost frame_region from this region_model's stack;
6490 :
6491 : If RESULT_LVALUE is non-null, copy any return value from the frame
6492 : into the corresponding region (evaluated with respect to the *caller*
6493 : frame, rather than the called frame).
6494 : If OUT_RESULT is non-null, copy any return value from the frame
6495 : into *OUT_RESULT.
6496 :
6497 : If non-null, use CALL_STMT as the location when complaining about
6498 : assignment of the return value to RESULT_LVALUE.
6499 :
6500 : If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
6501 : This is for use when unwinding frames e.g. due to longjmp, to suppress
6502 : erroneously reporting uninitialized return values.
6503 :
6504 : Purge the frame region and all its descendent regions.
6505 : Convert any pointers that point into such regions into
6506 : poison_kind::popped_stack svalues. */
6507 :
void
region_model::pop_frame (tree result_lvalue,
			 const svalue **out_result,
			 region_model_context *ctxt,
			 const gcall *call_stmt,
			 bool eval_return_svalue)
{
  gcc_assert (m_current_frame);

  /* Snapshot the model before any mutation, for use when publishing
     the "frame popped" event at the end of this function.  */
  const region_model pre_popped_model = *this;
  const frame_region *frame_reg = m_current_frame;

  /* Notify state machines.  */
  if (ctxt)
    ctxt->on_pop_frame (frame_reg);

  /* Evaluate the result, within the callee frame.
     This must happen before m_current_frame is updated below, so that
     DECL_RESULT is looked up in the callee's frame.  */
  tree fndecl = m_current_frame->get_function ().decl;
  tree result = DECL_RESULT (fndecl);
  const svalue *retval = nullptr;
  if (result
      && TREE_TYPE (result) != void_type_node
      && eval_return_svalue)
    {
      retval = get_rvalue (result, ctxt);
      if (out_result)
	*out_result = retval;
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  if (result_lvalue
      && retval
      /* Don't write back for DECL_BY_REFERENCE; the writes
	 should have happened within the callee already.  */
      && !DECL_BY_REFERENCE (result))
    {
      gcc_assert (eval_return_svalue);

      /* Compute result_dst_reg using RESULT_LVALUE *after* popping
	 the frame, but before poisoning pointers into the old frame.  */
      const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);

      /* Assign retval to result_dst_reg, using caller_context
	 to set the call_stmt and the popped_frame for any diagnostics
	 due to the assignment.  */
      gcc_assert (m_current_frame);
      caller_context caller_ctxt (ctxt, call_stmt, *m_current_frame);
      set_value (result_dst_reg, retval, call_stmt ? &caller_ctxt : ctxt);
    }

  /* Purge the popped frame's bindings, and poison any pointers that
     still point into it.  */
  unbind_region_and_descendents (frame_reg,poison_kind::popped_stack);

  /* If anyone is listening, publish the frame-popped event, giving
     both the pre- and post-pop models.  */
  if (auto chan = g->get_channels ().analyzer_events_channel.get_if_active ())
    {
      gcc::topics::analyzer_events::on_frame_popped msg
	{this, &pre_popped_model, retval, ctxt};
      chan->publish (msg);
    }
}
6569 :
6570 : /* Get the number of frames in this region_model's stack. */
6571 :
6572 : int
6573 5407076 : region_model::get_stack_depth () const
6574 : {
6575 5407076 : const frame_region *frame = get_current_frame ();
6576 5407076 : if (frame)
6577 5390226 : return frame->get_stack_depth ();
6578 : else
6579 : return 0;
6580 : }
6581 :
6582 : /* Get the frame_region with the given index within the stack.
6583 : The frame_region must exist. */
6584 :
6585 : const frame_region *
6586 1609692 : region_model::get_frame_at_index (int index) const
6587 : {
6588 1609692 : const frame_region *frame = get_current_frame ();
6589 1609692 : gcc_assert (frame);
6590 1609692 : gcc_assert (index >= 0);
6591 1609692 : gcc_assert (index <= frame->get_index ());
6592 1835530 : while (index != frame->get_index ())
6593 : {
6594 225838 : frame = frame->get_calling_frame ();
6595 225838 : gcc_assert (frame);
6596 : }
6597 1609692 : return frame;
6598 : }
6599 :
/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
					     enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound.
     Collect first, purge second, so that the store's cluster map isn't
     mutated whilst we're iterating over it.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
	base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).
     NOTE(review): this removes entries whilst ranging over
     m_dynamic_extents; presumably safe for this map type — verify.  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
	unset_dynamic_extents (iter_reg);
    }
}
6634 :
/* Find any pointers to REG or its descendents; convert them to
   poisoned values of kind PKIND.  */

void
region_model::poison_any_pointers_to_descendents (const region *reg,
						  enum poison_kind pkind)
{
  /* Walk every binding of every cluster in the store, looking for
     pointer (region_svalue) values.  */
  for (const auto &cluster_iter : m_store)
    {
      binding_cluster *cluster = cluster_iter.second;
      for (auto iter = cluster->begin ();
	   iter != cluster->end ();
	   ++iter)
	{
	  auto bp = *iter;
	  const svalue *sval = bp.m_sval;
	  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
	    {
	      const region *ptr_dst = ptr_sval->get_pointee ();
	      /* Poison ptrs to descendents of REG, but not to REG itself,
		 otherwise double-free detection doesn't work (since sm-state
		 for "free" is stored on the original ptr svalue).  */
	      if (ptr_dst->descendent_of_p (reg)
		  && ptr_dst != reg)
		{
		  /* Overwrite the binding in place with a poisoned value
		     of the same type as the old pointer.  */
		  const svalue *new_sval
		    = m_mgr->get_or_create_poisoned_svalue (pkind,
							    sval->get_type ());
		  cluster->get_map ().overwrite (iter, new_sval);
		}
	    }
	}
    }
}
6669 :
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.
   Return true if the merge succeeded; false if the models cannot
   be merged (in which case OUT_MODEL is in an unspecified state).  */

bool
region_model::can_merge_with_p (const region_model &other_model,
				const program_point &point,
				region_model *out_model,
				const extrinsic_state *ext_state,
				const program_state *state_a,
				const program_state *state_b) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models with different call stacks can't be merged.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
		  ext_state, state_a, state_b);

  /* Attempt to merge the stores; failure vetoes the whole merger.  */
  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  /* Likewise for the recorded dynamic sizes.  */
  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
					   &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			      *other_model.m_constraints,
			      out_model->m_constraints);

  /* Drop constraints involving svalues whose meaning changed during
     the merger, as they no longer describe the merged state.  */
  for (auto iter : m.m_svals_changing_meaning)
    out_model->m_constraints->purge_state_involving (iter);

  /* The exception stacks must match exactly to allow a merge.  */
  if (m_thrown_exceptions_stack != other_model.m_thrown_exceptions_stack)
    return false;
  out_model->m_thrown_exceptions_stack = m_thrown_exceptions_stack;

  if (m_caught_exceptions_stack != other_model.m_caught_exceptions_stack)
    return false;
  out_model->m_caught_exceptions_stack = m_caught_exceptions_stack;

  return true;
}
6720 :
6721 : /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6722 : otherwise. */
6723 :
6724 : tree
6725 925544 : region_model::get_fndecl_for_call (const gcall &call,
6726 : region_model_context *ctxt)
6727 : {
6728 925544 : tree fn_ptr = gimple_call_fn (&call);
6729 925544 : if (fn_ptr == NULL_TREE)
6730 : return NULL_TREE;
6731 883654 : const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
6732 1767308 : if (const region_svalue *fn_ptr_ptr
6733 883654 : = fn_ptr_sval->dyn_cast_region_svalue ())
6734 : {
6735 877646 : const region *reg = fn_ptr_ptr->get_pointee ();
6736 877646 : if (const function_region *fn_reg = reg->dyn_cast_function_region ())
6737 : {
6738 877590 : tree fn_decl = fn_reg->get_fndecl ();
6739 877590 : cgraph_node *node = cgraph_node::get (fn_decl);
6740 877590 : if (!node)
6741 : return NULL_TREE;
6742 877590 : const cgraph_node *ultimate_node = node->ultimate_alias_target ();
6743 877590 : if (ultimate_node)
6744 877590 : return ultimate_node->decl;
6745 : }
6746 : }
6747 :
6748 : return NULL_TREE;
6749 : }
6750 :
/* Would be much simpler to use a lambda here, if it were supported.  */

struct append_regions_cb_data
{
  /* The model whose current frame is used as the filter.  */
  const region_model *model;
  /* Where append_regions_cb accumulates the matching decl_regions.  */
  auto_vec<const decl_region *> *out;
};
6758 :
6759 : /* Populate *OUT with all decl_regions in the current
6760 : frame that have clusters within the store. */
6761 :
6762 : void
6763 393393 : region_model::
6764 : get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
6765 : {
6766 393393 : append_regions_cb_data data;
6767 393393 : data.model = this;
6768 393393 : data.out = out;
6769 393393 : m_store.for_each_cluster (append_regions_cb, &data);
6770 393393 : }
6771 :
6772 : /* Implementation detail of get_regions_for_current_frame. */
6773 :
6774 : void
6775 3096507 : region_model::append_regions_cb (const region *base_reg,
6776 : append_regions_cb_data *cb_data)
6777 : {
6778 3096507 : if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
6779 : return;
6780 1759728 : if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
6781 1741395 : cb_data->out->safe_push (decl_reg);
6782 : }
6783 :
6784 :
/* Abstract class for diagnostics related to the use of
   floating-point arithmetic where precision is needed.  */

class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  /* All such diagnostics are controlled by the same warning option.  */
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};
6796 :
6797 : /* Concrete diagnostic to complain about uses of floating-point arithmetic
6798 : in the size argument of malloc etc. */
6799 :
6800 : class float_as_size_arg : public imprecise_floating_point_arithmetic
6801 : {
6802 : public:
6803 25 : float_as_size_arg (tree arg) : m_arg (arg)
6804 : {}
6805 :
6806 305 : const char *get_kind () const final override
6807 : {
6808 305 : return "float_as_size_arg_diagnostic";
6809 : }
6810 :
6811 25 : bool subclass_equal_p (const pending_diagnostic &other) const final override
6812 : {
6813 25 : return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
6814 : }
6815 :
6816 25 : bool emit (diagnostic_emission_context &ctxt) final override
6817 : {
6818 25 : bool warned = ctxt.warn ("use of floating-point arithmetic here might"
6819 : " yield unexpected results");
6820 25 : if (warned)
6821 25 : inform (ctxt.get_location (),
6822 : "only use operands of an integer type"
6823 : " inside the size argument");
6824 25 : return warned;
6825 : }
6826 :
6827 : bool
6828 50 : describe_final_event (pretty_printer &pp,
6829 : const evdesc::final_event &) final override
6830 : {
6831 50 : if (m_arg)
6832 50 : pp_printf (&pp,
6833 : "operand %qE is of type %qT",
6834 50 : m_arg, TREE_TYPE (m_arg));
6835 : else
6836 0 : pp_printf (&pp,
6837 : "at least one operand of the size argument is"
6838 : " of a floating-point type");
6839 50 : return true;
6840 : }
6841 :
6842 : private:
6843 : tree m_arg;
6844 : };
6845 :
6846 : /* Visitor to find uses of floating-point variables/constants in an svalue. */
6847 :
6848 : class contains_floating_point_visitor : public visitor
6849 : {
6850 : public:
6851 7810 : contains_floating_point_visitor (const svalue *root_sval) : m_result (nullptr)
6852 : {
6853 7810 : root_sval->accept (this);
6854 : }
6855 :
6856 7810 : const svalue *get_svalue_to_report ()
6857 : {
6858 7810 : return m_result;
6859 : }
6860 :
6861 7592 : void visit_constant_svalue (const constant_svalue *sval) final override
6862 : {
6863 : /* At the point the analyzer runs, constant integer operands in a floating
6864 : point expression are already implictly converted to floating-points.
6865 : Thus, we do prefer to report non-constants such that the diagnostic
6866 : always reports a floating-point operand. */
6867 7592 : tree type = sval->get_type ();
6868 7592 : if (type && FLOAT_TYPE_P (type) && !m_result)
6869 9 : m_result = sval;
6870 7592 : }
6871 :
6872 496 : void visit_conjured_svalue (const conjured_svalue *sval) final override
6873 : {
6874 496 : tree type = sval->get_type ();
6875 496 : if (type && FLOAT_TYPE_P (type))
6876 0 : m_result = sval;
6877 496 : }
6878 :
6879 949 : void visit_initial_svalue (const initial_svalue *sval) final override
6880 : {
6881 949 : tree type = sval->get_type ();
6882 949 : if (type && FLOAT_TYPE_P (type))
6883 16 : m_result = sval;
6884 949 : }
6885 :
6886 : private:
6887 : /* Non-null if at least one floating-point operand was found. */
6888 : const svalue *m_result;
6889 : };
6890 :
6891 : /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
6892 :
6893 : void
6894 7810 : region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
6895 : region_model_context *ctxt) const
6896 : {
6897 7810 : gcc_assert (ctxt);
6898 :
6899 7810 : contains_floating_point_visitor v (size_in_bytes);
6900 7810 : if (const svalue *float_sval = v.get_svalue_to_report ())
6901 : {
6902 25 : tree diag_arg = get_representative_tree (float_sval);
6903 25 : ctxt->warn (std::make_unique<float_as_size_arg> (diag_arg));
6904 : }
6905 7810 : }
6906 :
/* Return a region describing a heap-allocated block of memory.
   Use CTXT to complain about tainted sizes.

   Reuse an existing heap_allocated_region if it's not being referenced by
   this region_model; otherwise create a new one.

   Optionally (update_state_machine) transitions the pointer pointing to the
   heap_allocated_region from start to assumed non-null.  */

const region *
region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
						   region_model_context *ctxt,
						   bool update_state_machine,
						   const call_details *cd)
{
  /* Determine which regions are referenced in this region_model, so that
     we can reuse an existing heap_allocated_region if it's not in use on
     this path.  */
  auto_bitmap base_regs_in_use;
  get_referenced_base_regions (base_regs_in_use);

  /* Don't reuse regions that are marked as TOUCHED.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    if ((*iter).second->touched_p ())
      {
	const region *base_reg = (*iter).first;
	bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
      }

  const region *reg
    = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
  /* Record the dynamic extent, if the size has a compatible type.  */
  if (size_in_bytes)
    if (compat_types_p (size_in_bytes->get_type (), size_type_node))
      set_dynamic_extents (reg, size_in_bytes, ctxt);

  if (update_state_machine && cd)
    {
      /* Transition the pointer to the new region to assumed non-null.  */
      const svalue *ptr_sval
	= m_mgr->get_ptr_svalue (cd->get_lhs_type (), reg);
      transition_ptr_sval_non_null (ctxt, ptr_sval);
    }

  return reg;
}
6952 :
/* Populate OUT_IDS with the set of IDs of those base regions which are
   reachable in this region_model.  */

void
region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
{
  /* Seed reachability from every cluster in the store.  */
  reachable_regions reachable_regs (const_cast<region_model *> (this));
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);
  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Exceptions in flight (thrown or caught) also keep regions
     reachable.  */
  for (auto &eh_node : m_thrown_exceptions_stack)
    eh_node.add_to_reachable_regions (reachable_regs);
  for (auto &eh_node : m_caught_exceptions_stack)
    eh_node.add_to_reachable_regions (reachable_regs);


  /* Convert the set of reachable regions into a bitmap of their IDs.  */
  bitmap_clear (out_ids);
  for (auto iter_reg : reachable_regs)
    bitmap_set_bit (out_ids, iter_reg->get_id ());
}
6982 :
6983 : /* Return a new region describing a block of memory allocated within the
6984 : current frame.
6985 : Use CTXT to complain about tainted sizes. */
6986 :
6987 : const region *
6988 426 : region_model::create_region_for_alloca (const svalue *size_in_bytes,
6989 : region_model_context *ctxt)
6990 : {
6991 426 : const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
6992 426 : if (compat_types_p (size_in_bytes->get_type (), size_type_node))
6993 425 : set_dynamic_extents (reg, size_in_bytes, ctxt);
6994 426 : return reg;
6995 : }
6996 :
/* Record that the size of REG is SIZE_IN_BYTES.
   Use CTXT to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
				   const svalue *size_in_bytes,
				   region_model_context *ctxt)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  if (ctxt)
    {
      /* Complain if the size is attacker-controlled, or was computed
	 using floating-point arithmetic.  */
      check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
				    ctxt);
      check_dynamic_size_for_floats (size_in_bytes, ctxt);
    }
  m_dynamic_extents.put (reg, size_in_bytes);
}
7014 :
7015 : /* Get the recording of REG in bytes, or nullptr if no dynamic size was
7016 : recorded. */
7017 :
7018 : const svalue *
7019 61778 : region_model::get_dynamic_extents (const region *reg) const
7020 : {
7021 61778 : if (const svalue * const *slot = m_dynamic_extents.get (reg))
7022 12836 : return *slot;
7023 : return nullptr;
7024 : }
7025 :
/* Unset any recorded dynamic size of REG.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  /* Drop REG's entry (if any) from the extents map.  */
  m_dynamic_extents.remove (reg);
}
7033 :
7034 : /* A subclass of pending_diagnostic for complaining about uninitialized data
7035 : being copied across a trust boundary to an untrusted output
7036 : (e.g. copy_to_user infoleaks in the Linux kernel). */
7037 :
7038 : class exposure_through_uninit_copy
7039 : : public pending_diagnostic_subclass<exposure_through_uninit_copy>
7040 : {
7041 : public:
7042 25 : exposure_through_uninit_copy (const region *src_region,
7043 : const region *dest_region,
7044 : const svalue *copied_sval)
7045 25 : : m_src_region (src_region),
7046 25 : m_dest_region (dest_region),
7047 25 : m_copied_sval (copied_sval)
7048 : {
7049 25 : gcc_assert (m_copied_sval->get_kind () == SK_POISONED
7050 : || m_copied_sval->get_kind () == SK_COMPOUND);
7051 25 : }
7052 :
7053 294 : const char *get_kind () const final override
7054 : {
7055 294 : return "exposure_through_uninit_copy";
7056 : }
7057 :
7058 25 : bool operator== (const exposure_through_uninit_copy &other) const
7059 : {
7060 25 : return (m_src_region == other.m_src_region
7061 25 : && m_dest_region == other.m_dest_region
7062 50 : && m_copied_sval == other.m_copied_sval);
7063 : }
7064 :
7065 50 : int get_controlling_option () const final override
7066 : {
7067 50 : return OPT_Wanalyzer_exposure_through_uninit_copy;
7068 : }
7069 :
7070 25 : bool emit (diagnostic_emission_context &ctxt) final override
7071 : {
7072 : /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
7073 25 : ctxt.add_cwe (200);
7074 50 : enum memory_space mem_space = get_src_memory_space ();
7075 25 : bool warned;
7076 25 : switch (mem_space)
7077 : {
7078 0 : default:
7079 0 : warned = ctxt.warn ("potential exposure of sensitive information"
7080 : " by copying uninitialized data"
7081 : " across trust boundary");
7082 0 : break;
7083 25 : case MEMSPACE_STACK:
7084 25 : warned = ctxt.warn ("potential exposure of sensitive information"
7085 : " by copying uninitialized data from stack"
7086 : " across trust boundary");
7087 25 : break;
7088 0 : case MEMSPACE_HEAP:
7089 0 : warned = ctxt.warn ("potential exposure of sensitive information"
7090 : " by copying uninitialized data from heap"
7091 : " across trust boundary");
7092 0 : break;
7093 : }
7094 25 : if (warned)
7095 : {
7096 25 : const location_t loc = ctxt.get_location ();
7097 25 : inform_number_of_uninit_bits (loc);
7098 25 : complain_about_uninit_ranges (loc);
7099 :
7100 25 : if (mem_space == MEMSPACE_STACK)
7101 25 : maybe_emit_fixit_hint ();
7102 : }
7103 25 : return warned;
7104 : }
7105 :
7106 : bool
7107 50 : describe_final_event (pretty_printer &pp,
7108 : const evdesc::final_event &) final override
7109 : {
7110 100 : enum memory_space mem_space = get_src_memory_space ();
7111 50 : switch (mem_space)
7112 : {
7113 0 : default:
7114 0 : pp_string (&pp, "uninitialized data copied here");
7115 0 : return true;
7116 :
7117 50 : case MEMSPACE_STACK:
7118 50 : pp_string (&pp, "uninitialized data copied from stack here");
7119 50 : return true;
7120 :
7121 0 : case MEMSPACE_HEAP:
7122 0 : pp_string (&pp, "uninitialized data copied from heap here");
7123 0 : return true;
7124 : }
7125 : }
7126 :
7127 25 : void mark_interesting_stuff (interesting_t *interest) final override
7128 : {
7129 25 : if (m_src_region)
7130 25 : interest->add_region_creation (m_src_region);
7131 25 : }
7132 :
7133 : void
7134 0 : maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
7135 : const final override
7136 : {
7137 0 : auto &props = result_obj.get_or_create_properties ();
7138 : #define PROPERTY_PREFIX "gcc/-Wanalyzer-exposure-through-uninit-copy/"
7139 0 : props.set (PROPERTY_PREFIX "src_region", m_src_region->to_json ());
7140 0 : props.set (PROPERTY_PREFIX "dest_region", m_dest_region->to_json ());
7141 0 : props.set (PROPERTY_PREFIX "copied_sval", m_copied_sval->to_json ());
7142 : #undef PROPERTY_PREFIX
7143 0 : }
7144 :
7145 : private:
7146 75 : enum memory_space get_src_memory_space () const
7147 : {
7148 75 : return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
7149 : }
7150 :
7151 25 : bit_size_t calc_num_uninit_bits () const
7152 : {
7153 25 : switch (m_copied_sval->get_kind ())
7154 : {
7155 0 : default:
7156 0 : gcc_unreachable ();
7157 4 : break;
7158 4 : case SK_POISONED:
7159 4 : {
7160 4 : const poisoned_svalue *poisoned_sval
7161 4 : = as_a <const poisoned_svalue *> (m_copied_sval);
7162 4 : gcc_assert (poisoned_sval->get_poison_kind () == poison_kind::uninit);
7163 :
7164 : /* Give up if don't have type information. */
7165 4 : if (m_copied_sval->get_type () == NULL_TREE)
7166 0 : return 0;
7167 :
7168 4 : bit_size_t size_in_bits;
7169 4 : if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
7170 4 : return size_in_bits;
7171 :
7172 : /* Give up if we can't get the size of the type. */
7173 0 : return 0;
7174 : }
7175 21 : break;
7176 21 : case SK_COMPOUND:
7177 21 : {
7178 21 : const compound_svalue *compound_sval
7179 21 : = as_a <const compound_svalue *> (m_copied_sval);
7180 21 : bit_size_t result = 0;
7181 : /* Find keys for uninit svals. */
7182 82 : for (auto iter : compound_sval->get_map ().get_concrete_bindings ())
7183 : {
7184 61 : const svalue *sval = iter.second;
7185 122 : if (const poisoned_svalue *psval
7186 61 : = sval->dyn_cast_poisoned_svalue ())
7187 24 : if (psval->get_poison_kind () == poison_kind::uninit)
7188 : {
7189 24 : const bit_range &bits = iter.first;
7190 24 : result += bits.m_size_in_bits;
7191 : }
7192 : }
7193 21 : return result;
7194 : }
7195 : }
7196 : }
7197 :
7198 25 : void inform_number_of_uninit_bits (location_t loc) const
7199 : {
7200 25 : bit_size_t num_uninit_bits = calc_num_uninit_bits ();
7201 25 : if (num_uninit_bits <= 0)
7202 0 : return;
7203 25 : if (num_uninit_bits % BITS_PER_UNIT == 0)
7204 : {
7205 : /* Express in bytes. */
7206 25 : byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
7207 25 : if (num_uninit_bytes == 1)
7208 3 : inform (loc, "1 byte is uninitialized");
7209 : else
7210 22 : inform (loc,
7211 : "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
7212 : }
7213 : else
7214 : {
7215 : /* Express in bits. */
7216 0 : if (num_uninit_bits == 1)
7217 0 : inform (loc, "1 bit is uninitialized");
7218 : else
7219 0 : inform (loc,
7220 : "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
7221 : }
7222 : }
7223 :
7224 25 : void complain_about_uninit_ranges (location_t loc) const
7225 : {
7226 50 : if (const compound_svalue *compound_sval
7227 25 : = m_copied_sval->dyn_cast_compound_svalue ())
7228 : {
7229 : /* Find keys for uninit svals. */
7230 21 : auto_vec<bit_range> uninit_bit_ranges;
7231 82 : for (auto iter : compound_sval->get_map ().get_concrete_bindings ())
7232 : {
7233 61 : const svalue *sval = iter.second;
7234 122 : if (const poisoned_svalue *psval
7235 61 : = sval->dyn_cast_poisoned_svalue ())
7236 24 : if (psval->get_poison_kind () == poison_kind::uninit)
7237 24 : uninit_bit_ranges.safe_push (iter.first);
7238 : }
7239 :
7240 21 : std::unique_ptr<record_layout> layout;
7241 :
7242 21 : tree type = m_copied_sval->get_type ();
7243 21 : if (type && TREE_CODE (type) == RECORD_TYPE)
7244 : {
7245 17 : layout = std::make_unique<record_layout> (type);
7246 :
7247 17 : if (0)
7248 : layout->dump ();
7249 : }
7250 :
7251 : unsigned i;
7252 : bit_range *bits;
7253 45 : FOR_EACH_VEC_ELT (uninit_bit_ranges, i, bits)
7254 : {
7255 24 : bit_offset_t start_bit = bits->get_start_bit_offset ();
7256 24 : bit_offset_t next_bit = bits->get_next_bit_offset ();
7257 24 : complain_about_uninit_range (loc, start_bit, next_bit,
7258 24 : layout.get ());
7259 : }
7260 21 : }
7261 25 : }
7262 :
  /* Emit notes at LOC describing the uninitialized bits in the half-open
     range [START_BIT, NEXT_BIT) of the copied value.
     If LAYOUT is non-null, first report as much of the range as possible
     in terms of the record's fields/padding items, consuming the range
     from the front; any remainder (or the whole range if LAYOUT is null)
     is reported in raw bytes, or bits if not byte-aligned.  */
  void complain_about_uninit_range (location_t loc,
				    bit_offset_t start_bit,
				    bit_offset_t next_bit,
				    const record_layout *layout) const
  {
    if (layout)
      {
	/* Walk the layout items that cover the range, advancing
	   START_BIT past each item as it is reported.  */
	while (start_bit < next_bit)
	  {
	    if (const record_layout::item *item
		= layout->get_item_at (start_bit))
	      {
		gcc_assert (start_bit >= item->get_start_bit_offset ());
		gcc_assert (start_bit < item->get_next_bit_offset ());
		/* The item is fully uninitialized only if the range
		   starts at its first bit and covers at least its
		   last bit.  */
		if (item->get_start_bit_offset () == start_bit
		    && item->get_next_bit_offset () <= next_bit)
		  complain_about_fully_uninit_item (*item);
		else
		  complain_about_partially_uninit_item (*item);
		start_bit = item->get_next_bit_offset ();
		continue;
	      }
	    else
	      /* No item at this offset; report the rest of the range
		 via the raw byte/bit messages below.  */
	      break;
	  }
      }

    /* Layout items covered the entire range; nothing left to report.  */
    if (start_bit >= next_bit)
      return;

    if (start_bit % 8 == 0 && next_bit % 8 == 0)
      {
	/* Express in bytes.  */
	byte_offset_t start_byte = start_bit / 8;
	byte_offset_t last_byte = (next_bit / 8) - 1;
	if (last_byte == start_byte)
	  inform (loc,
		  "byte %wu is uninitialized",
		  start_byte.to_uhwi ());
	else
	  inform (loc,
		  "bytes %wu - %wu are uninitialized",
		  start_byte.to_uhwi (),
		  last_byte.to_uhwi ());
      }
    else
      {
	/* Express in bits.  */
	bit_offset_t last_bit = next_bit - 1;
	if (last_bit == start_bit)
	  inform (loc,
		  "bit %wu is uninitialized",
		  start_bit.to_uhwi ());
	else
	  inform (loc,
		  "bits %wu - %wu are uninitialized",
		  start_bit.to_uhwi (),
		  last_bit.to_uhwi ());
      }
  }
7323 :
  /* Emit a note describing ITEM (a field, or padding after a field) as
     being entirely uninitialized, expressing its size in bytes where it
     is a whole number of bytes, otherwise in bits, with singular wording
     for a size of exactly one byte/bit.  */
  static void
  complain_about_fully_uninit_item (const record_layout::item &item)
  {
    const_tree field = item.m_field;
    bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
    if (item.m_is_padding)
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 byte)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 bit)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
    else
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 byte)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 bit)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
  }
7384 :
  /* Emit a note describing ITEM (a field, or padding after a field) as
     being partially uninitialized, without detailing which bits.  */
  static void
  complain_about_partially_uninit_item (const record_layout::item &item)
  {
    const_tree field = item.m_field;
    if (item.m_is_padding)
      inform (DECL_SOURCE_LOCATION (field),
	      "padding after field %qD is partially uninitialized",
	      field);
    else
      inform (DECL_SOURCE_LOCATION (field),
	      "field %qD is partially uninitialized",
	      field);
    /* TODO: ideally we'd describe what parts are uninitialized. */
  }
7399 :
  /* If the source region corresponds to a decl, emit a note at that
     decl's location with a fix-it hint suggesting a "= {0}" initializer
     to force zero-initialization.  */
  void maybe_emit_fixit_hint () const
  {
    if (tree decl = m_src_region->maybe_get_decl ())
      {
	gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
	hint_richloc.add_fixit_insert_after (" = {0}");
	inform (&hint_richloc,
		"suggest forcing zero-initialization by"
		" providing a %<{0}%> initializer");
      }
  }
7411 :
7412 : private:
7413 : const region *m_src_region;
7414 : const region *m_dest_region;
7415 : const svalue *m_copied_sval;
7416 : };
7417 :
7418 : /* Return true if any part of SVAL is uninitialized. */
7419 :
7420 : static bool
7421 80 : contains_uninit_p (const svalue *sval)
7422 : {
7423 80 : switch (sval->get_kind ())
7424 : {
7425 : default:
7426 : return false;
7427 4 : case SK_POISONED:
7428 4 : {
7429 4 : const poisoned_svalue *psval
7430 4 : = as_a <const poisoned_svalue *> (sval);
7431 4 : return psval->get_poison_kind () == poison_kind::uninit;
7432 : }
7433 43 : case SK_COMPOUND:
7434 43 : {
7435 43 : const compound_svalue *compound_sval
7436 43 : = as_a <const compound_svalue *> (sval);
7437 :
7438 43 : for (auto iter = compound_sval->begin ();
7439 141 : iter != compound_sval->end (); ++iter)
7440 : {
7441 119 : const svalue *inner_sval = iter.get_svalue ();
7442 238 : if (const poisoned_svalue *psval
7443 119 : = inner_sval->dyn_cast_poisoned_svalue ())
7444 21 : if (psval->get_poison_kind () == poison_kind::uninit)
7445 21 : return true;
7446 : }
7447 :
7448 22 : return false;
7449 : }
7450 : }
7451 : }
7452 :
7453 : /* Function for use by plugins when simulating writing data through a
7454 : pointer to an "untrusted" region DST_REG (and thus crossing a security
7455 : boundary), such as copying data to user space in an OS kernel.
7456 :
7457 : Check that COPIED_SVAL is fully initialized. If not, complain about
7458 : an infoleak to CTXT.
7459 :
7460 : SRC_REG can be nullptr; if non-NULL it is used as a hint in the diagnostic
7461 : as to where COPIED_SVAL came from. */
7462 :
7463 : void
7464 80 : region_model::maybe_complain_about_infoleak (const region *dst_reg,
7465 : const svalue *copied_sval,
7466 : const region *src_reg,
7467 : region_model_context *ctxt)
7468 : {
7469 : /* Check for exposure. */
7470 80 : if (contains_uninit_p (copied_sval))
7471 25 : ctxt->warn
7472 25 : (std::make_unique<exposure_through_uninit_copy> (src_reg,
7473 : dst_reg,
7474 : copied_sval));
7475 80 : }
7476 :
7477 : /* Set errno to a positive symbolic int, as if some error has occurred. */
7478 :
7479 : void
7480 431 : region_model::set_errno (const call_details &cd)
7481 : {
7482 431 : const region *errno_reg = m_mgr->get_errno_region ();
7483 431 : conjured_purge p (this, cd.get_ctxt ());
7484 431 : const svalue *new_errno_sval
7485 431 : = m_mgr->get_or_create_conjured_svalue (integer_type_node,
7486 431 : &cd.get_call_stmt (),
7487 : errno_reg, p);
7488 431 : const svalue *zero
7489 431 : = m_mgr->get_or_create_int_cst (integer_type_node, 0);
7490 431 : add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
7491 431 : set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
7492 431 : }
7493 :
7494 : // class region_model_context
7495 :
bool
region_model_context::
warn (std::unique_ptr<pending_diagnostic> d,
      std::unique_ptr<pending_location::fixer_for_epath> ploc_fixer)
{
  /* Report D at this context's current pending location, attaching
     PLOC_FIXER (which may be null) so the location can be adjusted once
     the exploded path is known.  Returns the result of warn_at.  */
  pending_location ploc (get_pending_location_for_diag ());
  ploc.m_fixer_for_epath = std::move (ploc_fixer);
  return warn_at (std::move (d), std::move (ploc));
}
7505 :
7506 : /* class noop_region_model_context : public region_model_context. */
7507 :
/* Deliberate no-op implementations: this context discards notes,
   events, bifurcations, and path terminations.  */

void
noop_region_model_context::add_note (std::unique_ptr<pending_note>)
{
}

void
noop_region_model_context::add_event (std::unique_ptr<checker_event>)
{
}

void
noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
{
}

void
noop_region_model_context::terminate_path ()
{
}
7527 :
7528 : /* class region_model_context_decorator : public region_model_context. */
7529 :
7530 : void
7531 167 : region_model_context_decorator::add_event (std::unique_ptr<checker_event> event)
7532 : {
7533 167 : if (m_inner)
7534 167 : m_inner->add_event (std::move (event));
7535 167 : }
7536 :
7537 : /* struct model_merger. */
7538 :
7539 : /* Dump a multiline representation of this merger to PP. */
7540 :
7541 : void
7542 0 : model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
7543 : {
7544 0 : pp_string (pp, "model A:");
7545 0 : pp_newline (pp);
7546 0 : m_model_a->dump_to_pp (pp, simple, true);
7547 0 : pp_newline (pp);
7548 :
7549 0 : pp_string (pp, "model B:");
7550 0 : pp_newline (pp);
7551 0 : m_model_b->dump_to_pp (pp, simple, true);
7552 0 : pp_newline (pp);
7553 :
7554 0 : pp_string (pp, "merged model:");
7555 0 : pp_newline (pp);
7556 0 : m_merged_model->dump_to_pp (pp, simple, true);
7557 0 : pp_newline (pp);
7558 0 : }
7559 :
7560 : /* Dump a multiline representation of this merger to FILE. */
7561 :
void
model_merger::dump (FILE *fp, bool simple) const
{
  /* SIMPLE is passed through to dump_to_pp to control the verbosity
     of the underlying model dumps.  */
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple);
}

/* Dump a multiline representation of this merger to stderr. */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
7577 : /* Return true if it's OK to merge SVAL with other svalues. */
7578 :
7579 : bool
7580 562189 : model_merger::mergeable_svalue_p (const svalue *sval) const
7581 : {
7582 562189 : if (m_ext_state)
7583 : {
7584 : /* Reject merging svalues that have non-purgable sm-state,
7585 : to avoid falsely reporting memory leaks by merging them
7586 : with something else. For example, given a local var "p",
7587 : reject the merger of a:
7588 : store_a mapping "p" to a malloc-ed ptr
7589 : with:
7590 : store_b mapping "p" to a NULL ptr. */
7591 562141 : if (m_state_a)
7592 562141 : if (!m_state_a->can_purge_p (*m_ext_state, sval))
7593 : return false;
7594 560375 : if (m_state_b)
7595 560375 : if (!m_state_b->can_purge_p (*m_ext_state, sval))
7596 : return false;
7597 : }
7598 : return true;
7599 : }
7600 :
7601 : /* Mark WIDENING_SVAL as changing meaning during the merge. */
7602 :
void
model_merger::on_widening_reuse (const widening_svalue *widening_sval)
{
  /* Record WIDENING_SVAL in the set of svalues whose meaning changes
     across this merge.  */
  m_svals_changing_meaning.add (widening_sval);
}
7608 :
7609 : } // namespace ana
7610 :
7611 : /* Dump RMODEL fully to stderr (i.e. without summarization). */
7612 :
DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  /* "false" here requests the non-simple (full) dump.  */
  rmodel.dump (false);
}
7618 :
7619 : /* class rejected_op_constraint : public rejected_constraint. */
7620 :
7621 : void
7622 4 : rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
7623 : {
7624 4 : region_model m (m_model);
7625 4 : const svalue *lhs_sval = m.get_rvalue (m_lhs, nullptr);
7626 4 : const svalue *rhs_sval = m.get_rvalue (m_rhs, nullptr);
7627 4 : lhs_sval->dump_to_pp (pp, true);
7628 4 : pp_printf (pp, " %s ", op_symbol_code (m_op));
7629 4 : rhs_sval->dump_to_pp (pp, true);
7630 4 : }
7631 :
7632 : /* class rejected_default_case : public rejected_constraint. */
7633 :
void
rejected_default_case::dump_to_pp (pretty_printer *pp) const
{
  /* There is no expression to print for this kind of rejected
     constraint; just describe it.  */
  pp_string (pp, "implicit default for enum");
}

/* class rejected_ranges_constraint : public rejected_constraint. */

void
rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
{
  /* Print "SVAL in RANGES", evaluating M_EXPR in a scratch copy of
     the recorded model.  */
  region_model m (m_model);
  const svalue *sval = m.get_rvalue (m_expr, nullptr);
  sval->dump_to_pp (pp, true);
  pp_string (pp, " in ");
  m_ranges->dump_to_pp (pp, true);
}
7651 :
7652 : /* class engine. */
7653 :
7654 : /* engine's ctor. */
7655 :
engine::engine (region_model_manager &mgr,
		const supergraph *sg)
: m_mgr (mgr),
  m_sg (sg)
{
  /* Only references are stored; MGR (and SG, if non-null) must
     outlive this engine.  NOTE(review): SG's nullability is inferred
     from its pointer type - confirm at call sites.  */
}

/* Dump the managed objects by class to LOGGER, and the per-class totals. */

void
engine::log_stats (logger *logger) const
{
  m_mgr.log_stats (logger, true);
}
7670 :
7671 : namespace ana {
7672 :
7673 : #if CHECKING_P
7674 :
7675 : namespace selftest {
7676 :
7677 : /* Build a constant tree of the given type from STR. */
7678 :
7679 : static tree
7680 64 : build_real_cst_from_string (tree type, const char *str)
7681 : {
7682 64 : REAL_VALUE_TYPE real;
7683 64 : real_from_string (&real, str);
7684 64 : return build_real (type, real);
7685 : }
7686 :
7687 : /* Append various "interesting" constants to OUT (e.g. NaN). */
7688 :
7689 : static void
7690 8 : append_interesting_constants (auto_vec<tree> *out)
7691 : {
7692 8 : out->safe_push (integer_zero_node);
7693 8 : out->safe_push (build_int_cst (integer_type_node, 42));
7694 8 : out->safe_push (build_int_cst (unsigned_type_node, 0));
7695 8 : out->safe_push (build_int_cst (unsigned_type_node, 42));
7696 8 : out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
7697 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
7698 8 : out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
7699 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
7700 8 : out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
7701 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
7702 8 : out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
7703 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
7704 8 : }
7705 :
7706 : /* Verify that tree_cmp is a well-behaved comparator for qsort, even
7707 : if the underlying constants aren't comparable. */
7708 :
static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple. */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
	{
	  auto_vec<tree> v (3);
	  v.quick_push (csts[i]);
	  v.quick_push (csts[j]);
	  v.quick_push (csts[k]);
	  /* Success is simply qsort completing without tripping any
	     comparator sanity checks; no particular resulting order
	     is asserted.  */
	  v.qsort (tree_cmp);
	}
}
7728 :
7729 : /* Implementation detail of the ASSERT_CONDITION_* macros. */
7730 :
void
assert_condition (const location &loc,
		  region_model &model,
		  const svalue *lhs, tree_code op, const svalue *rhs,
		  tristate expected)
{
  /* Evaluate "LHS OP RHS" in MODEL and assert (reported at LOC) that
     the resulting tristate equals EXPECTED.  */
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}

/* Implementation detail of the ASSERT_CONDITION_* macros. */

void
assert_condition (const location &loc,
		  region_model &model,
		  tree lhs, tree_code op, tree rhs,
		  tristate expected)
{
  /* As above, but taking the operands as trees.  */
  tristate actual = model.eval_condition (lhs, op, rhs, nullptr);
  ASSERT_EQ_AT (loc, actual, expected);
}
7752 :
7753 : /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
7754 :
static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* Dump T via dump_tree and assert (reported at LOC) that the text
     matches EXPECTED exactly.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
7764 :
7765 : /* Assert that dump_tree (T) is EXPECTED. */
7766 :
7767 : #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
7768 : SELFTEST_BEGIN_STMT \
7769 : assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
7770 : SELFTEST_END_STMT
7771 :
7772 : /* Implementation detail of ASSERT_DUMP_EQ. */
7773 :
static void
assert_dump_eq (const location &loc,
		const region_model &model,
		bool summarize,
		const char *expected)
{
  /* Dump MODEL (with SUMMARIZE passed through to dump_to_pp) and
     assert (reported at LOC) that the text matches EXPECTED exactly.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
7787 :
7788 : /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7789 :
7790 : #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7791 : SELFTEST_BEGIN_STMT \
7792 : assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7793 : SELFTEST_END_STMT
7794 :
7795 : /* Smoketest for region_model::dump_to_pp. */
7796 :
static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  /* An empty model should dump the same whether or not summarization
     is requested.  */
  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");

  /* Also exercise the tree-widget-based dump with an ASCII theme.  */
  text_art::ascii_theme theme;
  pretty_printer pp;
  dump_to_pp (model, &theme, &pp);
  ASSERT_STREQ ("Region Model\n"
		"`- Store\n"
		"   `- m_called_unknown_fn: false\n",
		pp_formatted_text (&pp));
}
7824 :
7825 : /* Helper function for selftests. Create a struct or union type named NAME,
7826 : with the fields given by the FIELD_DECLS in FIELDS.
7827 : If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
7828 : create a UNION_TYPE. */
7829 :
static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  /* Chain the FIELD_DECLs together.  chainon (field, fieldlist)
     prepends each new field, so the chain is built in reverse and then
     flipped with nreverse to preserve the order of FIELDS.  */
  tree fieldlist = NULL_TREE;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  layout_type (t);
  return t;
}
7853 :
7854 : /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
7855 :
struct coord_test
{
  coord_test ()
  {
    /* Build "struct coord { int x; int y; };" via
       make_test_compound_type, keeping the FIELD_DECLs for use in
       COMPONENT_REFs.  */
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  tree m_x_field;   /* FIELD_DECL for "x".  */
  tree m_y_field;   /* FIELD_DECL for "y".  */
  tree m_coord_type;  /* The RECORD_TYPE for "struct coord".  */
};
7874 :
7875 : /* Verify usage of a struct. */
7876 :
static void
test_struct ()
{
  coord_test ct;

  /* Build trees for "c", "c.x" and "c.y".  */
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* Set fields in order y, then x. */
  model.set_value (c_y, int_m3, nullptr);
  model.set_value (c_x, int_17, nullptr);

  /* Verify get_offset for "c.x". */
  {
    const region *c_x_reg = model.get_lvalue (c_x, nullptr);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, nullptr));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y". */
  {
    const region *c_y_reg = model.get_lvalue (c_y, nullptr);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, nullptr));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Check iteration order of binding_cluster (and thus of binding_map).
     Even though y was set before x, iteration should visit the
     bindings in memory order.  */
  {
    std::vector<binding_map::binding_pair> vec;
    auto cluster
      = model.get_store ()->get_cluster (model.get_lvalue (c, nullptr));
    for (auto iter : *cluster)
      vec.push_back (iter);
    ASSERT_EQ (vec.size (), 2);
    /* we should get them back in ascending order in memory (x then y). */
    /* x */
    ASSERT_EQ (vec[0].m_key->dyn_cast_concrete_binding ()->get_bit_range (),
	       bit_range (0, INT_TYPE_SIZE));
    ASSERT_TRUE (tree_int_cst_equal(vec[0].m_sval->maybe_get_constant (),
				    int_17));
    /* y */
    ASSERT_EQ (vec[1].m_key->dyn_cast_concrete_binding ()->get_bit_range (),
	       bit_range (INT_TYPE_SIZE, INT_TYPE_SIZE));
    ASSERT_TRUE (tree_int_cst_equal(vec[1].m_sval->maybe_get_constant (),
				    int_m3));
  }
}
7934 :
7935 : /* Verify usage of an array element. */
7936 :
7937 : static void
7938 4 : test_array_1 ()
7939 : {
7940 4 : tree tlen = size_int (10);
7941 4 : tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
7942 :
7943 4 : tree a = build_global_decl ("a", arr_type);
7944 :
7945 4 : region_model_manager mgr;
7946 4 : region_model model (&mgr);
7947 4 : tree int_0 = integer_zero_node;
7948 4 : tree a_0 = build4 (ARRAY_REF, char_type_node,
7949 : a, int_0, NULL_TREE, NULL_TREE);
7950 4 : tree char_A = build_int_cst (char_type_node, 'A');
7951 4 : model.set_value (a_0, char_A, nullptr);
7952 4 : }
7953 :
7954 : /* Verify that region_model::get_representative_tree works as expected. */
7955 :
static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST. */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, nullptr);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal. */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, nullptr);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array. */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    /* A distinctive svalue to store, so it can be mapped back to the
       tree for the region it was stored in.  */
    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  char_type_node, "test value");

    /* Value of a[3]. */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0]. */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = integer_zero_node;
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct. */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field. */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (mgr.alloc_symbol_id (),
				      integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field. */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (mgr.alloc_symbol_id (),
				      integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
8049 :
8050 : /* Verify that calling region_model::get_rvalue repeatedly on the same
8051 : tree constant retrieves the same svalue *. */
8052 :
static void
test_unique_constants ()
{
  tree int_0 = integer_zero_node;
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  /* Fetching rvalues for constants shouldn't have added any
     diagnostics to the context.  */
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"... */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue. */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal. */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
8081 :
8082 : /* Verify that each type gets its own singleton unknown_svalue within a
8083 : region_model_manager, and that NULL_TREE gets its own singleton. */
8084 :
static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue. */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues. */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (nullptr);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  (nullptr here is the "no type" case, not a null tree of
     some type.)  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (nullptr);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
8107 :
8108 : /* Verify that initial_svalue are handled as expected. */
8109 :
static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  /* Distinct globals should have distinct initial svalues.  */
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  /* The rvalue of an untouched global is its initial value.  */
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

}
8126 :
8127 : /* Verify that unary ops are folded as expected. */
8128 :
static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x". */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x". */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* Logical negation of a comparison should fold to the inverse
     comparison.  */
  /* "!(x == y)" -> "x != y". */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y". */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}
8164 :
8165 : /* Verify that binops on constant svalues are folded, and that
algebraic identities (x + 0, x * 0, x * 1, constants moved to the
RHS, (x + 1) + 1 == x + 2, boolean and/or short-circuits) are
simplified. */
8166 :
8167 : static void
8168 4 : test_binop_svalue_folding ()
8169 : {
8170 : #define NUM_CSTS 10
8171 4 : tree cst_int[NUM_CSTS];
8172 4 : region_model_manager mgr;
8173 4 : const svalue *cst_sval[NUM_CSTS];
8174 44 : for (int i = 0; i < NUM_CSTS; i++)
8175 : {
8176 40 : cst_int[i] = build_int_cst (integer_type_node, i);
8177 40 : cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
8178 40 : ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
8179 40 : ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
8180 : }
8181 :
8182 44 : for (int i = 0; i < NUM_CSTS; i++)
8183 440 : for (int j = 0; j < NUM_CSTS; j++)
8184 : {
8185 400 : if (i != j)
8186 360 : ASSERT_NE (cst_sval[i], cst_sval[j]);
8187 400 : if (i + j < NUM_CSTS)
8188 : {
8189 220 : const svalue *sum
8190 220 : = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
8191 : cst_sval[i], cst_sval[j]);
8192 220 : ASSERT_EQ (sum, cst_sval[i + j]);
8193 : }
8194 400 : if (i - j >= 0)
8195 : {
8196 220 : const svalue *difference
8197 220 : = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
8198 : cst_sval[i], cst_sval[j]);
8199 220 : ASSERT_EQ (difference, cst_sval[i - j]);
8200 : }
8201 400 : if (i * j < NUM_CSTS)
8202 : {
8203 168 : const svalue *product
8204 168 : = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
8205 : cst_sval[i], cst_sval[j]);
8206 168 : ASSERT_EQ (product, cst_sval[i * j]);
8207 : }
8208 400 : const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
8209 : cst_sval[i], cst_sval[j]);
8210 400 : ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
8211 400 : const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
8212 : cst_sval[i], cst_sval[j]);
8213 400 : ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
8214 : // etc: other binary ops (e.g. division, shifts) are not exhaustively tested here
8215 : }
8216 :
8217 4 : tree x = build_global_decl ("x", integer_type_node);
8218 :
8219 4 : test_region_model_context ctxt;
8220 4 : region_model model (&mgr);
8221 4 : const svalue *x_init = model.get_rvalue (x, &ctxt);
8222 :
8223 : /* PLUS_EXPR folding. */
8224 4 : const svalue *x_init_plus_zero
8225 4 : = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
8226 : x_init, cst_sval[0]);
8227 4 : ASSERT_EQ (x_init_plus_zero, x_init);
8228 4 : const svalue *zero_plus_x_init
8229 4 : = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
8230 : cst_sval[0], x_init);
8231 4 : ASSERT_EQ (zero_plus_x_init, x_init);
8232 :
8233 : /* MULT_EXPR folding. */
8234 4 : const svalue *x_init_times_zero
8235 4 : = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
8236 : x_init, cst_sval[0]);
8237 4 : ASSERT_EQ (x_init_times_zero, cst_sval[0]);
8238 4 : const svalue *zero_times_x_init
8239 4 : = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
8240 : cst_sval[0], x_init);
8241 4 : ASSERT_EQ (zero_times_x_init, cst_sval[0]);
8242 :
8243 4 : const svalue *x_init_times_one
8244 4 : = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
8245 : x_init, cst_sval[1]);
8246 4 : ASSERT_EQ (x_init_times_one, x_init);
8247 4 : const svalue *one_times_x_init
8248 4 : = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
8249 : cst_sval[1], x_init);
8250 4 : ASSERT_EQ (one_times_x_init, x_init);
8251 :
8252 : // etc: further identities (e.g. x - 0, x / 1) could be added
8253 : // TODO: do we want to use the match-and-simplify DSL for this?
8254 :
8255 : /* Verify that binops put any constants on the RHS. */
8256 4 : const svalue *four_times_x_init
8257 4 : = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
8258 : cst_sval[4], x_init);
8259 4 : const svalue *x_init_times_four
8260 4 : = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
8261 : x_init, cst_sval[4]);
8262 4 : ASSERT_EQ (four_times_x_init, x_init_times_four);
8263 4 : const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
8264 4 : ASSERT_EQ (binop->get_op (), MULT_EXPR);
8265 4 : ASSERT_EQ (binop->get_arg0 (), x_init);
8266 4 : ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
8267 :
8268 : /* Verify that ((x + 1) + 1) == (x + 2). */
8269 4 : const svalue *x_init_plus_one
8270 4 : = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
8271 : x_init, cst_sval[1]);
8272 4 : const svalue *x_init_plus_two
8273 4 : = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
8274 : x_init, cst_sval[2]);
8275 4 : const svalue *x_init_plus_one_plus_one
8276 4 : = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
8277 : x_init_plus_one, cst_sval[1]);
8278 4 : ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
8279 :
8280 : /* Verify various binops on booleans. */
8281 4 : {
8282 4 : const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
8283 4 : const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
8284 4 : const svalue *sval_unknown
8285 4 : = mgr.get_or_create_unknown_svalue (boolean_type_node);
8286 4 : const placeholder_svalue sval_placeholder (mgr.alloc_symbol_id (),
8287 4 : boolean_type_node, "v");
8288 12 : for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
8289 : {
8290 8 : ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
8291 : sval_true, sval_unknown),
8292 : sval_true);
8293 8 : ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
8294 : sval_false, sval_unknown),
8295 : sval_unknown);
8296 8 : ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
8297 : sval_false, &sval_placeholder),
8298 : &sval_placeholder);
8299 : }
8300 12 : for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
8301 : {
8302 8 : ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
8303 : sval_false, sval_unknown),
8304 : sval_false);
8305 8 : ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
8306 : sval_true, sval_unknown),
8307 : sval_unknown);
8308 8 : ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
8309 : sval_true, &sval_placeholder),
8310 : &sval_placeholder);
8311 : }
8312 4 : }
8313 4 : }
8314 :
8315 : /* Verify that sub_svalues are folded as expected: a field access
within an unknown compound value yields an unknown of the field's
type. */
8316 :
8317 : static void
8318 4 : test_sub_svalue_folding ()
8319 : {
8320 4 : coord_test ct;
8321 4 : tree c = build_global_decl ("c", ct.m_coord_type);
8322 4 : tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8323 : c, ct.m_x_field, NULL_TREE);
8324 :
8325 4 : region_model_manager mgr;
8326 4 : region_model model (&mgr);
8327 4 : test_region_model_context ctxt;
8328 4 : const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
8329 :
8330 : /* Verify that sub_svalue of "unknown" simply
8331 : yields an unknown. */
8332 :
8333 4 : const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
8334 4 : const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
8335 : unknown, c_x_reg);
8336 4 : ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
8337 4 : ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
8338 4 : }
8339 :
8340 : /* Get bit BIT within the unsigned constant VAL, as a 1-bit
(boolean-typed) bits_within symbolic value created in MGR. */
8341 :
8342 : static const svalue *
8343 256 : get_bit (region_model_manager *mgr,
8344 : bit_offset_t bit,
8345 : unsigned HOST_WIDE_INT val)
8346 : {
8347 256 : const svalue *inner_svalue
8348 256 : = mgr->get_or_create_int_cst (unsigned_type_node, val);
8349 256 : return mgr->get_or_create_bits_within (boolean_type_node,
8350 256 : bit_range (bit, 1),
8351 256 : inner_svalue);
8352 : }
8353 :
8354 : /* Verify that bits_within_svalues are folded as expected, for the
all-zeroes, lowest-bit-set, highest-bit-set, and all-ones 16-bit
patterns. */
8355 :
8356 : static void
8357 4 : test_bits_within_svalue_folding ()
8358 : {
8359 4 : region_model_manager mgr;
8360 :
8361 4 : const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
8362 4 : const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
8363 :
8364 4 : {
8365 4 : const unsigned val = 0x0000;
8366 68 : for (unsigned bit = 0; bit < 16; bit++)
8367 64 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8368 : }
8369 :
8370 4 : {
8371 4 : const unsigned val = 0x0001;
8372 4 : ASSERT_EQ (get_bit (&mgr, 0, val), one);
8373 64 : for (unsigned bit = 1; bit < 16; bit++)
8374 60 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8375 : }
8376 :
8377 4 : {
8378 4 : const unsigned val = 0x8000;
8379 64 : for (unsigned bit = 0; bit < 15; bit++)
8380 60 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8381 4 : ASSERT_EQ (get_bit (&mgr, 15, val), one);
8382 : }
8383 :
8384 4 : {
8385 4 : const unsigned val = 0xFFFF;
8386 68 : for (unsigned bit = 0; bit < 16; bit++)
8387 64 : ASSERT_EQ (get_bit (&mgr, bit, val), one);
8388 : }
8389 4 : }
8390 :
8391 : /* Test that region::descendent_of_p works as expected, both for
top-level memory-space regions and for decl/cast regions within
them. */
8392 :
8393 : static void
8394 4 : test_descendent_of_p ()
8395 : {
8396 4 : region_model_manager mgr;
8397 4 : const region *stack = mgr.get_stack_region ();
8398 4 : const region *heap = mgr.get_heap_region ();
8399 4 : const region *code = mgr.get_code_region ();
8400 4 : const region *globals = mgr.get_globals_region ();
8401 :
8402 : /* descendent_of_p should return true when used on the region itself. */
8403 4 : ASSERT_TRUE (stack->descendent_of_p (stack));
8404 4 : ASSERT_FALSE (stack->descendent_of_p (heap));
8405 4 : ASSERT_FALSE (stack->descendent_of_p (code));
8406 4 : ASSERT_FALSE (stack->descendent_of_p (globals));
8407 :
8408 4 : tree x = build_global_decl ("x", integer_type_node);
8409 4 : const region *x_reg = mgr.get_region_for_global (x);
8410 4 : ASSERT_TRUE (x_reg->descendent_of_p (globals));
8411 :
8412 : /* A cast_region should be a descendent of the original region. */
8413 4 : const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
8414 4 : ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
8415 4 : }
8416 :
8417 : /* Verify that bit_range_region works as expected: two adjacent
byte-sized bit ranges within an int are distinct regions, and both
are descendants of the int's region. */
8418 :
8419 : static void
8420 4 : test_bit_range_regions ()
8421 : {
8422 4 : tree x = build_global_decl ("x", integer_type_node);
8423 4 : region_model_manager mgr;
8424 4 : const region *x_reg = mgr.get_region_for_global (x);
8425 4 : const region *byte0
8426 4 : = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
8427 4 : const region *byte1
8428 4 : = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
8429 4 : ASSERT_TRUE (byte0->descendent_of_p (x_reg));
8430 4 : ASSERT_TRUE (byte1->descendent_of_p (x_reg));
8431 4 : ASSERT_NE (byte0, byte1);
8432 4 : }
8433 :
8434 : /* Verify that simple assignments work as expected: after "y = 0"
with "x == 0" already known, both "y == 0" and "y == x" should
hold. */
8435 :
8436 : static void
8437 4 : test_assignment ()
8438 : {
8439 4 : tree int_0 = integer_zero_node;
8440 4 : tree x = build_global_decl ("x", integer_type_node);
8441 4 : tree y = build_global_decl ("y", integer_type_node);
8442 :
8443 : /* "x == 0", then use of y, then "y = 0;". */
8444 4 : region_model_manager mgr;
8445 4 : region_model model (&mgr);
8446 4 : ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
8447 4 : ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
8448 4 : model.set_value (model.get_lvalue (y, nullptr),
8449 : model.get_rvalue (int_0, nullptr),
8450 : nullptr);
8451 4 : ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
8452 4 : ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
8453 4 : }
8454 :
8455 : /* Verify that compound (whole-struct) assignments work as expected:
copying struct "c" to "d" should copy the per-field svalues. */
8456 :
8457 : static void
8458 4 : test_compound_assignment ()
8459 : {
8460 4 : coord_test ct;
8461 :
8462 4 : tree c = build_global_decl ("c", ct.m_coord_type);
8463 4 : tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8464 : c, ct.m_x_field, NULL_TREE);
8465 4 : tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
8466 : c, ct.m_y_field, NULL_TREE);
8467 4 : tree d = build_global_decl ("d", ct.m_coord_type);
8468 4 : tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8469 : d, ct.m_x_field, NULL_TREE);
8470 4 : tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
8471 : d, ct.m_y_field, NULL_TREE);
8472 :
8473 4 : tree int_17 = build_int_cst (integer_type_node, 17);
8474 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
8475 :
8476 4 : region_model_manager mgr;
8477 4 : region_model model (&mgr);
8478 4 : model.set_value (c_x, int_17, nullptr);
8479 4 : model.set_value (c_y, int_m3, nullptr);
8480 :
8481 : /* Copy c to d. */
8482 4 : const svalue *sval = model.get_rvalue (c, nullptr);
8483 4 : model.set_value (model.get_lvalue (d, nullptr), sval, nullptr);
8484 :
8485 : /* Check that the fields have the same svalues. */
8486 4 : ASSERT_EQ (model.get_rvalue (c_x, nullptr), model.get_rvalue (d_x, nullptr));
8487 4 : ASSERT_EQ (model.get_rvalue (c_y, nullptr), model.get_rvalue (d_y, nullptr));
8488 4 : }
8489 :
8490 : /* Verify the details of pushing and popping stack frames, including
that pointers into a popped frame become poisoned. */
8491 :
8492 : static void
8493 4 : test_stack_frames ()
8494 : {
8495 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8496 4 : tree int_10 = build_int_cst (integer_type_node, 10);
8497 4 : tree int_5 = build_int_cst (integer_type_node, 5);
8498 4 : tree int_0 = integer_zero_node;
8499 :
8500 4 : auto_vec <tree> param_types;
8501 4 : tree parent_fndecl = make_fndecl (integer_type_node,
8502 : "parent_fn",
8503 : param_types);
8504 4 : allocate_struct_function (parent_fndecl, true);
8505 :
8506 4 : tree child_fndecl = make_fndecl (integer_type_node,
8507 : "child_fn",
8508 : param_types);
8509 4 : allocate_struct_function (child_fndecl, true);
8510 :
8511 : /* "a" and "b" in the parent frame. */
8512 4 : tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8513 : get_identifier ("a"),
8514 : integer_type_node);
8515 4 : DECL_CONTEXT (a) = parent_fndecl;
8516 4 : tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8517 : get_identifier ("b"),
8518 : integer_type_node);
8519 4 : DECL_CONTEXT (b) = parent_fndecl;
8520 : /* "x" and "y" in a child frame. */
8521 4 : tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8522 : get_identifier ("x"),
8523 : integer_type_node);
8524 4 : DECL_CONTEXT (x) = child_fndecl;
8525 4 : tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8526 : get_identifier ("y"),
8527 : integer_type_node);
8528 4 : DECL_CONTEXT (y) = child_fndecl;
8529 :
8530 : /* "p" global. */
8531 4 : tree p = build_global_decl ("p", ptr_type_node);
8532 :
8533 : /* "q" global. */
8534 4 : tree q = build_global_decl ("q", ptr_type_node);
8535 :
8536 4 : region_model_manager mgr;
8537 4 : test_region_model_context ctxt;
8538 4 : region_model model (&mgr);
8539 :
8540 : /* Push stack frame for "parent_fn". */
8541 4 : const region *parent_frame_reg
8542 4 : = model.push_frame (*DECL_STRUCT_FUNCTION (parent_fndecl),
8543 : nullptr, nullptr, &ctxt);
8544 4 : ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
8545 4 : ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
8546 4 : const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
8547 4 : model.set_value (a_in_parent_reg,
8548 : model.get_rvalue (int_42, &ctxt),
8549 : &ctxt);
8550 4 : ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
8551 :
8552 4 : model.add_constraint (b, LT_EXPR, int_10, &ctxt);
8553 4 : ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
8554 : tristate (tristate::TS_TRUE));
8555 :
8556 : /* Push stack frame for "child_fn". */
8557 4 : const region *child_frame_reg
8558 4 : = model.push_frame (*DECL_STRUCT_FUNCTION (child_fndecl),
8559 : nullptr, nullptr, &ctxt);
8560 4 : ASSERT_EQ (model.get_current_frame (), child_frame_reg);
8561 4 : ASSERT_TRUE (model.region_exists_p (child_frame_reg));
8562 4 : const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
8563 4 : model.set_value (x_in_child_reg,
8564 : model.get_rvalue (int_0, &ctxt),
8565 : &ctxt);
8566 4 : ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
8567 :
8568 4 : model.add_constraint (y, NE_EXPR, int_5, &ctxt);
8569 4 : ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
8570 : tristate (tristate::TS_TRUE));
8571 :
8572 : /* Point a global pointer at a local in the child frame: p = &x. */
8573 4 : const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
8574 4 : model.set_value (p_in_globals_reg,
8575 : mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
8576 : &ctxt);
8577 4 : ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), nullptr);
8578 :
8579 : /* Point another global pointer at p: q = &p. */
8580 4 : const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
8581 4 : model.set_value (q_in_globals_reg,
8582 : mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
8583 : &ctxt);
8584 :
8585 : /* Test region::descendent_of_p. */
8586 4 : ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
8587 4 : ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
8588 4 : ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
8589 :
8590 : /* Pop the "child_fn" frame from the stack. */
8591 4 : model.pop_frame (nullptr, nullptr, &ctxt, nullptr);
8592 4 : ASSERT_FALSE (model.region_exists_p (child_frame_reg));
8593 4 : ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
8594 :
8595 : /* Verify that p (which was pointing at the local "x" in the popped
8596 : frame) has been poisoned. */
8597 4 : const svalue *new_p_sval = model.get_rvalue (p, nullptr);
8598 4 : ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
8599 4 : ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
8600 : poison_kind::popped_stack);
8601 :
8602 : /* Verify that q still points to p, in spite of the region
8603 : renumbering. */
8604 4 : const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
8605 4 : ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
8606 4 : ASSERT_EQ (new_q_sval->maybe_get_region (),
8607 : model.get_lvalue (p, &ctxt));
8608 :
8609 : /* Verify that top of stack has been updated. */
8610 4 : ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
8611 :
8612 : /* Verify locals in parent frame. */
8613 : /* Verify "a" still has its value. */
8614 4 : const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
8615 4 : ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
8616 4 : ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
8617 : int_42);
8618 : /* Verify "b" still has its constraint. */
8619 4 : ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
8620 : tristate (tristate::TS_TRUE));
8621 4 : }
8622 :
8623 : /* Verify that get_representative_path_var works as expected, that
8624 : we can map from regions to parms and back within a recursive call
8625 : stack. */
8626 :
8627 : static void
8628 4 : test_get_representative_path_var ()
8629 : {
8630 4 : auto_vec <tree> param_types;
8631 4 : tree fndecl = make_fndecl (integer_type_node,
8632 : "factorial",
8633 : param_types);
8634 4 : allocate_struct_function (fndecl, true);
8635 :
8636 : /* Parm "n". */
8637 4 : tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8638 : get_identifier ("n"),
8639 : integer_type_node);
8640 4 : DECL_CONTEXT (n) = fndecl;
8641 :
8642 4 : region_model_manager mgr;
8643 4 : test_region_model_context ctxt;
8644 4 : region_model model (&mgr);
8645 :
8646 : /* Push 5 stack frames for "factorial" (simulating recursion), each
frame getting its own region and svalue for parm "n". */
8647 4 : auto_vec<const region *> parm_regs;
8648 4 : auto_vec<const svalue *> parm_svals;
8649 24 : for (int depth = 0; depth < 5; depth++)
8650 : {
8651 20 : const region *frame_n_reg
8652 20 : = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
8653 : nullptr, nullptr, &ctxt);
8654 20 : const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
8655 20 : parm_regs.safe_push (parm_n_reg);
8656 :
8657 20 : ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
8658 20 : const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
8659 20 : parm_svals.safe_push (sval_n);
8660 : }
8661 :
8662 : /* Verify that we can recognize that the regions are the parms,
8663 : at every depth. */
8664 24 : for (int depth = 0; depth < 5; depth++)
8665 : {
8666 20 : {
8667 20 : svalue_set visited;
8668 40 : ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
8669 : &visited,
8670 : nullptr),
8671 : path_var (n, depth + 1));
8672 20 : }
8673 : /* ...and that we can lookup lvalues for locals for all frames,
8674 : not just the top. */
8675 20 : ASSERT_EQ (model.get_lvalue (path_var (n, depth), nullptr),
8676 : parm_regs[depth]);
8677 : /* ...and that we can locate the svalues. */
8678 20 : {
8679 20 : svalue_set visited;
8680 40 : ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
8681 : &visited,
8682 : nullptr),
8683 : path_var (n, depth + 1));
8684 20 : }
8685 : }
8686 4 : }
8687 :
8688 : /* Ensure that region_model::operator== works as expected. */
8689 :
8690 : static void
8691 4 : test_equality_1 ()
8692 : {
8693 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8694 4 : tree int_17 = build_int_cst (integer_type_node, 17);
8695 :
8696 : /* Verify that "empty" region_model instances are equal to each other. */
8697 4 : region_model_manager mgr;
8698 4 : region_model model0 (&mgr);
8699 4 : region_model model1 (&mgr);
8700 4 : ASSERT_EQ (model0, model1);
8701 :
8702 : /* Verify that setting state in model0 makes the models non-equal. */
8703 4 : tree x = build_global_decl ("x", integer_type_node);
8704 4 : model0.set_value (x, int_42, nullptr);
8705 4 : ASSERT_EQ (model0.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
8706 4 : ASSERT_NE (model0, model1);
8707 :
8708 : /* Verify the copy-ctor. */
8709 4 : region_model model2 (model0);
8710 4 : ASSERT_EQ (model0, model2);
8711 4 : ASSERT_EQ (model2.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
8712 4 : ASSERT_NE (model1, model2);
8713 :
8714 : /* Verify that models obtained from copy-ctor are independently editable
8715 : w/o affecting the original model. */
8716 4 : model2.set_value (x, int_17, nullptr);
8717 4 : ASSERT_NE (model0, model2);
8718 4 : ASSERT_EQ (model2.get_rvalue (x, nullptr)->maybe_get_constant (), int_17);
8719 4 : ASSERT_EQ (model0.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
8720 4 : }
8721 :
8722 : /* Verify that region models for
8723 : x = 42; y = 113;
8724 : and
8725 : y = 113; x = 42;
8726 : are equal, i.e. that binding order does not affect model
equality. */
8727 :
8728 : static void
8729 4 : test_canonicalization_2 ()
8730 : {
8731 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8732 4 : tree int_113 = build_int_cst (integer_type_node, 113);
8733 4 : tree x = build_global_decl ("x", integer_type_node);
8734 4 : tree y = build_global_decl ("y", integer_type_node);
8735 :
8736 4 : region_model_manager mgr;
8737 4 : region_model model0 (&mgr);
8738 4 : model0.set_value (model0.get_lvalue (x, nullptr),
8739 : model0.get_rvalue (int_42, nullptr),
8740 : nullptr);
8741 4 : model0.set_value (model0.get_lvalue (y, nullptr),
8742 : model0.get_rvalue (int_113, nullptr),
8743 : nullptr);
8744 :
8745 4 : region_model model1 (&mgr);
8746 4 : model1.set_value (model1.get_lvalue (y, nullptr),
8747 : model1.get_rvalue (int_113, nullptr),
8748 : nullptr);
8749 4 : model1.set_value (model1.get_lvalue (x, nullptr),
8750 : model1.get_rvalue (int_42, nullptr),
8751 : nullptr);
8752 :
8753 4 : ASSERT_EQ (model0, model1);
8754 4 : }
8755 :
8756 : /* Verify that constraints for
8757 : x > 3 && y > 42
8758 : and
8759 : y > 42 && x > 3
8760 : are equal after canonicalization. */
8761 :
8762 : static void
8763 4 : test_canonicalization_3 ()
8764 : {
8765 4 : tree int_3 = build_int_cst (integer_type_node, 3);
8766 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8767 4 : tree x = build_global_decl ("x", integer_type_node);
8768 4 : tree y = build_global_decl ("y", integer_type_node);
8769 :
8770 4 : region_model_manager mgr;
8771 4 : region_model model0 (&mgr);
8772 4 : model0.add_constraint (x, GT_EXPR, int_3, nullptr);
8773 4 : model0.add_constraint (y, GT_EXPR, int_42, nullptr);
8774 :
8775 4 : region_model model1 (&mgr);
8776 4 : model1.add_constraint (y, GT_EXPR, int_42, nullptr);
8777 4 : model1.add_constraint (x, GT_EXPR, int_3, nullptr);
8778 :
8779 4 : model0.canonicalize (); /* Normalize both models before comparing. */
8780 4 : model1.canonicalize ();
8781 4 : ASSERT_EQ (model0, model1);
8782 4 : }
8783 :
8784 : /* Verify that we can canonicalize a model containing NaN and other real
8785 : constants without crashing (no specific result is asserted). */
8786 :
8787 : static void
8788 4 : test_canonicalization_4 ()
8789 : {
8790 4 : auto_vec<tree> csts;
8791 4 : append_interesting_constants (&csts);
8792 :
8793 4 : region_model_manager mgr;
8794 4 : region_model model (&mgr);
8795 :
8796 60 : for (tree cst : csts)
8797 48 : model.get_rvalue (cst, nullptr); /* Create an svalue for each constant. */
8798 :
8799 4 : model.canonicalize ();
8800 4 : }
8801 :
8802 : /* Assert that if we have two region_model instances
8803 : with values VAL_A and VAL_B for EXPR that they are
8804 : mergable. Write the merged model to *OUT_MERGED_MODEL,
8805 : and the merged svalue ptr to *OUT_MERGED_SVALUE.
8806 : If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
8807 : for that region_model. */
8808 :
8809 : static void
8810 20 : assert_region_models_merge (tree expr, tree val_a, tree val_b,
8811 : region_model *out_merged_model,
8812 : const svalue **out_merged_svalue)
8813 : {
8814 20 : region_model_manager *mgr = out_merged_model->get_manager ();
8815 20 : program_point point (program_point::origin (*mgr));
8816 20 : test_region_model_context ctxt;
8817 20 : region_model model0 (mgr);
8818 20 : region_model model1 (mgr);
8819 20 : if (val_a)
8820 16 : model0.set_value (model0.get_lvalue (expr, &ctxt),
8821 : model0.get_rvalue (val_a, &ctxt),
8822 : &ctxt);
8823 20 : if (val_b)
8824 16 : model1.set_value (model1.get_lvalue (expr, &ctxt),
8825 : model1.get_rvalue (val_b, &ctxt),
8826 : &ctxt);
8827 :
8828 : /* They should be mergeable. */
8829 20 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
8830 20 : *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
8831 20 : }
8832 :
8833 : /* Verify that we can merge region_model instances. */
8834 :
8835 : static void
8836 4 : test_state_merging ()
8837 : {
8838 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8839 4 : tree int_113 = build_int_cst (integer_type_node, 113);
8840 4 : tree x = build_global_decl ("x", integer_type_node);
8841 4 : tree y = build_global_decl ("y", integer_type_node);
8842 4 : tree z = build_global_decl ("z", integer_type_node);
8843 4 : tree p = build_global_decl ("p", ptr_type_node);
8844 :
8845 4 : tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
8846 4 : tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
8847 :
8848 4 : auto_vec <tree> param_types;
8849 4 : tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
8850 4 : allocate_struct_function (test_fndecl, true);
8851 :
8852 : /* Param "a". */
8853 4 : tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8854 : get_identifier ("a"),
8855 : integer_type_node);
8856 4 : DECL_CONTEXT (a) = test_fndecl;
8857 4 : tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
8858 :
8859 : /* Param "q", a pointer. */
8860 4 : tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8861 : get_identifier ("q"),
8862 : ptr_type_node);
8863 4 : DECL_CONTEXT (q) = test_fndecl;
8864 :
8865 4 : region_model_manager mgr;
8866 4 : program_point point (program_point::origin (mgr));
8867 :
8868 4 : {
8869 4 : region_model model0 (&mgr);
8870 4 : region_model model1 (&mgr);
8871 4 : region_model merged (&mgr);
8872 : /* Verify empty models can be merged. */
8873 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8874 4 : ASSERT_EQ (model0, merged);
8875 4 : }
8876 :
8877 : /* Verify that we can merge two contradictory constraints on the
8878 : value for a global. */
8879 : /* TODO: verify that the merged model doesn't have a value for
8880 : the global */
8881 4 : {
8882 4 : region_model model0 (&mgr);
8883 4 : region_model model1 (&mgr);
8884 4 : region_model merged (&mgr);
8885 4 : test_region_model_context ctxt;
8886 4 : model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
8887 4 : model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
8888 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8889 4 : ASSERT_NE (model0, merged);
8890 4 : ASSERT_NE (model1, merged);
8891 4 : }
8892 :
8893 : /* Verify handling of a PARM_DECL. */
8894 4 : {
8895 4 : test_region_model_context ctxt;
8896 4 : region_model model0 (&mgr);
8897 4 : region_model model1 (&mgr);
8898 4 : ASSERT_EQ (model0.get_stack_depth (), 0);
8899 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
8900 : nullptr, nullptr, &ctxt);
8901 4 : ASSERT_EQ (model0.get_stack_depth (), 1);
8902 4 : model1.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
8903 : nullptr, nullptr, &ctxt);
8904 :
8905 4 : placeholder_svalue test_sval (mgr.alloc_symbol_id (),
8906 4 : integer_type_node, "test sval");
8907 4 : model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
8908 4 : model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
8909 4 : ASSERT_EQ (model0, model1);
8910 :
8911 : /* They should be mergeable, and the result should be the same. */
8912 4 : region_model merged (&mgr);
8913 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8914 4 : ASSERT_EQ (model0, merged);
8915 : /* In particular, "a" should have the placeholder value. */
8916 4 : ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
8917 4 : }
8918 :
8919 : /* Verify handling of a global. */
8920 4 : {
8921 4 : test_region_model_context ctxt;
8922 4 : region_model model0 (&mgr);
8923 4 : region_model model1 (&mgr);
8924 :
8925 4 : placeholder_svalue test_sval (mgr.alloc_symbol_id (),
8926 4 : integer_type_node, "test sval");
8927 4 : model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
8928 4 : model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
8929 4 : ASSERT_EQ (model0, model1);
8930 :
8931 : /* They should be mergeable, and the result should be the same. */
8932 4 : region_model merged (&mgr);
8933 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8934 4 : ASSERT_EQ (model0, merged);
8935 : /* In particular, "x" should have the placeholder value. */
8936 4 : ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
8937 4 : }
8938 :
8939 : /* Use global-handling to verify various combinations of values. */
8940 :
8941 : /* Two equal constant values. */
8942 4 : {
8943 4 : region_model merged (&mgr);
8944 4 : const svalue *merged_x_sval;
8945 4 : assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
8946 :
8947 : /* In particular, there should be a constant value for "x". */
8948 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
8949 4 : ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
8950 : int_42);
8951 4 : }
8952 :
8953 : /* Two non-equal constant values. */
8954 4 : {
8955 4 : region_model merged (&mgr);
8956 4 : const svalue *merged_x_sval;
8957 4 : assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
8958 :
8959 : /* In particular, there should be a "widening" value for "x". */
8960 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
8961 4 : }
8962 :
8963 : /* Initial and constant. */
8964 4 : {
8965 4 : region_model merged (&mgr);
8966 4 : const svalue *merged_x_sval;
8967 4 : assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
8968 :
8969 : /* In particular, there should be an unknown value for "x". */
8970 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8971 4 : }
8972 :
8973 : /* Constant and initial. */
8974 4 : {
8975 4 : region_model merged (&mgr);
8976 4 : const svalue *merged_x_sval;
8977 4 : assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
8978 :
8979 : /* In particular, there should be an unknown value for "x". */
8980 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8981 4 : }
8982 :
8983 : /* Unknown and constant. */
8984 : // TODO
8985 :
8986 : /* Pointers: NULL and NULL. */
8987 : // TODO
8988 :
8989 : /* Pointers: NULL and non-NULL. */
8990 : // TODO
8991 :
8992 : /* Pointers: non-NULL and non-NULL: ptr to a local. */
8993 4 : {
8994 4 : region_model model0 (&mgr);
8995 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
8996 : nullptr, nullptr, nullptr);
8997 4 : model0.set_value (model0.get_lvalue (p, nullptr),
8998 : model0.get_rvalue (addr_of_a, nullptr), nullptr);
8999 :
9000 4 : region_model model1 (model0);
9001 4 : ASSERT_EQ (model0, model1);
9002 :
9003 : /* They should be mergeable, and the result should be the same. */
9004 4 : region_model merged (&mgr);
9005 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9006 4 : ASSERT_EQ (model0, merged);
9007 4 : }
9008 :
9009 : /* Pointers: non-NULL and non-NULL: ptr to a global. */
9010 4 : {
9011 4 : region_model merged (&mgr);
9012 : /* p == &y in both input models. */
9013 4 : const svalue *merged_p_sval;
9014 4 : assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
9015 : &merged_p_sval);
9016 :
9017 : /* We should get p == &y in the merged model. */
9018 4 : ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
9019 4 : const region_svalue *merged_p_ptr
9020 4 : = merged_p_sval->dyn_cast_region_svalue ();
9021 4 : const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
9022 4 : ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, nullptr));
9023 4 : }
9024 :
9025 : /* Pointers: non-NULL ptrs to different globals should not merge;
9026 : see e.g. gcc.dg/analyzer/torture/uninit-pr108725.c */
9027 4 : {
9028 4 : region_model merged_model (&mgr);
9029 4 : program_point point (program_point::origin (mgr));
9030 4 : test_region_model_context ctxt;
9031 : /* x == &y vs x == &z in the input models; these are actually casts
9032 : of the ptrs to "int". */
9033 4 : region_model model0 (&mgr);
9034 4 : region_model model1 (&mgr);
9035 4 : model0.set_value (model0.get_lvalue (x, &ctxt),
9036 : model0.get_rvalue (addr_of_y, &ctxt),
9037 : &ctxt);
9038 4 : model1.set_value (model1.get_lvalue (x, &ctxt),
9039 : model1.get_rvalue (addr_of_z, &ctxt),
9040 : &ctxt);
9041 : /* They should not be mergeable. */
9042 4 : ASSERT_FALSE (model0.can_merge_with_p (model1, point, &merged_model));
9043 4 : }
9044 :
9045 : /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
9046 4 : {
9047 4 : test_region_model_context ctxt;
9048 4 : region_model model0 (&mgr);
9049 4 : tree size = build_int_cst (size_type_node, 1024);
9050 4 : const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
9051 4 : const region *new_reg
9052 4 : = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
9053 4 : const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
9054 4 : model0.set_value (model0.get_lvalue (p, &ctxt),
9055 : ptr_sval, &ctxt);
9056 :
9057 4 : region_model model1 (model0);
9058 :
9059 4 : ASSERT_EQ (model0, model1);
9060 :
9061 4 : region_model merged (&mgr);
9062 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9063 :
9064 : /* The merged model ought to be identical. */
9065 4 : ASSERT_EQ (model0, merged);
9066 4 : }
9067 :
9068 : /* Two regions sharing the same placeholder svalue should continue sharing
9069 : it after self-merger. */
9070 4 : {
9071 4 : test_region_model_context ctxt;
9072 4 : region_model model0 (&mgr);
9073 4 : placeholder_svalue placeholder_sval (mgr.alloc_symbol_id (),
9074 4 : integer_type_node, "test");
9075 4 : model0.set_value (model0.get_lvalue (x, &ctxt),
9076 : &placeholder_sval, &ctxt);
9077 4 : model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
9078 4 : region_model model1 (model0);
9079 :
9080 : /* They should be mergeable, and the result should be the same. */
9081 4 : region_model merged (&mgr);
9082 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9083 4 : ASSERT_EQ (model0, merged);
9084 :
9085 : /* In particular, we should have x == y. */
9086 4 : ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
9087 : tristate (tristate::TS_TRUE));
9088 4 : }
9089 :
9090 4 : {
9091 4 : region_model model0 (&mgr);
9092 4 : region_model model1 (&mgr);
9093 4 : test_region_model_context ctxt;
9094 4 : model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
9095 4 : model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
9096 4 : region_model merged (&mgr);
9097 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9098 4 : }
9099 :
9100 4 : {
9101 4 : region_model model0 (&mgr);
9102 4 : region_model model1 (&mgr);
9103 4 : test_region_model_context ctxt;
9104 4 : model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
9105 4 : model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
9106 4 : model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
9107 4 : region_model merged (&mgr);
9108 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9109 4 : }
9110 :
9111 : // TODO: what can't we merge? need at least one such test
9112 :
9113 : /* TODO: various things
9114 : - heap regions
9115 : - value merging:
9116 : - every combination, but in particular
9117 : - pairs of regions
9118 : */
9119 :
9120 : /* Views. */
9121 4 : {
9122 4 : test_region_model_context ctxt;
9123 4 : region_model model0 (&mgr);
9124 :
9125 4 : const region *x_reg = model0.get_lvalue (x, &ctxt);
9126 4 : const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
9127 4 : model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
9128 :
9129 4 : region_model model1 (model0);
9130 4 : ASSERT_EQ (model1, model0);
9131 :
9132 : /* They should be mergeable, and the result should be the same. */
9133 4 : region_model merged (&mgr);
9134 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9135 4 : }
9136 :
9137 : /* Verify that we can merge a model in which a local in an older stack
9138 : frame points to a local in a more recent stack frame. */
9139 4 : {
9140 4 : region_model model0 (&mgr);
9141 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
9142 : nullptr, nullptr, nullptr);
9143 4 : const region *q_in_first_frame = model0.get_lvalue (q, nullptr);
9144 :
9145 : /* Push a second frame. */
9146 4 : const region *reg_2nd_frame
9147 4 : = model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
9148 : nullptr, nullptr, nullptr);
9149 :
9150 : /* Have a pointer in the older frame point to a local in the
9151 : more recent frame. */
9152 4 : const svalue *sval_ptr = model0.get_rvalue (addr_of_a, nullptr);
9153 4 : model0.set_value (q_in_first_frame, sval_ptr, nullptr);
9154 :
9155 : /* Verify that it's pointing at the newer frame. */
9156 4 : const region *reg_pointee = sval_ptr->maybe_get_region ();
9157 4 : ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
9158 :
9159 4 : model0.canonicalize ();
9160 :
9161 4 : region_model model1 (model0);
9162 4 : ASSERT_EQ (model0, model1);
9163 :
9164 : /* They should be mergeable, and the result should be the same
9165 : (after canonicalization, at least). */
9166 4 : region_model merged (&mgr);
9167 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9168 4 : merged.canonicalize ();
9169 4 : ASSERT_EQ (model0, merged);
9170 4 : }
9171 :
9172 : /* Verify that we can merge a model in which a local points to a global. */
9173 4 : {
9174 4 : region_model model0 (&mgr);
9175 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
9176 : nullptr, nullptr, nullptr);
9177 4 : model0.set_value (model0.get_lvalue (q, nullptr),
9178 : model0.get_rvalue (addr_of_y, nullptr), nullptr);
9179 :
9180 4 : region_model model1 (model0);
9181 4 : ASSERT_EQ (model0, model1);
9182 :
9183 : /* They should be mergeable, and the result should be the same
9184 : (after canonicalization, at least). */
9185 4 : region_model merged (&mgr);
9186 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9187 4 : ASSERT_EQ (model0, merged);
9188 4 : }
9189 4 : }
9190 :
9191 : /* Verify that constraints are correctly merged when merging region_model
9192 : instances. */
9193 :
9194 : static void
9195 4 : test_constraint_merging ()
9196 : {
9197 4 : tree int_0 = integer_zero_node;
9198 4 : tree int_5 = build_int_cst (integer_type_node, 5);
9199 4 : tree x = build_global_decl ("x", integer_type_node);
9200 4 : tree y = build_global_decl ("y", integer_type_node);
9201 4 : tree z = build_global_decl ("z", integer_type_node);
9202 4 : tree n = build_global_decl ("n", integer_type_node);
9203 :
9204 4 : region_model_manager mgr;
9205 4 : test_region_model_context ctxt;
9206 :
9207 : /* model0: 0 <= (x == y) < n. */
9208 4 : region_model model0 (&mgr);
9209 4 : model0.add_constraint (x, EQ_EXPR, y, &ctxt);
9210 4 : model0.add_constraint (x, GE_EXPR, int_0, nullptr);
9211 4 : model0.add_constraint (x, LT_EXPR, n, nullptr);
9212 :
9213 : /* model1: z != 5 && (0 <= x < n). */
9214 4 : region_model model1 (&mgr);
9215 4 : model1.add_constraint (z, NE_EXPR, int_5, nullptr);
9216 4 : model1.add_constraint (x, GE_EXPR, int_0, nullptr);
9217 4 : model1.add_constraint (x, LT_EXPR, n, nullptr);
9218 :
9219 : /* They should be mergeable; the merged constraints should
9220 : be: (0 <= x < n). */
9221 4 : program_point point (program_point::origin (mgr));
9222 4 : region_model merged (&mgr);
9223 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9224 :
9225 4 : ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
9226 : tristate (tristate::TS_TRUE));
9227 4 : ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
9228 : tristate (tristate::TS_TRUE));
9229 :
9230 4 : ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
9231 : tristate (tristate::TS_UNKNOWN));
9232 4 : ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
9233 : tristate (tristate::TS_UNKNOWN));
9234 4 : }
9235 :
/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  const supernode *snode = nullptr;
  tree int_0 = integer_zero_node;
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = integer_one_node;
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  /* Build WIDENING (snode, 0, 1): a value that was 0 initially and 1
     after one iteration; its direction is therefore ascending.  */
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, snode,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);

  /* The widened value starts at 0 and ascends, so comparisons against
     constants at or below the start value can be decided; those that
     depend on the (unknown) upper bound are TS_UNKNOWN.  */

  /* "WIDENING < CST".  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* "WIDENING <= CST".  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* "WIDENING > CST".  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* "WIDENING >= CST".  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* "WIDENING == CST".  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* "WIDENING != CST".  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}
9312 :
/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:.
     i_15 = 0;
     goto <bb 4>;

   <bb 4> :
     i_11 = PHI <i_15(2), i_23(3)>
     if (i_11 <= 255)
       goto <bb 3>;
     else
       goto [AFTER LOOP]

   <bb 3> :
     [LOOP BODY]
     i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
         [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
        F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = integer_zero_node;
  tree int_1 = integer_one_node;
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256; with the widened value this also implies
     the lower bound i >= 0.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
}
9425 :
9426 : /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
9427 : all cast pointers to that region are also known to be non-NULL. */
9428 :
9429 : static void
9430 4 : test_malloc_constraints ()
9431 : {
9432 4 : region_model_manager mgr;
9433 4 : region_model model (&mgr);
9434 4 : tree p = build_global_decl ("p", ptr_type_node);
9435 4 : tree char_star = build_pointer_type (char_type_node);
9436 4 : tree q = build_global_decl ("q", char_star);
9437 4 : tree null_ptr = build_int_cst (ptr_type_node, 0);
9438 :
9439 4 : const svalue *size_in_bytes
9440 4 : = mgr.get_or_create_unknown_svalue (size_type_node);
9441 4 : const region *reg
9442 4 : = model.get_or_create_region_for_heap_alloc (size_in_bytes, nullptr);
9443 4 : const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
9444 4 : model.set_value (model.get_lvalue (p, nullptr), sval, nullptr);
9445 4 : model.set_value (q, p, nullptr);
9446 :
9447 4 : ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
9448 4 : ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
9449 4 : ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
9450 4 : ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
9451 :
9452 4 : model.add_constraint (p, NE_EXPR, null_ptr, nullptr);
9453 :
9454 4 : ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
9455 4 : ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
9456 4 : ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
9457 4 : ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
9458 4 : }
9459 :
9460 : /* Smoketest of getting and setting the value of a variable. */
9461 :
9462 : static void
9463 4 : test_var ()
9464 : {
9465 : /* "int i;" */
9466 4 : tree i = build_global_decl ("i", integer_type_node);
9467 :
9468 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9469 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
9470 :
9471 4 : region_model_manager mgr;
9472 4 : region_model model (&mgr);
9473 :
9474 4 : const region *i_reg = model.get_lvalue (i, nullptr);
9475 4 : ASSERT_EQ (i_reg->get_kind (), RK_DECL);
9476 :
9477 : /* Reading "i" should give a symbolic "initial value". */
9478 4 : const svalue *sval_init = model.get_rvalue (i, nullptr);
9479 4 : ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
9480 4 : ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
9481 : /* ..and doing it again should give the same "initial value". */
9482 4 : ASSERT_EQ (model.get_rvalue (i, nullptr), sval_init);
9483 :
9484 : /* "i = 17;". */
9485 4 : model.set_value (i, int_17, nullptr);
9486 4 : ASSERT_EQ (model.get_rvalue (i, nullptr),
9487 : model.get_rvalue (int_17, nullptr));
9488 :
9489 : /* "i = -3;". */
9490 4 : model.set_value (i, int_m3, nullptr);
9491 4 : ASSERT_EQ (model.get_rvalue (i, nullptr),
9492 : model.get_rvalue (int_m3, nullptr));
9493 :
9494 : /* Verify get_offset for "i". */
9495 4 : {
9496 4 : region_offset offset = i_reg->get_offset (&mgr);
9497 4 : ASSERT_EQ (offset.get_base_region (), i_reg);
9498 4 : ASSERT_EQ (offset.get_bit_offset (), 0);
9499 : }
9500 4 : }
9501 :
9502 : static void
9503 4 : test_array_2 ()
9504 : {
9505 : /* "int arr[10];" */
9506 4 : tree tlen = size_int (10);
9507 4 : tree arr_type
9508 4 : = build_array_type (integer_type_node, build_index_type (tlen));
9509 4 : tree arr = build_global_decl ("arr", arr_type);
9510 :
9511 : /* "int i;" */
9512 4 : tree i = build_global_decl ("i", integer_type_node);
9513 :
9514 4 : tree int_0 = integer_zero_node;
9515 4 : tree int_1 = integer_one_node;
9516 :
9517 4 : tree arr_0 = build4 (ARRAY_REF, integer_type_node,
9518 : arr, int_0, NULL_TREE, NULL_TREE);
9519 4 : tree arr_1 = build4 (ARRAY_REF, integer_type_node,
9520 : arr, int_1, NULL_TREE, NULL_TREE);
9521 4 : tree arr_i = build4 (ARRAY_REF, integer_type_node,
9522 : arr, i, NULL_TREE, NULL_TREE);
9523 :
9524 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9525 4 : tree int_42 = build_int_cst (integer_type_node, 42);
9526 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
9527 :
9528 4 : region_model_manager mgr;
9529 4 : region_model model (&mgr);
9530 : /* "arr[0] = 17;". */
9531 4 : model.set_value (arr_0, int_17, nullptr);
9532 : /* "arr[1] = -3;". */
9533 4 : model.set_value (arr_1, int_m3, nullptr);
9534 :
9535 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr),
9536 : model.get_rvalue (int_17, nullptr));
9537 4 : ASSERT_EQ (model.get_rvalue (arr_1, nullptr),
9538 : model.get_rvalue (int_m3, nullptr));
9539 :
9540 : /* Overwrite a pre-existing binding: "arr[1] = 42;". */
9541 4 : model.set_value (arr_1, int_42, nullptr);
9542 4 : ASSERT_EQ (model.get_rvalue (arr_1, nullptr),
9543 : model.get_rvalue (int_42, nullptr));
9544 :
9545 : /* Verify get_offset for "arr[0]". */
9546 4 : {
9547 4 : const region *arr_0_reg = model.get_lvalue (arr_0, nullptr);
9548 4 : region_offset offset = arr_0_reg->get_offset (&mgr);
9549 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9550 4 : ASSERT_EQ (offset.get_bit_offset (), 0);
9551 : }
9552 :
9553 : /* Verify get_offset for "arr[1]". */
9554 4 : {
9555 4 : const region *arr_1_reg = model.get_lvalue (arr_1, nullptr);
9556 4 : region_offset offset = arr_1_reg->get_offset (&mgr);
9557 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9558 4 : ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
9559 : }
9560 :
9561 : /* Verify get_offset for "arr[i]". */
9562 4 : {
9563 4 : const region *arr_i_reg = model.get_lvalue (arr_i, nullptr);
9564 4 : region_offset offset = arr_i_reg->get_offset (&mgr);
9565 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9566 4 : const svalue *offset_sval = offset.get_symbolic_byte_offset ();
9567 4 : if (const svalue *cast = offset_sval->maybe_undo_cast ())
9568 4 : offset_sval = cast;
9569 4 : ASSERT_EQ (offset_sval->get_kind (), SK_BINOP);
9570 : }
9571 :
9572 : /* "arr[i] = i;" - this should remove the earlier bindings. */
9573 4 : model.set_value (arr_i, i, nullptr);
9574 4 : ASSERT_EQ (model.get_rvalue (arr_i, nullptr), model.get_rvalue (i, nullptr));
9575 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr)->get_kind (), SK_UNKNOWN);
9576 :
9577 : /* "arr[0] = 17;" - this should remove the arr[i] binding. */
9578 4 : model.set_value (arr_0, int_17, nullptr);
9579 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr),
9580 : model.get_rvalue (int_17, nullptr));
9581 4 : ASSERT_EQ (model.get_rvalue (arr_i, nullptr)->get_kind (), SK_UNKNOWN);
9582 4 : }
9583 :
9584 : /* Smoketest of dereferencing a pointer via MEM_REF. */
9585 :
9586 : static void
9587 4 : test_mem_ref ()
9588 : {
9589 : /*
9590 : x = 17;
9591 : p = &x;
9592 : *p;
9593 : */
9594 4 : tree x = build_global_decl ("x", integer_type_node);
9595 4 : tree int_star = build_pointer_type (integer_type_node);
9596 4 : tree p = build_global_decl ("p", int_star);
9597 :
9598 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9599 4 : tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
9600 4 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
9601 4 : tree offset_0 = build_int_cst (ptype, 0);
9602 4 : tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
9603 :
9604 4 : region_model_manager mgr;
9605 4 : region_model model (&mgr);
9606 :
9607 : /* "x = 17;". */
9608 4 : model.set_value (x, int_17, nullptr);
9609 :
9610 : /* "p = &x;". */
9611 4 : model.set_value (p, addr_of_x, nullptr);
9612 :
9613 4 : const svalue *sval = model.get_rvalue (star_p, nullptr);
9614 4 : ASSERT_EQ (sval->maybe_get_constant (), int_17);
9615 4 : }
9616 :
9617 : /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
9618 : Analogous to this code:
9619 : void test_6 (int a[10])
9620 : {
9621 : __analyzer_eval (a[3] == 42); [should be UNKNOWN]
9622 : a[3] = 42;
9623 : __analyzer_eval (a[3] == 42); [should be TRUE]
9624 : }
9625 : from data-model-1.c, which looks like this at the gimple level:
9626 : # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
9627 : int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
9628 : int _2 = *_1; # MEM_REF
9629 : _Bool _3 = _2 == 42;
9630 : int _4 = (int) _3;
9631 : __analyzer_eval (_4);
9632 :
9633 : # a[3] = 42;
9634 : int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
9635 : *_5 = 42; # MEM_REF
9636 :
9637 : # __analyzer_eval (a[3] == 42); [should be TRUE]
9638 : int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
9639 : int _7 = *_6; # MEM_REF
9640 : _Bool _8 = _7 == 42;
9641 : int _9 = (int) _8;
9642 : __analyzer_eval (_9); */
9643 :
9644 : static void
9645 4 : test_POINTER_PLUS_EXPR_then_MEM_REF ()
9646 : {
9647 4 : tree int_star = build_pointer_type (integer_type_node);
9648 4 : tree a = build_global_decl ("a", int_star);
9649 4 : tree offset_12 = build_int_cst (size_type_node, 12);
9650 4 : tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
9651 4 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
9652 4 : tree offset_0 = build_int_cst (ptype, 0);
9653 4 : tree mem_ref = build2 (MEM_REF, integer_type_node,
9654 : pointer_plus_expr, offset_0);
9655 4 : region_model_manager mgr;
9656 4 : region_model m (&mgr);
9657 :
9658 4 : tree int_42 = build_int_cst (integer_type_node, 42);
9659 4 : m.set_value (mem_ref, int_42, nullptr);
9660 4 : ASSERT_EQ (m.get_rvalue (mem_ref, nullptr)->maybe_get_constant (), int_42);
9661 4 : }
9662 :
9663 : /* Verify that malloc works. */
9664 :
9665 : static void
9666 4 : test_malloc ()
9667 : {
9668 4 : tree int_star = build_pointer_type (integer_type_node);
9669 4 : tree p = build_global_decl ("p", int_star);
9670 4 : tree n = build_global_decl ("n", integer_type_node);
9671 4 : tree n_times_4 = build2 (MULT_EXPR, size_type_node,
9672 : n, build_int_cst (size_type_node, 4));
9673 :
9674 4 : region_model_manager mgr;
9675 4 : test_region_model_context ctxt;
9676 4 : region_model model (&mgr);
9677 :
9678 : /* "p = malloc (n * 4);". */
9679 4 : const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
9680 4 : const region *reg
9681 4 : = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
9682 4 : const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
9683 4 : model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9684 4 : ASSERT_EQ (model.get_capacity (reg), size_sval);
9685 4 : }
9686 :
9687 : /* Verify that alloca works. */
9688 :
9689 : static void
9690 4 : test_alloca ()
9691 : {
9692 4 : auto_vec <tree> param_types;
9693 4 : tree fndecl = make_fndecl (integer_type_node,
9694 : "test_fn",
9695 : param_types);
9696 4 : allocate_struct_function (fndecl, true);
9697 :
9698 :
9699 4 : tree int_star = build_pointer_type (integer_type_node);
9700 4 : tree p = build_global_decl ("p", int_star);
9701 4 : tree n = build_global_decl ("n", integer_type_node);
9702 4 : tree n_times_4 = build2 (MULT_EXPR, size_type_node,
9703 : n, build_int_cst (size_type_node, 4));
9704 :
9705 4 : region_model_manager mgr;
9706 4 : test_region_model_context ctxt;
9707 4 : region_model model (&mgr);
9708 :
9709 : /* Push stack frame. */
9710 4 : const region *frame_reg
9711 4 : = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
9712 : nullptr, nullptr, &ctxt);
9713 : /* "p = alloca (n * 4);". */
9714 4 : const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
9715 4 : const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
9716 4 : ASSERT_EQ (reg->get_parent_region (), frame_reg);
9717 4 : const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
9718 4 : model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9719 4 : ASSERT_EQ (model.get_capacity (reg), size_sval);
9720 :
9721 : /* Verify that the pointers to the alloca region are replaced by
9722 : poisoned values when the frame is popped. */
9723 4 : model.pop_frame (nullptr, nullptr, &ctxt, nullptr);
9724 4 : ASSERT_EQ (model.get_rvalue (p, nullptr)->get_kind (), SK_POISONED);
9725 4 : }
9726 :
9727 : /* Verify that svalue::involves_p works. */
9728 :
9729 : static void
9730 4 : test_involves_p ()
9731 : {
9732 4 : region_model_manager mgr;
9733 4 : tree int_star = build_pointer_type (integer_type_node);
9734 4 : tree p = build_global_decl ("p", int_star);
9735 4 : tree q = build_global_decl ("q", int_star);
9736 :
9737 4 : test_region_model_context ctxt;
9738 4 : region_model model (&mgr);
9739 4 : const svalue *p_init = model.get_rvalue (p, &ctxt);
9740 4 : const svalue *q_init = model.get_rvalue (q, &ctxt);
9741 :
9742 4 : ASSERT_TRUE (p_init->involves_p (p_init));
9743 4 : ASSERT_FALSE (p_init->involves_p (q_init));
9744 :
9745 4 : const region *star_p_reg = mgr.get_symbolic_region (p_init);
9746 4 : const region *star_q_reg = mgr.get_symbolic_region (q_init);
9747 :
9748 4 : const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
9749 4 : const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
9750 :
9751 4 : ASSERT_TRUE (init_star_p->involves_p (p_init));
9752 4 : ASSERT_FALSE (p_init->involves_p (init_star_p));
9753 4 : ASSERT_FALSE (init_star_p->involves_p (q_init));
9754 4 : ASSERT_TRUE (init_star_q->involves_p (q_init));
9755 4 : ASSERT_FALSE (init_star_q->involves_p (p_init));
9756 4 : }
9757 :
/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  /* NOTE: the selftest framework runs only what is listed here;
     a new test function above must also be added to this list.  */
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}
9798 :
9799 : } // namespace selftest
9800 :
9801 : #endif /* CHECKING_P */
9802 :
9803 : } // namespace ana
9804 :
9805 : #endif /* #if ENABLE_ANALYZER */
|