flux_refineck/ghost_statements/points_to.rs

//! This module implements a points-to analysis for mutable references.
//!
//! We use the result of the analysis to insert ghost statements at the points where pointers (`ptr(l)`)
//! have to be converted to borrows (`&mut T`). For example, given the function
//! ```ignore
//! fn foo(mut x: i32, mut y: i32, b: bool) {
//!     let r;
//!     if b {
//!         r = &mut x
//!     } else {
//!         r = &mut y
//!     }
//! }
//! ```
//! In the then branch (resp. else) we know `r` must point to `x` (resp. `y`). During refinement checking,
//! we will give `r` the types `ptr(x)` and `ptr(y)` in each branch respectively. However, at the join point
//! `r` could point to either `x` or `y`. Thus, we use the result of the analysis to insert a ghost
//! statement at the end of each branch to convert the pointers to a borrow `&mut T` for a type `T` that
//! needs to be inferred.
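//!
//! Concretely, for `foo` above we insert a `PtrToRef` ghost statement for `r` at the end of each
//! branch. The following sketch is only illustrative; ghost statements are not surface syntax:
//! ```ignore
//! fn foo(mut x: i32, mut y: i32, b: bool) {
//!     let r;
//!     if b {
//!         r = &mut x;
//!         // ghost: PtrToRef(r) -- `r: ptr(x)` becomes `r: &mut T`
//!     } else {
//!         r = &mut y;
//!         // ghost: PtrToRef(r) -- `r: ptr(y)` becomes `r: &mut T`
//!     }
//! }
//! ```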
use std::{collections::VecDeque, fmt, iter, ops::Range};

use flux_common::tracked_span_bug;
use flux_middle::{
    global_env::GlobalEnv,
    queries::QueryResult,
    rty::{self, Loc},
};
use rustc_abi::FieldIdx;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hash::FxHashMap;
use rustc_index::{IndexSlice, IndexVec, bit_set::DenseBitSet};
use rustc_middle::{
    mir::{self, BasicBlock, TerminatorEdges, visit::Visitor},
    ty,
};
use rustc_mir_dataflow::{
    Analysis, JoinSemiLattice, ResultsVisitor,
    fmt::DebugWithContext,
    lattice::{FlatSet, HasBottom, HasTop},
    visit_reachable_results,
};

use super::GhostStatements;
use crate::ghost_statements::{GhostStatement, Point};

pub(crate) fn add_ghost_statements<'tcx>(
    stmts: &mut GhostStatements,
    genv: GlobalEnv<'_, 'tcx>,
    body: &mir::Body<'tcx>,
    fn_sig: Option<&rty::EarlyBinder<rty::PolyFnSig>>,
) -> QueryResult {
    let map = Map::new(body);
    let points_to = PointsToAnalysis::new(&map, fn_sig);
    let fixpoint = points_to.iterate_to_fixpoint(genv.tcx(), body, None);
    let mut analysis = fixpoint.analysis;
    let results = fixpoint.results;
    let mut visitor = CollectPointerToBorrows::new(&map, stmts, &results);
    visit_reachable_results(body, &mut analysis, &results, &mut visitor);

    Ok(())
}

/// This implements a points-to analysis for mutable references over a [`FlatSet`]. The analysis is
/// a may analysis. If you want to know whether a reference definitively points to a location, you
/// have to combine it with the result of a definitely initialized analysis. See the module level
/// documentation for more details.
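///
/// For instance, in the sketch below the analysis alone reports that `r` may point to `x` after
/// the `if`, even though `r` is uninitialized when `b` is false; only combined with a definitely
/// initialized analysis could one conclude that `r` definitively points to `x` (illustrative
/// example, not code from this crate):
/// ```ignore
/// fn may_point(mut x: i32, b: bool) {
///     let r;
///     if b {
///         r = &mut x; // here `r` points to `x`
///     }
///     // joining "points to `x`" with "uninitialized" still yields "points to `x`"
/// }
/// ```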
struct PointsToAnalysis<'a> {
    fn_sig: Option<&'a rty::EarlyBinder<rty::PolyFnSig>>,
    map: &'a Map,
}

impl<'a> PointsToAnalysis<'a> {
    fn new(map: &'a Map, fn_sig: Option<&'a rty::EarlyBinder<rty::PolyFnSig>>) -> Self {
        Self { fn_sig, map }
    }

    fn handle_statement(&self, statement: &mir::Statement, state: &mut State) {
        match &statement.kind {
            mir::StatementKind::Assign(box (target, rvalue)) => {
                self.handle_assign(*target, rvalue, state);
            }
            mir::StatementKind::StorageLive(local) | mir::StatementKind::StorageDead(local) => {
                // StorageLive leaves the local in an uninitialized state.
                // StorageDead makes it UB to access the local afterwards.
                state.flood_with(mir::Place::from(*local).as_ref(), self.map, FlatSet::BOTTOM);
            }
            mir::StatementKind::Deinit(box place) => {
                // Deinit makes the place uninitialized.
                state.flood_with(place.as_ref(), self.map, FlatSet::BOTTOM);
            }
            mir::StatementKind::Retag(..)
            | mir::StatementKind::Intrinsic(..)
            | mir::StatementKind::SetDiscriminant { .. }
            | mir::StatementKind::ConstEvalCounter
            | mir::StatementKind::Nop
            | mir::StatementKind::FakeRead(..)
            | mir::StatementKind::PlaceMention(..)
            | mir::StatementKind::Coverage(..)
            | mir::StatementKind::AscribeUserType(..)
            | mir::StatementKind::BackwardIncompatibleDropHint { .. } => {}
        }
    }

    fn handle_assign(&self, target: mir::Place, rvalue: &mir::Rvalue, state: &mut State) {
        match rvalue {
            mir::Rvalue::Use(operand) => {
                let result = self
                    .handle_operand(operand)
                    .map_or(PlaceOrValue::TOP, PlaceOrValue::Place);
                state.assign(target.as_ref(), result, self.map);
            }
            mir::Rvalue::Ref(_, _, place) => {
                let result = PlaceOrValue::Value(self.handle_ref(place, state));
                state.assign(target.as_ref(), result, self.map);
            }
            mir::Rvalue::Aggregate(box mir::AggregateKind::Tuple, operands) => {
                state.flood(target.as_ref(), self.map);
                let Some(target_idx) = self.map.find(target.as_ref()) else { return };
                for (elem, operand) in operands.iter_enumerated() {
                    let Some(rhs_idx) = self.handle_operand(operand) else { continue };
                    if let Some(field) = self.map.apply(target_idx, elem) {
                        state.insert_place_idx(field, rhs_idx, self.map);
                    }
                }
            }
            _ => {}
        }
    }

    fn handle_ref(&self, place: &mir::Place, state: &State) -> FlatSet<Loc> {
        match &place.projection[..] {
            [] => FlatSet::Elem(Loc::Local(place.local)),
            [mir::PlaceElem::Deref] => state.get(place.local.into(), self.map),
            _ => FlatSet::Top,
        }
    }

    fn handle_operand(&self, operand: &mir::Operand) -> Option<PlaceIndex> {
        match operand {
            mir::Operand::Constant(..) => None,
            mir::Operand::Copy(place) | mir::Operand::Move(place) => {
                // On move, we would ideally flood the place with bottom. But with the current
                // framework this is not possible (similar to `InterpCx::eval_operand`).
                self.map.find(place.as_ref())
            }
        }
    }

    /// The effect of a successful function call return should not be
    /// applied here, see [`Analysis::apply_call_return_effect`].
    fn handle_terminator<'mir, 'tcx>(
        &self,
        terminator: &'mir mir::Terminator<'tcx>,
        state: &mut State,
    ) -> TerminatorEdges<'mir, 'tcx> {
        match &terminator.kind {
            mir::TerminatorKind::TailCall { .. }
            | mir::TerminatorKind::Call { .. }
            | mir::TerminatorKind::InlineAsm { .. } => {
                // Effect is applied by `handle_call_return`.
            }
            mir::TerminatorKind::Drop { place, .. } => {
                state.flood_with(place.as_ref(), self.map, FlatSet::BOTTOM);
            }
            mir::TerminatorKind::SwitchInt { .. }
            | mir::TerminatorKind::Yield { .. }
            | mir::TerminatorKind::Goto { .. }
            | mir::TerminatorKind::UnwindResume
            | mir::TerminatorKind::UnwindTerminate(_)
            | mir::TerminatorKind::Return
            | mir::TerminatorKind::Unreachable
            | mir::TerminatorKind::Assert { .. }
            | mir::TerminatorKind::CoroutineDrop
            | mir::TerminatorKind::FalseEdge { .. }
            | mir::TerminatorKind::FalseUnwind { .. } => {
                // These terminators have no effect on the analysis.
            }
        }
        terminator.edges()
    }

    fn handle_call_return(&self, return_places: mir::CallReturnPlaces, state: &mut State) {
        return_places.for_each(|place| {
            state.flood(place.as_ref(), self.map);
        });
    }
}

impl<'tcx> rustc_mir_dataflow::Analysis<'tcx> for PointsToAnalysis<'_> {
    type Domain = State;

    const NAME: &'static str = "PointsToAnalysis";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        State { values: IndexVec::from_elem_n(FlatSet::BOTTOM, self.map.value_count) }
    }

    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // Since we are skipping the early binder, we use the early bound variables as locs instead
        // of fresh names. This is fine because the loc is only used as a unique value for the analysis.
        // We never have late bound locs.
        if let Some(fn_sig) = self.fn_sig {
            let fn_sig = fn_sig.as_ref().skip_binder().as_ref().skip_binder();
            for (local, ty) in iter::zip(body.args_iter(), fn_sig.inputs()) {
                if let rty::TyKind::Ptr(_, path) = ty.kind() {
                    let loc = FlatSet::Elem(path.to_loc().unwrap_or_else(|| tracked_span_bug!()));
                    state.flood_with(mir::PlaceRef { local, projection: &[] }, self.map, loc);
                } else {
                    state.flood(mir::PlaceRef { local, projection: &[] }, self.map);
                }
            }
        } else {
            for local in body.args_iter() {
                state.flood(mir::PlaceRef { local, projection: &[] }, self.map);
            }
        }
    }

    fn apply_primary_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        statement: &mir::Statement<'tcx>,
        _location: mir::Location,
    ) {
        self.handle_statement(statement, state);
    }

    fn apply_primary_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir mir::Terminator<'tcx>,
        _location: mir::Location,
    ) -> mir::TerminatorEdges<'mir, 'tcx> {
        self.handle_terminator(terminator, state)
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        _block: BasicBlock,
        return_places: mir::CallReturnPlaces<'_, 'tcx>,
    ) {
        self.handle_call_return(return_places, state);
    }
}

struct CollectPointerToBorrows<'a> {
    map: &'a Map,
    tracked_places: FxHashMap<PlaceIndex, flux_rustc_bridge::mir::Place>,
    stmts: &'a mut GhostStatements,
    before_state: Vec<(PlaceIndex, FlatSet<Loc>)>,
    results: &'a IndexSlice<BasicBlock, State>,
}

impl<'a> CollectPointerToBorrows<'a> {
    fn new(
        map: &'a Map,
        stmts: &'a mut GhostStatements,
        results: &'a IndexSlice<BasicBlock, State>,
    ) -> Self {
        let mut tracked_places = FxHashMap::default();
        map.for_each_tracked_place(|place_idx, local, projection| {
            let projection = projection
                .iter()
                .copied()
                .map(flux_rustc_bridge::mir::PlaceElem::Field)
                .collect();
            tracked_places.insert(place_idx, flux_rustc_bridge::mir::Place::new(local, projection));
        });
        Self { map, tracked_places, stmts, before_state: vec![], results }
    }
}

impl<'a, 'tcx> ResultsVisitor<'tcx, PointsToAnalysis<'a>> for CollectPointerToBorrows<'_> {
    fn visit_block_start(&mut self, state: &State) {
        self.before_state.clear();
        for place_idx in self.tracked_places.keys() {
            let value = state.get_idx(*place_idx, self.map);
            self.before_state.push((*place_idx, value));
        }
    }

    fn visit_after_primary_statement_effect(
        &mut self,
        _analysis: &mut PointsToAnalysis<'a>,
        state: &State,
        _statement: &mir::Statement<'tcx>,
        location: mir::Location,
    ) {
        let point = Point::BeforeLocation(location);
        for (place_idx, old_value) in &mut self.before_state {
            let new_value = state.get_idx(*place_idx, self.map);
            if let (FlatSet::Elem(_), FlatSet::Top) = (&old_value, &new_value) {
                let place = self
                    .tracked_places
                    .get(place_idx)
                    .unwrap_or_else(|| tracked_span_bug!())
                    .clone();
                self.stmts.insert_at(point, GhostStatement::PtrToRef(place));
            }
            *old_value = new_value;
        }
    }

    fn visit_after_primary_terminator_effect(
        &mut self,
        _analysis: &mut PointsToAnalysis<'a>,
        _state: &State,
        terminator: &mir::Terminator<'tcx>,
        location: mir::Location,
    ) {
        let block = location.block;
        for target in terminator.successors() {
            let point = Point::Edge(block, target);
            let target_state = self.results.get(target).unwrap();
            for (place_idx, old_value) in &self.before_state {
                let new_value = target_state.get_idx(*place_idx, self.map);
                if let (FlatSet::Elem(_), FlatSet::Top) = (&old_value, new_value) {
                    let place = self
                        .tracked_places
                        .get(place_idx)
                        .unwrap_or_else(|| tracked_span_bug!())
                        .clone();
                    self.stmts.insert_at(point, GhostStatement::PtrToRef(place));
                }
            }
        }
    }
}

/// Partial mapping from `Place` to [`PlaceIndex`], where some places also have a [`ValueIndex`].
///
/// This data structure essentially maintains a tree of places and their projections. Some
/// additional bookkeeping is done, to speed up traversal over this tree:
/// - For iteration, every [`PlaceInfo`] contains an intrusive linked list of its children.
/// - To directly get the child for a specific projection, there is a `projections` map.
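///
/// For example (illustrative), for a local `x: (&mut i32, i32)` we create places for `x` and for
/// each of its fields, but only `x.0` gets a [`ValueIndex`], because only mutable references are
/// tracked (see `register_children`). A typical lookup then goes through the map:
/// ```ignore
/// // `place` is some `mir::PlaceRef` for `x.0` (hypothetical)
/// if let Some(idx) = map.find(place) {
///     let loc = state.get_idx(idx, &map); // what `x.0` may point to, as a `FlatSet<Loc>`
/// }
/// ```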
#[derive(Debug)]
pub struct Map {
    locals: IndexVec<mir::Local, Option<PlaceIndex>>,
    projections: FxHashMap<(PlaceIndex, FieldIdx), PlaceIndex>,
    places: IndexVec<PlaceIndex, PlaceInfo>,
    value_count: usize,
    // The Range corresponds to a slice into `inner_values_buffer`.
    inner_values: IndexVec<PlaceIndex, Range<usize>>,
    inner_values_buffer: Vec<ValueIndex>,
}

impl Map {
    /// Returns a map that only tracks places whose type is a mutable reference.
    ///
    /// This is currently the only way to create a [`Map`]. The way in which the tracked places are
    /// chosen is an implementation detail and may not be relied upon (other than that their types
    /// are mutable references).
    fn new(body: &mir::Body) -> Self {
        let mut map = Self {
            locals: IndexVec::new(),
            projections: FxHashMap::default(),
            places: IndexVec::new(),
            value_count: 0,
            inner_values: IndexVec::new(),
            inner_values_buffer: Vec::new(),
        };
        let exclude = excluded_locals(body);
        map.register(body, exclude);
        map
    }

    /// Register all non-excluded places whose type is a mutable reference, together with the
    /// places leading to them.
    fn register(&mut self, body: &mir::Body, exclude: DenseBitSet<mir::Local>) {
        let mut worklist = VecDeque::with_capacity(body.local_decls.len());

        // Start by constructing the places for each bare local.
        self.locals = IndexVec::from_elem(None, &body.local_decls);
        for (local, decl) in body.local_decls.iter_enumerated() {
            if exclude.contains(local) {
                continue;
            }

            // Create a place for the local.
            debug_assert!(self.locals[local].is_none());
            let place = self.places.push(PlaceInfo::new(None));
            self.locals[local] = Some(place);

            // And push the eventual children places to the worklist.
            self.register_children(place, decl.ty, &mut worklist);
        }

        // Process the worklist: each entry represents the place `place.elem` with type `ty`.
        while let Some((mut place, elem, ty)) = worklist.pop_front() {
            // Create a place for this projection.
            place = *self.projections.entry((place, elem)).or_insert_with(|| {
                // Prepend new child to the linked list.
                let next = self.places.push(PlaceInfo::new(Some(elem)));
                self.places[next].next_sibling = self.places[place].first_child;
                self.places[place].first_child = Some(next);
                next
            });

            // And push the eventual children places to the worklist.
            self.register_children(place, ty, &mut worklist);
        }

        // Pre-compute the tree of ValueIndex nested in each PlaceIndex.
        // `inner_values_buffer[inner_values[place]]` is the set of all the values
        // reachable by projecting `place`.
        self.inner_values_buffer = Vec::with_capacity(self.value_count);
        self.inner_values = IndexVec::from_elem(0..0, &self.places);
        for local in body.local_decls.indices() {
            if let Some(place) = self.locals[local] {
                self.cache_preorder_invoke(place);
            }
        }

        // Trim useless places.
        for opt_place in &mut self.locals {
            if let Some(place) = *opt_place
                && self.inner_values[place].is_empty()
            {
                *opt_place = None;
            }
        }
        self.projections
            .retain(|_, child| !self.inner_values[*child].is_empty());
    }

    /// Potentially register the (local, projection) place and its fields, recursively.
    ///
    /// Invariant: The projection must only contain trackable elements.
    fn register_children<'tcx>(
        &mut self,
        place: PlaceIndex,
        ty: ty::Ty<'tcx>,
        worklist: &mut VecDeque<(PlaceIndex, FieldIdx, ty::Ty<'tcx>)>,
    ) {
        // Allocate a value slot if it doesn't have one and the type is a mutable reference.
        assert!(self.places[place].value_index.is_none());
        if let ty::TyKind::Ref(.., mir::Mutability::Mut) = ty.kind() {
            self.places[place].value_index = Some(self.value_count.into());
            self.value_count += 1;
        }

        // Add tuple fields to the worklist.
        if let ty::Tuple(list) = ty.kind() {
            for (field, ty) in list.iter().enumerate() {
                worklist.push_back((place, field.into(), ty));
            }
        }
    }

    /// Precompute the list of values inside `root` and store it
    /// as a slice within `inner_values_buffer`.
    fn cache_preorder_invoke(&mut self, root: PlaceIndex) {
        let start = self.inner_values_buffer.len();
        if let Some(vi) = self.places[root].value_index {
            self.inner_values_buffer.push(vi);
        }

        // We manually iterate instead of using `children` as we need to mutate `self`.
        let mut next_child = self.places[root].first_child;
        while let Some(child) = next_child {
            ensure_sufficient_stack(|| self.cache_preorder_invoke(child));
            next_child = self.places[child].next_sibling;
        }

        let end = self.inner_values_buffer.len();
        self.inner_values[root] = start..end;
    }

    /// Locates the given place, if it exists in the tree.
    fn find(&self, place: mir::PlaceRef<'_>) -> Option<PlaceIndex> {
        let mut index = *self.locals[place.local].as_ref()?;

        for &elem in place.projection {
            let mir::ProjectionElem::Field(elem, _) = elem else { return None };
            index = self.apply(index, elem)?;
        }

        Some(index)
    }

    /// Iterate over all direct children.
    fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
        Children::new(self, parent)
    }

    /// Applies a single projection element, yielding the corresponding child.
    fn apply(&self, place: PlaceIndex, elem: FieldIdx) -> Option<PlaceIndex> {
        self.projections.get(&(place, elem)).copied()
    }

    /// Invoke a function on the given place and all places that may alias it.
    fn for_each_aliasing_place(&self, place: mir::PlaceRef<'_>, f: &mut impl FnMut(ValueIndex)) {
        let Some(mut index) = self.locals[place.local] else {
            // The local is not tracked at all, so it does not alias anything.
            return;
        };
        for elem in place.projection {
            let mir::ProjectionElem::Field(elem, _) = *elem else {
                return;
            };
            // A field aliases the parent place.
            if let Some(vi) = self.places[index].value_index {
                f(vi);
            }

            let Some(sub) = self.apply(index, elem) else {
                return;
            };
            index = sub;
        }
        self.for_each_value_inside(index, f);
    }

    /// Invoke a function on each value in the given place and all descendants.
    fn for_each_value_inside(&self, root: PlaceIndex, f: &mut impl FnMut(ValueIndex)) {
        let range = self.inner_values[root].clone();
        let values = &self.inner_values_buffer[range];
        for &v in values {
            f(v);
        }
    }

    fn for_each_tracked_place(&self, mut f: impl FnMut(PlaceIndex, mir::Local, &[FieldIdx])) {
        let mut projection = Vec::new();
        for (local, place) in self.locals.iter_enumerated() {
            if let Some(place) = place {
                self.for_each_tracked_place_rec(
                    *place,
                    &mut projection,
                    &mut |place, projection| {
                        f(place, local, projection);
                    },
                );
            }
        }
    }

    fn for_each_tracked_place_rec(
        &self,
        root: PlaceIndex,
        projection: &mut Vec<FieldIdx>,
        f: &mut impl FnMut(PlaceIndex, &[FieldIdx]),
    ) {
        // Fast path if there is nothing to do.
        if self.inner_values[root].is_empty() {
            return;
        }

        if self.places[root].value_index.is_some() {
            f(root, projection);
        }

        for child in self.children(root) {
            let elem = self.places[child]
                .proj_elem
                .unwrap_or_else(|| tracked_span_bug!());
            projection.push(elem);
            self.for_each_tracked_place_rec(child, projection, f);
            projection.pop();
        }
    }
}

/// This is the information tracked for every [`PlaceIndex`] and is stored by [`Map`].
///
/// Together, `first_child` and `next_sibling` form an intrusive linked list, which is used to
/// model a tree structure (a replacement for a member like `children: Vec<PlaceIndex>`).
#[derive(Debug)]
struct PlaceInfo {
    /// We store a [`ValueIndex`] if and only if the place is tracked by the analysis.
    value_index: Option<ValueIndex>,

    /// The projection used to go from parent to this node (only None for root).
    proj_elem: Option<FieldIdx>,

    /// The left-most child.
    first_child: Option<PlaceIndex>,

    /// Index of the sibling to the right of this node.
    next_sibling: Option<PlaceIndex>,
}

impl PlaceInfo {
    fn new(proj_elem: Option<FieldIdx>) -> Self {
        Self { next_sibling: None, proj_elem, first_child: None, value_index: None }
    }
}

struct Children<'a> {
    map: &'a Map,
    next: Option<PlaceIndex>,
}

impl<'a> Children<'a> {
    fn new(map: &'a Map, parent: PlaceIndex) -> Self {
        Self { map, next: map.places[parent].first_child }
    }
}

impl Iterator for Children<'_> {
    type Item = PlaceIndex;

    fn next(&mut self) -> Option<Self::Item> {
        match self.next {
            Some(child) => {
                self.next = self.map.places[child].next_sibling;
                Some(child)
            }
            None => None,
        }
    }
}

/// Returns all locals with projections that have their reference or address taken.
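///
/// For example (illustrative), in a body containing
/// ```ignore
/// let p = &mut x.0;
/// ```
/// the local `x` is excluded entirely, since through `p` one could reach and mutate other parts of
/// `x` that the analysis does not see.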
fn excluded_locals(body: &mir::Body<'_>) -> DenseBitSet<mir::Local> {
    struct Collector {
        result: DenseBitSet<mir::Local>,
    }

    impl<'tcx> mir::visit::Visitor<'tcx> for Collector {
        fn visit_place(
            &mut self,
            place: &mir::Place<'tcx>,
            context: mir::visit::PlaceContext,
            _location: mir::Location,
        ) {
            if (context.is_borrow()
                || context.is_address_of()
                || context.is_drop()
                || context
                    == mir::visit::PlaceContext::MutatingUse(
                        mir::visit::MutatingUseContext::AsmOutput,
                    ))
                && !place.is_indirect()
            {
                // A pointer to a place could be used to access other places with the same local,
                // hence we have to exclude the local completely.
                self.result.insert(place.local);
            }
        }
    }

    let mut collector = Collector { result: DenseBitSet::new_empty(body.local_decls.len()) };
    collector.visit_body(body);
    collector.result
}

rustc_index::newtype_index!(
    /// This index uniquely identifies a place.
    ///
    /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
    /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
    struct PlaceIndex {}
);

rustc_index::newtype_index!(
    /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
    ///
    /// It is an implementation detail of this module.
    struct ValueIndex {}
);

/// Used as the result of evaluating an r-value.
enum PlaceOrValue {
    Value(FlatSet<Loc>),
    Place(PlaceIndex),
}

impl PlaceOrValue {
    const TOP: Self = PlaceOrValue::Value(FlatSet::TOP);
}

/// The dataflow state for the [`PointsToAnalysis`].
///
/// The possible values of a single tracked place form the lattice `V = FlatSet<Loc>`. If we call
/// the set of tracked places `P`, then a [`State`] is an element of the map lattice `P -> V`,
/// ordered pointwise: the bottom element maps every place to `⊥` and the top element maps every
/// place to `⊤`.
///
/// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
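///
/// As a small worked example of the join (illustrative, with two tracked places `p` and `q`):
/// ```ignore
/// // state from the then branch:  p ↦ Elem(x),  q ↦ ⊥
/// // state from the else branch:  p ↦ Elem(y),  q ↦ ⊥
/// // after the join:              p ↦ ⊤,        q ↦ ⊥
/// ```
/// The transition of `p` from `Elem(_)` to `⊤` is exactly what [`CollectPointerToBorrows`] looks
/// for when deciding where to insert a `PtrToRef` ghost statement.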
#[derive(PartialEq, Eq, Debug)]
struct State {
    values: IndexVec<ValueIndex, FlatSet<Loc>>,
}

impl Clone for State {
    fn clone(&self) -> Self {
        Self { values: self.values.clone() }
    }

    fn clone_from(&mut self, source: &Self) {
        self.values.clone_from(&source.values);
    }
}

impl JoinSemiLattice for State {
    fn join(&mut self, other: &Self) -> bool {
        assert_eq!(self.values.len(), other.values.len());
        let mut changed = false;
        for (a, b) in iter::zip(&mut self.values, &other.values) {
            changed |= a.join(b);
        }
        changed
    }
}

impl State {
    fn flood(&mut self, place: mir::PlaceRef<'_>, map: &Map) {
        self.flood_with(place, map, FlatSet::TOP);
    }

    fn flood_with(&mut self, place: mir::PlaceRef<'_>, map: &Map, value: FlatSet<Loc>) {
        map.for_each_aliasing_place(place, &mut |vi| {
            self.values[vi] = value;
        });
    }

    /// Helper method to interpret `target = result`.
    fn assign(&mut self, target: mir::PlaceRef<'_>, result: PlaceOrValue, map: &Map) {
        self.flood(target, map);
        if let Some(target) = map.find(target) {
            self.insert_idx(target, result, map);
        }
    }

    /// Low-level method that assigns to a place.
    /// This does nothing if the place is not tracked.
    ///
    /// The target place must have been flooded before calling this method.
    fn insert_idx(&mut self, target: PlaceIndex, result: PlaceOrValue, map: &Map) {
        match result {
            PlaceOrValue::Value(value) => self.insert_value_idx(target, value, map),
            PlaceOrValue::Place(source) => self.insert_place_idx(target, source, map),
        }
    }

    /// Copies `source` to `target`, including all tracked places beneath.
    ///
    /// If `target` contains a place that is not contained in `source`, it will be overwritten with
    /// Top. Also, because this will copy all entries one after another, it may only be used for
    /// places that are non-overlapping or identical.
    ///
    /// The target place must have been flooded before calling this method.
    fn insert_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
        // If both places are tracked, we copy the value to the target.
        // If the target is tracked, but the source is not, we do nothing, as invalidation has
        // already been performed.
        if let Some(target_value) = map.places[target].value_index
            && let Some(source_value) = map.places[source].value_index
        {
            self.values[target_value] = self.values[source_value];
        }
        for target_child in map.children(target) {
            // Try to find the corresponding child and recurse. The reasoning is similar to the above.
            let projection = map.places[target_child]
                .proj_elem
                .unwrap_or_else(|| tracked_span_bug!());
            if let Some(source_child) = map.projections.get(&(source, projection)) {
                self.insert_place_idx(target_child, *source_child, map);
            }
        }
    }

    /// Low-level method that assigns a value to a place.
    /// This does nothing if the place is not tracked.
    ///
    /// The target place must have been flooded before calling this method.
    fn insert_value_idx(&mut self, target: PlaceIndex, value: FlatSet<Loc>, map: &Map) {
        if let Some(value_index) = map.places[target].value_index {
            self.values[value_index] = value;
        }
    }

    /// Retrieve the value stored for a place, or ⊤ if it is not tracked.
    fn get(&self, place: mir::PlaceRef<'_>, map: &Map) -> FlatSet<Loc> {
        map.find(place)
            .map_or(FlatSet::TOP, |place| self.get_idx(place, map))
    }

    /// Retrieve the value stored for a place index, or ⊤ if it is not tracked.
    fn get_idx(&self, place: PlaceIndex, map: &Map) -> FlatSet<Loc> {
        self.get_tracked_idx(place, map).unwrap_or(FlatSet::Top)
    }

    /// Retrieve the value stored for a place index, if it is tracked.
    fn get_tracked_idx(&self, place: PlaceIndex, map: &Map) -> Option<FlatSet<Loc>> {
        map.places[place].value_index.map(|v| self.values[v])
    }
}

/// This is used to visualize the dataflow analysis.
impl DebugWithContext<PointsToAnalysis<'_>> for State {
    fn fmt_with(&self, ctxt: &PointsToAnalysis, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
        debug_with_context(&self.values, None, ctxt.map, f)
    }

    fn fmt_diff_with(
        &self,
        old: &Self,
        ctxt: &PointsToAnalysis,
        f: &mut fmt::Formatter<'_>,
    ) -> std::fmt::Result {
        debug_with_context(&self.values, Some(&old.values), ctxt.map, f)
    }
}

fn debug_with_context_rec<V: fmt::Debug + Eq>(
    place: PlaceIndex,
    place_str: &str,
    new: &IndexSlice<ValueIndex, V>,
    old: Option<&IndexSlice<ValueIndex, V>>,
    map: &Map,
    f: &mut fmt::Formatter<'_>,
) -> std::fmt::Result {
    if let Some(value) = map.places[place].value_index {
        match old {
            None => writeln!(f, "{}: {:?}", place_str, new[value])?,
            Some(old) => {
                if new[value] != old[value] {
                    writeln!(f, "\u{001f}-{}: {:?}", place_str, old[value])?;
                    writeln!(f, "\u{001f}+{}: {:?}", place_str, new[value])?;
                }
            }
        }
    }

    for child in map.children(place) {
        let info_elem = map.places[child]
            .proj_elem
            .unwrap_or_else(|| tracked_span_bug!());
        let child_place_str = format!("{}.{}", place_str, info_elem.index());
        debug_with_context_rec(child, &child_place_str, new, old, map, f)?;
    }

    Ok(())
}

fn debug_with_context<V: fmt::Debug + Eq>(
    new: &IndexSlice<ValueIndex, V>,
    old: Option<&IndexSlice<ValueIndex, V>>,
    map: &Map,
    f: &mut fmt::Formatter<'_>,
) -> std::fmt::Result {
    for (local, place) in map.locals.iter_enumerated() {
        if let Some(place) = place {
            debug_with_context_rec(*place, &format!("{local:?}"), new, old, map, f)?;
        }
    }
    Ok(())
}