diff --git a/src/librustc_mir/dataflow/generic.rs b/src/librustc_mir/dataflow/generic.rs
new file mode 100644
index 0000000000000..77067f75c49f8
--- /dev/null
+++ b/src/librustc_mir/dataflow/generic.rs
@@ -0,0 +1,443 @@
+use std::cmp::Ordering;
+use std::ops;
+
+use rustc::mir::{self, traversal, BasicBlock, Location};
+use rustc_data_structures::bit_set::BitSet;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc_data_structures::work_queue::WorkQueue;
+
+use crate::dataflow::BottomValue;
+
+pub trait Analysis<'tcx>: BottomValue {
+    type Idx: Idx;
+
+    fn name() -> &'static str;
+
+    fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize;
+
+    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut BitSet<Self::Idx>);
+
+    fn apply_statement_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        statement: &mir::Statement<'tcx>,
+        location: Location,
+    );
+
+    fn apply_terminator_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        terminator: &mir::Terminator<'tcx>,
+        location: Location,
+    );
+
+    fn apply_call_return_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        block: BasicBlock,
+        func: &mir::Operand<'tcx>,
+        args: &[mir::Operand<'tcx>],
+        return_place: &mir::Place<'tcx>,
+    );
+
+    /// Applies the cumulative effect of an entire basic block to the dataflow state (except for
+    /// `call_return_effect`, which is handled in the `Engine`).
+    ///
+    /// The default implementation calls `statement_effect` for every statement in the block before
+    /// finally calling `terminator_effect`. However, some dataflow analyses are able to coalesce
+    /// transfer functions for an entire block and apply them at once. Such analyses should
+    /// override `block_effect`.
+    fn apply_whole_block_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        block: BasicBlock,
+        block_data: &mir::BasicBlockData<'tcx>,
+    ) {
+        for (statement_index, stmt) in block_data.statements.iter().enumerate() {
+            let location = Location { block, statement_index };
+            self.apply_statement_effect(state, stmt, location);
+        }
+
+        let location = Location { block, statement_index: block_data.statements.len() };
+        self.apply_terminator_effect(state, block_data.terminator(), location);
+    }
+
+    /// Applies the cumulative effect of a sequence of statements (and possibly a terminator)
+    /// within a single basic block.
+    ///
+    /// When called with `0..block_data.statements.len() + 1` as the statement range, this function
+    /// is equivalent to `apply_whole_block_effect`.
+    fn apply_partial_block_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        block: BasicBlock,
+        block_data: &mir::BasicBlockData<'tcx>,
+        mut range: ops::Range<usize>,
+    ) {
+        if range.is_empty() {
+            return;
+        }
+
+        // The final location might be a terminator, so iterate through all statements until the
+        // final one, then check to see whether the final one is a statement or terminator.
+        //
+        // This can't cause the range to wrap-around since we check that the range contains at
+        // least one element above.
+        range.end -= 1;
+        let final_location = Location { block, statement_index: range.end };
+
+        for statement_index in range {
+            let location = Location { block, statement_index };
+            let stmt = &block_data.statements[statement_index];
+            self.apply_statement_effect(state, stmt, location);
+        }
+
+        if final_location.statement_index == block_data.statements.len() {
+            let terminator = block_data.terminator();
+            self.apply_terminator_effect(state, terminator, final_location);
+        } else {
+            let stmt = &block_data.statements[final_location.statement_index];
+            self.apply_statement_effect(state, stmt, final_location);
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug)]
+enum CursorPosition {
+    AtBlockStart(BasicBlock),
+    After(Location),
+}
+
+impl CursorPosition {
+    fn block(&self) -> BasicBlock {
+        match *self {
+            Self::AtBlockStart(block) => block,
+            Self::After(Location { block, .. }) => block,
+        }
+    }
+}
+
+pub struct ResultsCursor<'mir, 'tcx, A>
+where
+    A: Analysis<'tcx>,
+{
+    body: &'mir mir::Body<'tcx>,
+    results: Results<'tcx, A>,
+    state: BitSet<A::Idx>,
+
+    pos: CursorPosition,
+
+    /// Whether the effects of `apply_call_return_effect` are currently stored in `state`.
+    ///
+    /// This flag ensures that multiple calls to `seek_after_assume_call_returns` with the same
+    /// target only result in one invocation of `apply_call_return_effect`.
+    is_call_return_effect_applied: bool,
+}
+
+impl<'mir, 'tcx, A> ResultsCursor<'mir, 'tcx, A>
+where
+    A: Analysis<'tcx>,
+{
+    /// Returns a new cursor for `results` that points to the start of the `START_BLOCK`.
+    pub fn new(body: &'mir mir::Body<'tcx>, results: Results<'tcx, A>) -> Self {
+        ResultsCursor {
+            body,
+            pos: CursorPosition::AtBlockStart(mir::START_BLOCK),
+            is_call_return_effect_applied: false,
+            state: results.entry_sets[mir::START_BLOCK].clone(),
+            results,
+        }
+    }
+
+    /// Resets the cursor to the start of the given `block`.
+    pub fn seek_to_block_start(&mut self, block: BasicBlock) {
+        self.state.overwrite(&self.results.entry_sets[block]);
+        self.pos = CursorPosition::AtBlockStart(block);
+        self.is_call_return_effect_applied = false;
+    }
+
+    /// Updates the cursor to hold the dataflow state immediately before `target`.
+    #[allow(unused)]
+    pub fn seek_before(&mut self, target: Location) {
+        assert!(target <= self.body.terminator_loc(target.block));
+
+        if target.statement_index == 0 {
+            self.seek_to_block_start(target.block);
+        } else {
+            self._seek_after(Location {
+                block: target.block,
+                statement_index: target.statement_index - 1,
+            });
+        }
+    }
+
+    /// Updates the cursor to hold the dataflow state at `target`.
+    ///
+    /// If `target` is a `Call` terminator, `apply_call_return_effect` will not be called. See
+    /// `seek_after_assume_call_returns` if you wish to observe the dataflow state upon a
+    /// successful return.
+    #[allow(unused)]
+    pub fn seek_after(&mut self, target: Location) {
+        assert!(target <= self.body.terminator_loc(target.block));
+
+        // This check ensures the correctness of a call to `seek_after_assume_call_returns`
+        // followed by one to `seek_after` with the same target.
+        if self.is_call_return_effect_applied {
+            self.seek_to_block_start(target.block);
+        }
+
+        self._seek_after(target);
+    }
+
+    /// Equivalent to `seek_after`, but also calls `apply_call_return_effect` if `target` is a
+    /// `Call` terminator whose operand is convergent.
+    pub fn seek_after_assume_call_returns(&mut self, target: Location) {
+        assert!(target <= self.body.terminator_loc(target.block));
+
+        self._seek_after(target);
+
+        if target != self.body.terminator_loc(target.block) {
+            return;
+        }
+
+        let term = self.body.basic_blocks()[target.block].terminator();
+        if let mir::TerminatorKind::Call {
+            destination: Some((return_place, _)),
+            func,
+            args,
+            ..
+        } = &term.kind {
+            if !self.is_call_return_effect_applied {
+                self.results.analysis.apply_call_return_effect(
+                    &mut self.state,
+                    target.block,
+                    func,
+                    args,
+                    return_place,
+                );
+            }
+        }
+    }
+
+    fn _seek_after(&mut self, target: Location) {
+        let Location { block: target_block, statement_index: target_index } = target;
+
+        if self.pos.block() != target_block {
+            self.seek_to_block_start(target_block);
+        }
+
+        // If we're in the same block but after the target statement, we need to reset to the start
+        // of the block.
+        if let CursorPosition::After(Location { statement_index: curr_index, .. }) = self.pos {
+            match curr_index.cmp(&target_index) {
+                Ordering::Equal => return,
+                Ordering::Less => {},
+                Ordering::Greater => self.seek_to_block_start(target_block),
+            }
+        }
+
+        // The cursor is now in the same block as the target location pointing at an earlier
+        // statement.
+        debug_assert_eq!(self.pos.block(), target_block);
+        if let CursorPosition::After(Location { statement_index, .. }) = self.pos {
+            debug_assert!(statement_index < target_index);
+        }
+
+        let first_unapplied_statement = match self.pos {
+            CursorPosition::AtBlockStart(_) => 0,
+            CursorPosition::After(Location { statement_index, .. }) => statement_index + 1,
+        };
+
+        let block_data = &self.body.basic_blocks()[target_block];
+        self.results.analysis.apply_partial_block_effect(
+            &mut self.state,
+            target_block,
+            block_data,
+            first_unapplied_statement..target_index + 1,
+        );
+
+        self.pos = CursorPosition::After(target);
+        self.is_call_return_effect_applied = false;
+    }
+
+    /// Gets the dataflow state at the current location.
+    pub fn get(&self) -> &BitSet<A::Idx> {
+        &self.state
+    }
+}
+
+pub struct Results<'tcx, A>
+where
+    A: Analysis<'tcx>,
+{
+    analysis: A,
+    entry_sets: IndexVec<BasicBlock, BitSet<A::Idx>>,
+}
+
+pub struct Engine<'a, 'tcx, A>
+where
+    A: Analysis<'tcx>,
+{
+    analysis: A,
+    bits_per_block: usize,
+    body: &'a mir::Body<'tcx>,
+    dead_unwinds: &'a BitSet<BasicBlock>,
+    entry_sets: IndexVec<BasicBlock, BitSet<A::Idx>>,
+}
+
+impl<A> Engine<'a, 'tcx, A>
+where
+    A: Analysis<'tcx>,
+{
+    pub fn new(
+        body: &'a mir::Body<'tcx>,
+        dead_unwinds: &'a BitSet<BasicBlock>,
+        analysis: A,
+    ) -> Self {
+        let bits_per_block = analysis.bits_per_block(body);
+
+        let bottom_value_set = if A::BOTTOM_VALUE == true {
+            BitSet::new_filled(bits_per_block)
+        } else {
+            BitSet::new_empty(bits_per_block)
+        };
+
+        let mut entry_sets = IndexVec::from_elem(bottom_value_set, body.basic_blocks());
+        analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
+
+        Engine {
+            analysis,
+            bits_per_block,
+            body,
+            dead_unwinds,
+            entry_sets,
+        }
+    }
+
+    pub fn iterate_to_fixpoint(mut self) -> Results<'tcx, A> {
+        let mut temp_state = BitSet::new_empty(self.bits_per_block);
+
+        let mut dirty_queue: WorkQueue<BasicBlock> =
+            WorkQueue::with_none(self.body.basic_blocks().len());
+
+        for (bb, _) in traversal::reverse_postorder(self.body) {
+            dirty_queue.insert(bb);
+        }
+
+        // Add blocks which are not reachable from START_BLOCK to the work queue. These blocks will
+        // be processed after the ones added above.
+        for bb in self.body.basic_blocks().indices() {
+            dirty_queue.insert(bb);
+        }
+
+        while let Some(bb) = dirty_queue.pop() {
+            let bb_data = &self.body[bb];
+            let on_entry = &self.entry_sets[bb];
+
+            temp_state.overwrite(on_entry);
+            self.analysis.apply_whole_block_effect(&mut temp_state, bb, bb_data);
+
+            self.propagate_bits_into_graph_successors_of(
+                &mut temp_state,
+                (bb, bb_data),
+                &mut dirty_queue,
+            );
+        }
+
+        Results {
+            analysis: self.analysis,
+            entry_sets: self.entry_sets,
+        }
+    }
+
+    fn propagate_bits_into_graph_successors_of(
+        &mut self,
+        in_out: &mut BitSet<A::Idx>,
+        (bb, bb_data): (BasicBlock, &'a mir::BasicBlockData<'tcx>),
+        dirty_list: &mut WorkQueue<BasicBlock>,
+    ) {
+        match bb_data.terminator().kind {
+            mir::TerminatorKind::Return
+            | mir::TerminatorKind::Resume
+            | mir::TerminatorKind::Abort
+            | mir::TerminatorKind::GeneratorDrop
+            | mir::TerminatorKind::Unreachable => {}
+
+            mir::TerminatorKind::Goto { target }
+            | mir::TerminatorKind::Assert { target, cleanup: None, .. }
+            | mir::TerminatorKind::Yield { resume: target, drop: None, .. }
+            | mir::TerminatorKind::Drop { target, location: _, unwind: None }
+            | mir::TerminatorKind::DropAndReplace { target, value: _, location: _, unwind: None } =>
+            {
+                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
+            }
+
+            mir::TerminatorKind::Yield { resume: target, drop: Some(drop), .. } => {
+                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
+                self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
+            }
+
+            mir::TerminatorKind::Assert { target, cleanup: Some(unwind), .. }
+            | mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind) }
+            | mir::TerminatorKind::DropAndReplace {
+                target,
+                value: _,
+                location: _,
+                unwind: Some(unwind),
+            } => {
+                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
+                if !self.dead_unwinds.contains(bb) {
+                    self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
+                }
+            }
+
+            mir::TerminatorKind::SwitchInt { ref targets, .. } => {
+                for target in targets {
+                    self.propagate_bits_into_entry_set_for(in_out, *target, dirty_list);
+                }
+            }
+
+            mir::TerminatorKind::Call { cleanup, ref destination, ref func, ref args, .. } => {
+                if let Some(unwind) = cleanup {
+                    if !self.dead_unwinds.contains(bb) {
+                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
+                    }
+                }
+
+                if let Some((ref dest_place, dest_bb)) = *destination {
+                    // N.B.: This must be done *last*, after all other
+                    // propagation, as documented in comment above.
+                    self.analysis.apply_call_return_effect(in_out, bb, func, args, dest_place);
+                    self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
+                }
+            }
+
+            mir::TerminatorKind::FalseEdges { real_target, imaginary_target } => {
+                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
+                self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
+            }
+
+            mir::TerminatorKind::FalseUnwind { real_target, unwind } => {
+                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
+                if let Some(unwind) = unwind {
+                    if !self.dead_unwinds.contains(bb) {
+                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
+                    }
+                }
+            }
+        }
+    }
+
+    fn propagate_bits_into_entry_set_for(
+        &mut self,
+        in_out: &BitSet<A::Idx>,
+        bb: BasicBlock,
+        dirty_queue: &mut WorkQueue<BasicBlock>,
+    ) {
+        let entry_set = &mut self.entry_sets[bb];
+        let set_changed = self.analysis.join(entry_set, &in_out);
+        if set_changed {
+            dirty_queue.insert(bb);
+        }
+    }
+}
diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs
index 7fe2a890a5371..9e800abc6116b 100644
--- a/src/librustc_mir/dataflow/mod.rs
+++ b/src/librustc_mir/dataflow/mod.rs
@@ -30,6 +30,7 @@ use self::move_paths::MoveData;
 
 mod at_location;
 pub mod drop_flag_effects;
+pub mod generic;
 mod graphviz;
 mod impls;
 pub mod move_paths;
@@ -453,6 +454,10 @@ where
     {
         self.flow_state.each_gen_bit(f)
     }
+
+    pub fn get(&self) -> &BitSet<BD::Idx> {
+        self.flow_state.as_dense()
+    }
 }
 
 pub fn state_for_location<'tcx, T: BitDenotation<'tcx>>(loc: Location,
diff --git a/src/librustc_mir/lib.rs b/src/librustc_mir/lib.rs
index f27db351b74db..c4738d846be63 100644
--- a/src/librustc_mir/lib.rs
+++ b/src/librustc_mir/lib.rs
@@ -23,6 +23,7 @@ Rust MIR: a lowered representation of Rust. Also: an experiment!
 #![feature(try_blocks)]
 #![feature(mem_take)]
 #![feature(associated_type_bounds)]
+#![feature(range_is_empty)]
 
 #![recursion_limit="256"]
 
diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs
index 32b49ee942300..a9d0c3768ead8 100644
--- a/src/librustc_mir/transform/qualify_consts.rs
+++ b/src/librustc_mir/transform/qualify_consts.rs
@@ -14,7 +14,7 @@ use rustc::traits::{self, TraitEngine};
 use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
 use rustc::ty::cast::CastTy;
 use rustc::ty::query::Providers;
-use rustc::mir::*;
+use rustc::mir::{self, *};
 use rustc::mir::interpret::ConstValue;
 use rustc::mir::traversal::ReversePostorder;
 use rustc::mir::visit::{PlaceContext, Visitor, MutatingUseContext, NonMutatingUseContext};
@@ -26,12 +26,15 @@ use syntax::symbol::sym;
 use syntax_pos::{Span, DUMMY_SP};
 
 use std::borrow::Cow;
-use std::cell::Cell;
+use std::cell::{Cell, RefCell};
 use std::fmt;
+use std::marker::PhantomData;
 use std::ops::{Deref, Index, IndexMut};
+use std::rc::Rc;
 use std::usize;
 
 use rustc::hir::HirId;
+use crate::dataflow::{self, do_dataflow, generic, HaveBeenBorrowedLocals};
 use crate::transform::{MirPass, MirSource};
 use super::promote_consts::{self, Candidate, TempState};
 
@@ -70,11 +73,9 @@ impl fmt::Display for Mode {
     }
 }
 
-const QUALIF_COUNT: usize = 4;
-
 // FIXME(eddyb) once we can use const generics, replace this array with
 // something like `IndexVec` but for fixed-size arrays (`IndexArray`?).
-#[derive(Copy, Clone, Default)]
+#[derive(Copy, Clone, Default, PartialEq, Eq)]
 struct PerQualif<T>([T; QUALIF_COUNT]);
 
 impl<T: Clone> PerQualif<T> {
@@ -126,13 +127,12 @@ impl<Q: Qualif, T> IndexMut<Q> for PerQualif<T> {
     }
 }
 
+#[derive(Clone, Copy)]
 struct ConstCx<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     param_env: ty::ParamEnv<'tcx>,
     mode: Mode,
     body: &'a Body<'tcx>,
-
-    per_local: PerQualif<BitSet<Local>>,
 }
 
 impl<'a, 'tcx> ConstCx<'a, 'tcx> {
@@ -153,14 +153,20 @@ enum ValueSource<'a, 'tcx> {
     },
 }
 
+trait QualifIdx {
+    const IDX: usize;
+}
+
 /// A "qualif"(-ication) is a way to look for something "bad" in the MIR that would disqualify some
 /// code for promotion or prevent it from evaluating at compile time. So `return true` means
 /// "I found something bad, no reason to go on searching". `false` is only returned if we
 /// definitely cannot find anything bad anywhere.
 ///
 /// The default implementations proceed structurally.
-trait Qualif {
-    const IDX: usize;
+trait Qualif: QualifIdx {
+    fn is_cleared_on_move() -> bool {
+        false
+    }
 
     /// Return the qualification that is (conservatively) correct for any value
     /// of the type, or `None` if the qualification is not value/type-based.
@@ -174,8 +180,25 @@ trait Qualif {
         Self::in_any_value_of_ty(cx, ty).unwrap_or(true)
     }
 
-    fn in_local(cx: &ConstCx<'_, '_>, local: Local) -> bool {
-        cx.per_local.0[Self::IDX].contains(local)
+    fn in_arg_initially(cx: &ConstCx<'_, 'tcx>, local: Local) -> bool {
+        Self::in_any_value_of_ty(cx, cx.body.local_decls[local].ty)
+            .expect("`in_arg_initially` is overridden if `in_any_value_of_ty` is `None`")
+    }
+
+    fn in_temp_initially(
+        _cx: &ConstCx<'_, 'tcx>,
+        _local: Local,
+        _promotion_state: &IndexVec<Local, TempState>,
+    ) -> bool {
+        false
+    }
+
+    fn in_user_variable_initially(_cx: &ConstCx<'_, 'tcx>, _local: Local) -> bool {
+        false
+    }
+
+    fn in_return_place_initially(_cx: &ConstCx<'_, 'tcx>, _local: Local) -> bool {
+        false
     }
 
     fn in_static(_cx: &ConstCx<'_, 'tcx>, _static: &Static<'tcx>) -> bool {
@@ -185,11 +208,12 @@ trait Qualif {
 
     fn in_projection_structurally(
         cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
         place: PlaceRef<'_, 'tcx>,
     ) -> bool {
         let proj = place.projection.as_ref().unwrap();
 
-        let base_qualif = Self::in_place(cx, PlaceRef {
+        let base_qualif = Self::in_place(cx, per_local, PlaceRef {
             base: place.base,
             projection: &proj.base,
         });
@@ -206,23 +230,28 @@ trait Qualif {
             ProjectionElem::ConstantIndex { .. } |
             ProjectionElem::Downcast(..) => qualif,
 
-            ProjectionElem::Index(local) => qualif || Self::in_local(cx, local),
+            ProjectionElem::Index(local) => qualif || per_local.contains(local),
         }
     }
 
     fn in_projection(
         cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
         place: PlaceRef<'_, 'tcx>,
     ) -> bool {
-        Self::in_projection_structurally(cx, place)
+        Self::in_projection_structurally(cx, per_local, place)
     }
 
-    fn in_place(cx: &ConstCx<'_, 'tcx>, place: PlaceRef<'_, 'tcx>) -> bool {
+    fn in_place(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        place: PlaceRef<'_, 'tcx>,
+    ) -> bool {
         match place {
             PlaceRef {
                 base: PlaceBase::Local(local),
                 projection: None,
-            } => Self::in_local(cx, *local),
+            } => per_local.contains(*local),
             PlaceRef {
                 base: PlaceBase::Static(box Static {
                     kind: StaticKind::Promoted(..),
@@ -239,14 +268,18 @@ trait Qualif {
             PlaceRef {
                 base: _,
                 projection: Some(_),
-            } => Self::in_projection(cx, place),
+            } => Self::in_projection(cx, per_local, place),
         }
     }
 
-    fn in_operand(cx: &ConstCx<'_, 'tcx>, operand: &Operand<'tcx>) -> bool {
+    fn in_operand(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        operand: &Operand<'tcx>,
+    ) -> bool {
         match *operand {
             Operand::Copy(ref place) |
-            Operand::Move(ref place) => Self::in_place(cx, place.as_ref()),
+            Operand::Move(ref place) => Self::in_place(cx, per_local, place.as_ref()),
 
             Operand::Constant(ref constant) => {
                 if let ConstValue::Unevaluated(def_id, _) = constant.literal.val {
@@ -270,21 +303,25 @@ trait Qualif {
         }
     }
 
-    fn in_rvalue_structurally(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+    fn in_rvalue_structurally(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        rvalue: &Rvalue<'tcx>,
+    ) -> bool {
         match *rvalue {
             Rvalue::NullaryOp(..) => false,
 
             Rvalue::Discriminant(ref place) |
-            Rvalue::Len(ref place) => Self::in_place(cx, place.as_ref()),
+            Rvalue::Len(ref place) => Self::in_place(cx, per_local, place.as_ref()),
 
             Rvalue::Use(ref operand) |
             Rvalue::Repeat(ref operand, _) |
             Rvalue::UnaryOp(_, ref operand) |
-            Rvalue::Cast(_, ref operand, _) => Self::in_operand(cx, operand),
+            Rvalue::Cast(_, ref operand, _) => Self::in_operand(cx, per_local, operand),
 
             Rvalue::BinaryOp(_, ref lhs, ref rhs) |
             Rvalue::CheckedBinaryOp(_, ref lhs, ref rhs) => {
-                Self::in_operand(cx, lhs) || Self::in_operand(cx, rhs)
+                Self::in_operand(cx, per_local, lhs) || Self::in_operand(cx, per_local, rhs)
             }
 
             Rvalue::Ref(_, _, ref place) => {
@@ -293,7 +330,7 @@ trait Qualif {
                     if let ProjectionElem::Deref = proj.elem {
                         let base_ty = Place::ty_from(&place.base, &proj.base, cx.body, cx.tcx).ty;
                         if let ty::Ref(..) = base_ty.sty {
-                            return Self::in_place(cx, PlaceRef {
+                            return Self::in_place(cx, per_local, PlaceRef {
                                 base: &place.base,
                                 projection: &proj.base,
                             });
@@ -301,21 +338,26 @@ trait Qualif {
                     }
                 }
 
-                Self::in_place(cx, place.as_ref())
+                Self::in_place(cx, per_local, place.as_ref())
             }
 
             Rvalue::Aggregate(_, ref operands) => {
-                operands.iter().any(|o| Self::in_operand(cx, o))
+                operands.iter().any(|o| Self::in_operand(cx, per_local, o))
             }
         }
     }
 
-    fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
-        Self::in_rvalue_structurally(cx, rvalue)
+    fn in_rvalue(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        rvalue: &Rvalue<'tcx>,
+    ) -> bool {
+        Self::in_rvalue_structurally(cx, per_local, rvalue)
     }
 
     fn in_call(
         cx: &ConstCx<'_, 'tcx>,
+        _per_local: &BitSet<Local>,
         _callee: &Operand<'tcx>,
         _args: &[Operand<'tcx>],
         return_ty: Ty<'tcx>,
@@ -324,12 +366,16 @@ trait Qualif {
         Self::in_any_value_of_ty(cx, return_ty).unwrap_or(false)
     }
 
-    fn in_value(cx: &ConstCx<'_, 'tcx>, source: ValueSource<'_, 'tcx>) -> bool {
+    fn in_value(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        source: ValueSource<'_, 'tcx>,
+    ) -> bool {
         match source {
-            ValueSource::Rvalue(rvalue) => Self::in_rvalue(cx, rvalue),
-            ValueSource::DropAndReplace(source) => Self::in_operand(cx, source),
+            ValueSource::Rvalue(rvalue) => Self::in_rvalue(cx, per_local, rvalue),
+            ValueSource::DropAndReplace(source) => Self::in_operand(cx, per_local, source),
             ValueSource::Call { callee, args, return_ty } => {
-                Self::in_call(cx, callee, args, return_ty)
+                Self::in_call(cx, per_local, callee, args, return_ty)
             }
         }
     }
@@ -340,16 +386,19 @@ trait Qualif {
 /// and at *any point* during the run-time would produce the same result. In particular,
 /// promotion of temporaries must not change program behavior; if the promoted could be
 /// written to, that would be a problem.
+#[derive(Clone, Copy, Debug)]
 struct HasMutInterior;
 
 impl Qualif for HasMutInterior {
-    const IDX: usize = 0;
-
     fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> {
         Some(!ty.is_freeze(cx.tcx, cx.param_env, DUMMY_SP))
     }
 
-    fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+    fn in_rvalue(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        rvalue: &Rvalue<'tcx>,
+    ) -> bool {
         match *rvalue {
             // Returning `true` for `Rvalue::Ref` indicates the borrow isn't
             // allowed in constants (and the `Checker` will error), and/or it
@@ -393,7 +442,7 @@ impl Qualif for HasMutInterior {
             _ => {}
         }
 
-        Self::in_rvalue_structurally(cx, rvalue)
+        Self::in_rvalue_structurally(cx, per_local, rvalue)
     }
 }
 
@@ -401,16 +450,23 @@ impl Qualif for HasMutInterior {
 /// This must be ruled out (a) because we cannot run `Drop` during compile-time
 /// as that might not be a `const fn`, and (b) because implicit promotion would
 /// remove side-effects that occur as part of dropping that value.
+#[derive(Clone, Copy, Debug)]
 struct NeedsDrop;
 
 impl Qualif for NeedsDrop {
-    const IDX: usize = 1;
+    fn is_cleared_on_move() -> bool {
+        true
+    }
 
     fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> {
         Some(ty.needs_drop(cx.tcx, cx.param_env))
     }
 
-    fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+    fn in_rvalue(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        rvalue: &Rvalue<'tcx>,
+    ) -> bool {
         if let Rvalue::Aggregate(ref kind, _) = *rvalue {
             if let AggregateKind::Adt(def, ..) = **kind {
                 if def.has_dtor(cx.tcx) {
@@ -419,7 +475,7 @@ impl Qualif for NeedsDrop {
             }
         }
 
-        Self::in_rvalue_structurally(cx, rvalue)
+        Self::in_rvalue_structurally(cx, per_local, rvalue)
     }
 }
 
@@ -429,10 +485,25 @@ impl Qualif for NeedsDrop {
 /// constant rules (modulo interior mutability or `Drop` rules which are handled `HasMutInterior`
 /// and `NeedsDrop` respectively). Basically this duplicates the checks that the const-checking
 /// visitor enforces by emitting errors when working in const context.
+#[derive(Clone, Copy, Debug)]
 struct IsNotPromotable;
 
 impl Qualif for IsNotPromotable {
-    const IDX: usize = 2;
+    fn in_arg_initially(_cx: &ConstCx<'_, 'tcx>, _local: Local) -> bool {
+        true
+    }
+
+    fn in_temp_initially(
+        _cx: &ConstCx<'_, 'tcx>,
+        local: Local,
+        promotion_state: &IndexVec<Local, TempState>,
+    ) -> bool {
+        !promotion_state[local].is_promotable()
+    }
+
+    fn in_user_variable_initially(_cx: &ConstCx<'_, 'tcx>, _local: Local) -> bool {
+        true
+    }
 
     fn in_static(cx: &ConstCx<'_, 'tcx>, static_: &Static<'tcx>) -> bool {
         match static_.kind {
@@ -451,6 +522,7 @@ impl Qualif for IsNotPromotable {
 
     fn in_projection(
         cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
         place: PlaceRef<'_, 'tcx>,
     ) -> bool {
         let proj = place.projection.as_ref().unwrap();
@@ -476,10 +548,14 @@ impl Qualif for IsNotPromotable {
             }
         }
 
-        Self::in_projection_structurally(cx, place)
+        Self::in_projection_structurally(cx, per_local, place)
     }
 
-    fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+    fn in_rvalue(
+        cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
+        rvalue: &Rvalue<'tcx>,
+    ) -> bool {
         match *rvalue {
             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) if cx.mode == Mode::NonConstFn => {
                 let operand_ty = operand.ty(cx.body, cx.tcx);
@@ -512,11 +588,12 @@ impl Qualif for IsNotPromotable {
             _ => {}
         }
 
-        Self::in_rvalue_structurally(cx, rvalue)
+        Self::in_rvalue_structurally(cx, per_local, rvalue)
     }
 
     fn in_call(
         cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
         callee: &Operand<'tcx>,
         args: &[Operand<'tcx>],
         _return_ty: Ty<'tcx>,
@@ -572,7 +649,8 @@ impl Qualif for IsNotPromotable {
             _ => return true,
         }
 
-        Self::in_operand(cx, callee) || args.iter().any(|arg| Self::in_operand(cx, arg))
+        Self::in_operand(cx, per_local, callee)
+            || args.iter().any(|arg| Self::in_operand(cx, per_local, arg))
     }
 }
 
@@ -581,13 +659,19 @@ impl Qualif for IsNotPromotable {
 /// Implicit promotion has almost the same rules, except that disallows `const fn` except for
 /// those marked `#[rustc_promotable]`. This is to avoid changing a legitimate run-time operation
 /// into a failing compile-time operation e.g. due to addresses being compared inside the function.
+#[derive(Clone, Copy, Debug)]
 struct IsNotImplicitlyPromotable;
 
 impl Qualif for IsNotImplicitlyPromotable {
-    const IDX: usize = 3;
+    /// Function parameters will still never be promoted because this same function returns `true`
+    /// for `IsNotPromotable`, and both are checked before promoting a `Local`.
+    fn in_arg_initially(_cx: &ConstCx<'_, 'tcx>, _local: Local) -> bool {
+        false
+    }
 
     fn in_call(
         cx: &ConstCx<'_, 'tcx>,
+        per_local: &BitSet<Local>,
         callee: &Operand<'tcx>,
         args: &[Operand<'tcx>],
         _return_ty: Ty<'tcx>,
@@ -602,53 +686,591 @@ impl Qualif for IsNotImplicitlyPromotable {
             }
         }
 
-        Self::in_operand(cx, callee) || args.iter().any(|arg| Self::in_operand(cx, arg))
+        Self::in_operand(cx, per_local, callee)
+            || args.iter().any(|arg| Self::in_operand(cx, per_local, arg))
     }
 }
 
-// Ensure the `IDX` values are sequential (`0..QUALIF_COUNT`).
-macro_rules! static_assert_seq_qualifs {
+macro_rules! define_qualifs {
+    // Top-level, non-recursive mode
+
+    ( $( $Q:ident, $field:ident );* $(;)? ) => {
+        $(
+            impl Index<$Q> for Resolvers<'mir, 'tcx> {
+                type Output = dyn 'mir + QualifResolver<'tcx, $Q>;
+
+                fn index(&self, _: $Q) -> &Self::Output {
+                    &*self.$field
+                }
+            }
+
+            impl IndexMut<$Q> for Resolvers<'mir, 'tcx> {
+                fn index_mut(&mut self, _: $Q) -> &mut Self::Output {
+                    &mut *self.$field
+                }
+            }
+        )*
+
+        /// Executes `body` once for each implementor of `Qualif`.
+        ///
+        /// This macro overloads closure syntax to put the type of each `Qualif` as well as
+        /// a value of that type into scope for `body`. For example, the following code would print
+        /// the result of `in_any_value_of_ty` for each `Qualif` (assuming `cx` and `ty` are
+        /// already in scope).
+        ///
+        /// ```
+        /// for_each_qualif!(|q: Q| dbg!(Q::in_any_value_of_ty(cx, ty)));
+        /// ```
+        ///
+        /// Note that the type annotation for the closure argument (the `Q` in `q: Q`) is
+        /// mandatory and must be a valid identifier (it is used as the name of a type alias within
+        /// the macro).
+        macro_rules! for_each_qualif {
+            ( |$q:ident : $ty:ident| $body:expr ) => {
+                {
+                    $(
+                        (|$q| {
+                            #[allow(unused)]
+                            type $ty = $Q;
+                            $body
+                        })($Q);
+                    )*
+                }
+            }
+        }
+
+        // Enter recursive mode to assign a numeric index to each `Qualif`
+        define_qualifs!(0 => $( $Q ),*);
+    };
+
+    // Recursive mode
+
     ($i:expr => $first:ident $(, $rest:ident)*) => {
-        static_assert!({
-            static_assert_seq_qualifs!($i + 1 => $($rest),*);
+        impl QualifIdx for $first {
+            const IDX: usize = $i;
+        }
 
-            $first::IDX == $i
-        });
+        define_qualifs!($i + 1 => $($rest),*);
     };
     ($i:expr =>) => {
-        static_assert!(QUALIF_COUNT == $i);
+        const QUALIF_COUNT: usize = $i;
     };
 }
-static_assert_seq_qualifs!(
-    0 => HasMutInterior, NeedsDrop, IsNotPromotable, IsNotImplicitlyPromotable
-);
 
-impl ConstCx<'_, 'tcx> {
-    fn qualifs_in_any_value_of_ty(&self, ty: Ty<'tcx>) -> PerQualif<bool> {
-        let mut qualifs = PerQualif::default();
-        qualifs[HasMutInterior] = HasMutInterior::in_any_value_of_ty(self, ty).unwrap_or(false);
-        qualifs[NeedsDrop] = NeedsDrop::in_any_value_of_ty(self, ty).unwrap_or(false);
-        qualifs[IsNotPromotable] = IsNotPromotable::in_any_value_of_ty(self, ty).unwrap_or(false);
-        qualifs[IsNotImplicitlyPromotable] =
-            IsNotImplicitlyPromotable::in_any_value_of_ty(self, ty).unwrap_or(false);
-        qualifs
+define_qualifs! {
+    HasMutInterior, has_mut_interior;
+    NeedsDrop, needs_drop;
+    IsNotPromotable, is_not_promotable;
+    IsNotImplicitlyPromotable, is_not_implicitly_promotable;
+}
+static_assert!(QUALIF_COUNT == 4);
+
+macro_rules! get_qualif {
+    ($Q:ident :: $fn:ident ( $self:expr $(, $args:expr),* )) => {
+        {
+            let args = vec![
+                $( format!("{:?}", $args) ),*
+            ];
+            trace!(
+                "{}::{}({})",
+                stringify!($Q),
+                stringify!($fn),
+                args.join(","),
+            );
+
+            $self.compare(
+                |this| {
+                    let qualifs = &this.per_local[$Q];
+                    $Q::$fn(&this.cx, qualifs, $($args),*)
+                },
+                |this| {
+                    let qualifs = &this.qualifs[$Q].get();
+                    $Q::$fn(&this.cx, qualifs, $($args),*)
+                }
+            )
+        }
     }
+}
 
-    fn qualifs_in_local(&self, local: Local) -> PerQualif<bool> {
-        let mut qualifs = PerQualif::default();
-        qualifs[HasMutInterior] = HasMutInterior::in_local(self, local);
-        qualifs[NeedsDrop] = NeedsDrop::in_local(self, local);
-        qualifs[IsNotPromotable] = IsNotPromotable::in_local(self, local);
-        qualifs[IsNotImplicitlyPromotable] = IsNotImplicitlyPromotable::in_local(self, local);
-        qualifs
+impl fmt::Debug for PerQualif<bool> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "(")?;
+
+        let mut first = true;
+        for_each_qualif!(|q: Q| {
+            if self.0[Q::IDX] {
+                if !first {
+                    let _ = write!(f, " | ");
+                }
+
+                let _ = write!(f, "{:?}", q);
+                first = false;
+            }
+        });
+
+        write!(f, ")")
     }
+}
 
-    fn qualifs_in_value(&self, source: ValueSource<'_, 'tcx>) -> PerQualif<bool> {
+/// Types that can compute the qualifs of each local at all locations in a `mir::Body`.
+///
+/// Code that wishes to use a `QualifResolver` must call `visit_{statement,terminator}` for each
+/// statement or terminator, processing blocks in reverse post-order beginning from the
+/// `START_BLOCK`. Calling code may optionally call `get` after visiting each statement or
+/// terminator to query the qualification state immediately after that statement or terminator.
+///
+/// These conditions are much more restrictive than woud be required by `FlowSensitiveResolver`
+/// alone. This is to allow a linear, on-demand `TempOnlyResolver` that can operate efficiently on
+/// simple CFGs.
+trait QualifResolver<'tcx, Q>: Visitor<'tcx> {
+    /// Get the qualifs of each local at the last location visited.
+    ///
+    /// This takes `&mut self` to allow qualifs to be computed lazily.
+    fn get(&mut self) -> &BitSet<Local>;
+
+    fn contains(&mut self, local: Local) -> bool {
+        self.get().contains(local)
+    }
+}
+
+/// This struct stores a resolver for each `Qualif` that must be computed when const-checking a
+/// `mir::Body`.
+///
+/// Resolvers can and should be accessed via the `Index` impl for the desired `Qualif` (e.g.
+/// `resolvers[NeedsDrop]`). This chicanery makes the use of macros like `for_each_qualif` more
+/// convenient.
+pub struct Resolvers<'mir, 'tcx> {
+    needs_drop: Box<dyn 'mir + QualifResolver<'tcx, NeedsDrop>>,
+    has_mut_interior: Box<dyn 'mir + QualifResolver<'tcx, HasMutInterior>>,
+    is_not_promotable: Box<dyn 'mir + QualifResolver<'tcx, IsNotPromotable>>,
+    is_not_implicitly_promotable: Box<dyn 'mir + QualifResolver<'tcx, IsNotImplicitlyPromotable>>,
+}
+
+impl Resolvers<'mir, 'tcx> {
+    fn new(
+        cx: &ConstCx<'mir, 'tcx>,
+        def_id: DefId,
+        temp_promotion_state: &IndexVec<Local, TempState>,
+    ) -> Self {
+        let dead_unwinds = BitSet::new_empty(cx.body.basic_blocks().len());
+
+        let borrowed_locals = do_dataflow(
+            cx.tcx,
+            cx.body,
+            def_id,
+            &[],
+            &dead_unwinds,
+            HaveBeenBorrowedLocals::new(cx.body),
+            |_, local| dataflow::DebugFormatted::new(&local),
+        );
+
+        let borrowed_locals = dataflow::DataflowResultsCursor::new(borrowed_locals, cx.body);
+        let borrowed_locals = Rc::new(RefCell::new(borrowed_locals));
+
+        let needs_drop = if cx.mode == Mode::NonConstFn {
+            Box::new(TempOnlyResolver::new(
+                NeedsDrop,
+                cx,
+                borrowed_locals.clone(),
+                temp_promotion_state,
+            )) as Box<_>
+        } else {
+            Box::new(FlowSensitiveResolver::new(
+                NeedsDrop,
+                cx,
+                borrowed_locals.clone(),
+                temp_promotion_state,
+                &dead_unwinds,
+            )) as Box<_>
+        };
+
+        let has_mut_interior = if cx.mode == Mode::NonConstFn {
+            Box::new(TempOnlyResolver::new(
+                HasMutInterior,
+                cx,
+                borrowed_locals.clone(),
+                temp_promotion_state,
+            )) as Box<_>
+        } else {
+            Box::new(FlowSensitiveResolver::new(
+                HasMutInterior,
+                cx,
+                borrowed_locals.clone(),
+                temp_promotion_state,
+                &dead_unwinds,
+            )) as Box<_>
+        };
+
+        let is_not_promotable = Box::new(TempOnlyResolver::new(
+            IsNotPromotable,
+            cx,
+            borrowed_locals.clone(),
+            temp_promotion_state,
+        ));
+        let is_not_implicitly_promotable = Box::new(TempOnlyResolver::new(
+            IsNotImplicitlyPromotable,
+            cx,
+            borrowed_locals.clone(),
+            temp_promotion_state,
+        ));
+
+        Resolvers {
+            needs_drop,
+            has_mut_interior,
+            is_not_promotable,
+            is_not_implicitly_promotable,
+        }
+    }
+}
+
+struct TempOnlyResolver<'mir, 'tcx, Q> {
+    cx: ConstCx<'mir, 'tcx>,
+    borrowed_locals: BorrowedLocalsResults<'mir, 'tcx>,
+    per_local: BitSet<Local>,
+    _qualif: PhantomData<Q>,
+}
+
+impl<Q> TempOnlyResolver<'mir, 'tcx, Q>
+where
+    Q: Qualif,
+{
+    fn new(
+        _: Q,
+        cx: &ConstCx<'mir, 'tcx>,
+        borrowed_locals: BorrowedLocalsResults<'mir, 'tcx>,
+        temp_promotion_state: &IndexVec<Local, TempState>,
+    ) -> Self {
+        let mut per_local = BitSet::new_empty(cx.body.local_decls.len());
+        initialize_qualifs::<Q>(&mut per_local, cx, temp_promotion_state);
+
+        TempOnlyResolver {
+            cx: (*cx).clone(),
+            borrowed_locals,
+            per_local,
+            _qualif: PhantomData,
+        }
+    }
+}
+
+
+impl<Q> Visitor<'tcx> for TempOnlyResolver<'_, 'tcx, Q>
+where
+    Q: Qualif
+{
+    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
+        QualifPropagator::<Q>::new(&self.cx, &mut self.per_local, &self.borrowed_locals)
+            .visit_statement(statement, location);
+    }
+
+    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
+        QualifPropagator::<Q>::new(&self.cx, &mut self.per_local, &self.borrowed_locals)
+            .visit_terminator(terminator, location);
+
+        if let mir::TerminatorKind::Call {
+            destination: Some((return_place, _)),
+            func,
+            args,
+            ..
+        } = &terminator.kind {
+            let return_ty = return_place.ty(self.cx.body, self.cx.tcx).ty;
+            let in_call = Q::in_call(&self.cx, &mut self.per_local, func, args, return_ty);
+            QualifPropagator::<Q>::new(&self.cx, &mut self.per_local, &self.borrowed_locals)
+                .assign_qualif(return_place, in_call, location);
+        }
+    }
+}
+
+impl<Q> QualifResolver<'tcx, Q> for TempOnlyResolver<'_, 'tcx, Q>
+where
+    Q: Qualif
+{
+    fn get(&mut self) -> &BitSet<Local> {
+        &self.per_local
+    }
+}
+
+struct FlowSensitiveResolver<'mir, 'tcx, Q>
+where
+    Q: Qualif
+{
+    cursor: generic::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>,
+    location: Location,
+}
+
+impl<Q> FlowSensitiveResolver<'mir, 'tcx, Q>
+where
+    Q: Qualif,
+{
+    fn new(
+        _: Q,
+        cx: &ConstCx<'mir, 'tcx>,
+        borrowed_locals: BorrowedLocalsResults<'mir, 'tcx>,
+        temp_promotion_state: &IndexVec<Local, TempState>,
+        dead_unwinds: &BitSet<BasicBlock>,
+    ) -> Self {
+        let analysis = FlowSensitiveAnalysis {
+            cx: (*cx).clone(),
+            borrowed_locals,
+            temp_promotion_state: temp_promotion_state.clone(),
+            _qualif: PhantomData,
+        };
+        let results = generic::Engine::new(cx.body, dead_unwinds, analysis).iterate_to_fixpoint();
+        let cursor = generic::ResultsCursor::new(cx.body, results);
+
+        FlowSensitiveResolver {
+            cursor,
+            location: Location { block: START_BLOCK, statement_index: 0 },
+        }
+    }
+}
+
+impl<Q> Visitor<'tcx> for FlowSensitiveResolver<'_, 'tcx, Q>
+where
+    Q: Qualif
+{
+    fn visit_statement(&mut self, _: &Statement<'tcx>, location: Location) {
+        self.location = location;
+    }
+
+    fn visit_terminator(&mut self, _: &Terminator<'tcx>, location: Location) {
+        self.location = location;
+    }
+}
+
+impl<Q> QualifResolver<'tcx, Q> for FlowSensitiveResolver<'_, 'tcx, Q>
+where
+    Q: Qualif
+{
+    fn get(&mut self) -> &BitSet<Local> {
+        self.cursor.seek_after_assume_call_returns(self.location);
+        self.cursor.get()
+    }
+}
+
+type BorrowedLocalsResults<'mir, 'tcx> =
+    Rc<RefCell<dataflow::DataflowResultsCursor<'mir, 'tcx, HaveBeenBorrowedLocals<'mir, 'tcx>>>>;
+
+struct FlowSensitiveAnalysis<'mir, 'tcx, Q> {
+    cx: ConstCx<'mir, 'tcx>,
+    temp_promotion_state: IndexVec<Local, TempState>,
+    borrowed_locals: BorrowedLocalsResults<'mir, 'tcx>,
+    _qualif: PhantomData<Q>,
+}
+
+impl<Q> dataflow::BottomValue for FlowSensitiveAnalysis<'_, '_, Q> {
+    const BOTTOM_VALUE: bool = false;
+}
+
+impl<Q> generic::Analysis<'tcx> for FlowSensitiveAnalysis<'_, 'tcx, Q>
+where
+    Q: Qualif,
+{
+    type Idx = Local;
+
+    fn name() -> &'static str {
+        "qualifier"
+    }
+
+    fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
+    fn initialize_start_block(&self, _body: &mir::Body<'tcx>, state: &mut BitSet<Self::Idx>) {
+        initialize_qualifs::<Q>(state, &self.cx, &self.temp_promotion_state);
+    }
+
+    fn apply_statement_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        statement: &mir::Statement<'tcx>,
+        location: Location,
+    ) {
+        QualifPropagator::<Q>::new(&self.cx, state, &self.borrowed_locals)
+            .visit_statement(statement, location);
+    }
+
+    fn apply_terminator_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        terminator: &mir::Terminator<'tcx>,
+        location: Location,
+    ) {
+        QualifPropagator::<Q>::new(&self.cx, state, &self.borrowed_locals)
+            .visit_terminator(terminator, location);
+    }
+
+    fn apply_call_return_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        block: BasicBlock,
+        func: &mir::Operand<'tcx>,
+        args: &[mir::Operand<'tcx>],
+        return_place: &mir::Place<'tcx>,
+    ) {
+        let location = self.cx.body.terminator_loc(block);
+        let return_ty = return_place.ty(self.cx.body, self.cx.tcx).ty;
+        let in_call = Q::in_call(&self.cx, state, func, args, return_ty);
+        QualifPropagator::<Q>::new(&self.cx, state, &self.borrowed_locals)
+            .assign_qualif(return_place, in_call, location);
+    }
+}
+
+/// Returns `true` for terminators that may manipulate `Local`s in their environment.
+fn terminator_may_mutate_environment(term: &mir::TerminatorKind<'_>) -> bool {
+    match term {
+        | TerminatorKind::Drop { .. }
+        | TerminatorKind::DropAndReplace { .. }
+        | TerminatorKind::Call { .. }
+        | TerminatorKind::GeneratorDrop  // ?
+        => true,
+
+        | TerminatorKind::Goto { .. }
+        | TerminatorKind::SwitchInt { .. }
+        | TerminatorKind::Resume
+        | TerminatorKind::Abort
+        | TerminatorKind::Return
+        | TerminatorKind::Unreachable
+        | TerminatorKind::Assert { .. }
+        | TerminatorKind::Yield { .. }
+        | TerminatorKind::FalseEdges { .. }
+        | TerminatorKind::FalseUnwind { .. }
+        => false,
+    }
+}
+
+/// This `Visitor` is responsible for updating the qualifications of each `Local` with the
+/// effects of a statement or terminator.
+///
+/// Although the term "transfer function" comes from dataflow analysis, this type is also used
+/// to propagate qualifs in the simplified, temp-only analysis.
+struct QualifPropagator<'a, 'mir, 'tcx, Q> {
+    cx: &'a ConstCx<'mir, 'tcx>,
+    qualifs_per_local: &'a mut BitSet<Local>,
+    borrowed_locals: &'a BorrowedLocalsResults<'mir, 'tcx>,
+    _qualif: PhantomData<Q>,
+}
+
+impl<Q> QualifPropagator<'a, 'mir, 'tcx, Q>
+where
+    Q: Qualif,
+{
+    fn new(
+        cx: &'a ConstCx<'mir, 'tcx>,
+        state: &'a mut BitSet<Local>,
+        borrowed_locals: &'a BorrowedLocalsResults<'mir, 'tcx>,
+    ) -> Self {
+        QualifPropagator {
+            cx,
+            qualifs_per_local: state,
+            borrowed_locals,
+            _qualif: PhantomData }
+    }
+
+    fn assign_qualif(&mut self, place: &Place<'tcx>, qualif: bool, location: Location) {
+        // trace!("QualifPropagator::assign_qualif({:?}, {})", place, qualif);
+
+        // For now, we do not clear the qualif if a local is overwritten in full by
+        // an unqualified rvalue (e.g. `y = rvalue`). This is to be consistent
+        // with aggregates where we overwrite all fields with assignments, which would not
+        // get this feature.
+        if !qualif {
+            if let Place { base: PlaceBase::Local(_local), projection: None } = place {
+                // self.qualifs_per_local.remove(local);
+            }
+
+            return;
+        }
+
+        debug_assert!(qualif);
+
+        // If we are assigning to the target of a pointer dereference (e.g. `*p = rvalue`), we
+        // need to treat any local whose address has been observed as a possible target of the
+        // assignment.
+        if place.is_indirect() {
+            self.assign_qualif_indirect(location);
+        } else if let PlaceBase::Local(local) = place.base {
+            self.qualifs_per_local.insert(local);
+        }
+    }
+
+    fn assign_qualif_indirect(&mut self, location: Location) {
+        // trace!("QualifPropagator::assign_qualif_indirect()");
+
+        let mut borrowed_locals = self.borrowed_locals.borrow_mut();
+        borrowed_locals.seek(location);
+
+        for local in borrowed_locals.get().iter() {
+            let ty = self.cx.body.local_decls[local].ty;
+            if Q::in_any_value_of_ty(&self.cx, ty).unwrap_or(true) {
+                self.qualifs_per_local.insert(local);
+            }
+        }
+    }
+}
+
+impl<Q> Visitor<'tcx> for QualifPropagator<'_, '_, 'tcx, Q>
+where
+    Q: Qualif,
+{
+    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
+        // trace!("QualifPropagator::visit_operand({:?})", operand);
+
+        self.super_operand(operand, location);
+
+        if !Q::is_cleared_on_move() {
+            return;
+        }
+
+        // If a local with no projections is moved from (e.g. `x` in `y = x`), record that
+        // it no longer needs to be dropped.
+        if let Operand::Move(Place { base: PlaceBase::Local(local), projection: None }) = *operand {
+            self.qualifs_per_local.remove(local);
+        }
+    }
+
+    fn visit_assign(
+        &mut self,
+        place: &Place<'tcx>,
+        rvalue: &Rvalue<'tcx>,
+        location: Location,
+    ) {
+        // trace!("QualifPropagator::visit_assign({:?}, {:?})", place, rvalue);
+
+        // We need to call `assign_qualif` before visiting `rvalue` since moving out of a local
+        // can clear `NeedsDrop` for that local.
+        self.assign_qualif(place, Q::in_rvalue(self.cx, self.qualifs_per_local, rvalue), location);
+
+        self.super_assign(place, rvalue, location);
+    }
+
+    fn visit_terminator_kind(&mut self, kind: &TerminatorKind<'tcx>, location: Location) {
+        // Function calls and custom drop impls could mutate our `Local`s via a pointer, causing
+        // them to become qualified.
+        if terminator_may_mutate_environment(kind) {
+            self.assign_qualif_indirect(location);
+        }
+
+        // We need to call `assign_qualif` before visiting `value` since moving out of a local
+        // can clear `NeedsDrop` for that local.
+        //
+        // Note that the effect of assignment to the return place in `TerminatorKind::Call` is not
+        // applied here. You need to explicitly call `apply_call_return_effect`.
+        if let TerminatorKind::DropAndReplace { value, location: dest, .. } = kind {
+            let qualif = Q::in_operand(self.cx, self.qualifs_per_local, value);
+            self.assign_qualif(dest, qualif, location);
+        }
+
+        self.super_terminator_kind(kind, location);
+    }
+}
+
+
+impl ConstCx<'_, 'tcx> {
+    fn qualifs_in_any_value_of_ty(&self, ty: Ty<'tcx>) -> PerQualif<bool> {
         let mut qualifs = PerQualif::default();
-        qualifs[HasMutInterior] = HasMutInterior::in_value(self, source);
-        qualifs[NeedsDrop] = NeedsDrop::in_value(self, source);
-        qualifs[IsNotPromotable] = IsNotPromotable::in_value(self, source);
-        qualifs[IsNotImplicitlyPromotable] = IsNotImplicitlyPromotable::in_value(self, source);
+        for_each_qualif!(|q: Q| {
+            qualifs[q] = Q::in_any_value_of_ty(self, ty).unwrap_or(false);
+        });
         qualifs
     }
 }
@@ -660,6 +1282,9 @@ impl ConstCx<'_, 'tcx> {
 /// both in functions and const/static items.
 struct Checker<'a, 'tcx> {
     cx: ConstCx<'a, 'tcx>,
+    per_local: PerQualif<BitSet<Local>>,
+    qualifs: Resolvers<'a, 'tcx>,
+    resolvers_location: Option<Location>,
 
     span: Span,
     def_id: DefId,
@@ -695,35 +1320,31 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
 
         let param_env = tcx.param_env(def_id);
 
-        let mut cx = ConstCx {
+        let cx = ConstCx {
             tcx,
             param_env,
             mode,
             body,
-            per_local: PerQualif::new(BitSet::new_empty(body.local_decls.len())),
         };
 
-        for (local, decl) in body.local_decls.iter_enumerated() {
-            if let LocalKind::Arg = body.local_kind(local) {
-                let qualifs = cx.qualifs_in_any_value_of_ty(decl.ty);
-                for (per_local, qualif) in &mut cx.per_local.as_mut().zip(qualifs).0 {
-                    if *qualif {
-                        per_local.insert(local);
-                    }
-                }
-            }
-            if !temps[local].is_promotable() {
-                cx.per_local[IsNotPromotable].insert(local);
-            }
+        let mut per_local = PerQualif::new(BitSet::new_empty(body.local_decls.len()));
+        for_each_qualif!(|q: Q| {
+            initialize_qualifs::<Q>(&mut per_local[q], &cx, &temps);
+        });
+
+        for (local, _) in body.local_decls.iter_enumerated() {
             if let LocalKind::Var = body.local_kind(local) {
                 // Sanity check to prevent implicit and explicit promotion of
                 // named locals
-                assert!(cx.per_local[IsNotPromotable].contains(local));
+                assert!(per_local[IsNotPromotable].contains(local));
             }
         }
 
         Checker {
             cx,
+            qualifs: Resolvers::new(&cx, def_id, &temps),
+            resolvers_location: None,
+            per_local,
             span: body.span,
             def_id,
             rpo,
@@ -732,6 +1353,70 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
         }
     }
 
+    fn qualifs_in_local(&mut self, local: Local) -> PerQualif<bool> {
+        trace!("qualifs_in_local({:?})", local);
+        self.compare(
+            |this| {
+                let mut qualifs = PerQualif::default();
+                for_each_qualif!(|q: Q| {
+                    qualifs[q] = this.per_local[q].contains(local);
+                });
+                qualifs
+            },
+            |this| {
+                let mut qualifs = PerQualif::default();
+                for_each_qualif!(|q: Q| {
+                    qualifs[q] = IndexMut::<Q>::index_mut(&mut this.qualifs, q).contains(local);
+                });
+                qualifs
+            }
+        )
+    }
+
+    /// Returns the qualifs of `source`, computed with both the old and the new implementation
+    /// and cross-checked via `compare`.
+    fn qualifs_in_value(&mut self, source: ValueSource<'_, 'tcx>) -> PerQualif<bool> {
+        trace!("qualifs_in_value({:?})", source);
+        self.compare(
+            |this| {
+                let mut qualifs = PerQualif::default();
+                for_each_qualif!(|q: Q| {
+                    qualifs[q] = Q::in_value(this, &this.per_local[q], source);
+                });
+                qualifs
+            },
+            |this| {
+                let mut qualifs = PerQualif::default();
+                for_each_qualif!(|q: Q| {
+                    let per_local = IndexMut::<Q>::index_mut(&mut this.qualifs, q).get().clone();
+                    qualifs[q] = Q::in_value(this, &per_local, source);
+                });
+                qualifs
+            }
+        )
+    }
+
+    /// Computes the same value in two different ways, compares the results for equality, logs
+    /// the outcome, and returns the value computed by `new`.
+    ///
+    /// If the two results differ, a warning is logged; if the `RUSTC_FAIL_FAST` environment
+    /// variable is also set (to any value), the compiler panics instead.
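+    ///
+    /// A typical call site (mirroring one used later in this file) looks like:
+    ///
+    /// ```ignore
+    /// let needs_drop = self.compare(
+    ///     |this| this.per_local[NeedsDrop].contains(local), // old implementation
+    ///     |this| this.qualifs[NeedsDrop].contains(local),   // new implementation
+    /// );
+    /// ```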
+    fn compare<T: fmt::Debug + Eq>(
+        &mut self,
+        old: impl FnOnce(&mut Self) -> T,
+        new: impl FnOnce(&mut Self) -> T,
+    ) -> T {
+        let old = old(self);
+        let new = new(self);
+
+        if old != new {
+            warn!("new: {:?}", new);
+            warn!("old: {:?}", old);
+            if std::env::var("RUSTC_FAIL_FAST").is_ok() {
+                panic!("Differing qualifs");
+            }
+        } else {
+            debug!("val: {:?}", old);
+        }
+
+        new
+    }
+
     // FIXME(eddyb) we could split the errors into meaningful
     // categories, but enabling full miri would make that
     // slightly pointless (even with feature-gating).
@@ -759,7 +1444,7 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
     fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location: Location) {
         trace!("assign: {:?} <- {:?}", dest, source);
 
-        let mut qualifs = self.qualifs_in_value(source);
+        let qualifs = self.qualifs_in_value(source);
 
         match source {
             ValueSource::Rvalue(&Rvalue::Ref(_, kind, ref place)) => {
@@ -770,8 +1455,9 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
                 // Then `HasMutInterior` is replaced with `IsNotPromotable`,
                 // to avoid duplicate errors (e.g. from reborrowing).
                 if qualifs[HasMutInterior] {
-                    qualifs[HasMutInterior] = false;
-                    qualifs[IsNotPromotable] = true;
+                    // FIXME: The replacement of `HasMutInterior` with `IsNotPromotable`
+                    // described in the comment above used to happen here, but is currently
+                    // disabled. Determine whether it is still needed and how it should
+                    // interact with the new resolvers.
+                    // qualifs[HasMutInterior] = false;
+                    // qualifs[IsNotPromotable] = true;
 
                     if self.mode.requires_const_checking() {
                         if !self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
@@ -847,8 +1533,9 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
             },
             ValueSource::Rvalue(&Rvalue::Repeat(ref operand, _)) => {
                 let candidate = Candidate::Repeat(location);
-                let not_promotable = IsNotImplicitlyPromotable::in_operand(self, operand) ||
-                                     IsNotPromotable::in_operand(self, operand);
+                let not_promotable =
+                    get_qualif!(IsNotImplicitlyPromotable::in_operand(self, operand))
+                    || get_qualif!(IsNotPromotable::in_operand(self, operand));
                 debug!("assign: self.def_id={:?} operand={:?}", self.def_id, operand);
                 if !not_promotable && self.tcx.features().const_in_array_repeat_expressions {
                     debug!("assign: candidate={:?}", candidate);
@@ -903,7 +1590,7 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
         // While we could special case full assignments, this would be inconsistent with
         // aggregates where we overwrite all fields via assignments, which would not get
         // that feature.
-        for (per_local, qualif) in &mut self.cx.per_local.as_mut().zip(qualifs).0 {
+        for (per_local, qualif) in &mut self.per_local.as_mut().zip(qualifs).0 {
             if *qualif {
                 per_local.insert(index);
             }
@@ -917,9 +1604,43 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
         // be replaced with calling `insert` to re-set the bit).
         if kind == LocalKind::Temp {
             if !self.temp_promotion_state[index].is_promotable() {
-                assert!(self.cx.per_local[IsNotPromotable].contains(index));
+                assert!(self.per_local[IsNotPromotable].contains(index));
             }
         }
+
+        // Cross-check the qualifs of the assignment's destination between the old and new
+        // implementations. The result is discarded: `qualifs_in_local` is called only for the
+        // comparison (and logging) it performs internally.
+        self.update_resolvers(location);
+        let _ = self.qualifs_in_local(index);
+    }
+
+    /// Advances `self.qualifs` so that it holds the dataflow state for the given location.
+    ///
+    /// To match the semantics of the old code, we sometimes need to update `self.qualifs` in the
+    /// middle of const-checking a statement. This function is idempotent for a given `location`,
+    /// so we can call it whenever up-to-date qualifs are needed (such as in `assign`), as well
+    /// as after checking each statement (in case it was never called during checking).
+    fn update_resolvers(&mut self, location: Location) {
+        if self.resolvers_location == Some(location) {
+            return;
+        }
+
+        trace!("Updating resolvers at {:?}", location);
+
+        self.resolvers_location = Some(location);
+        let block_data = &self.cx.body.basic_blocks()[location.block];
+        // A statement index one past the last statement refers to the block's terminator.
+        if block_data.statements.len() == location.statement_index {
+            let term = block_data.terminator();
+            for_each_qualif!(|q: Q| {
+                IndexMut::<Q>::index_mut(&mut self.qualifs, q)
+                    .visit_terminator(term, location)
+            });
+        } else {
+            let stmt = &block_data.statements[location.statement_index];
+            for_each_qualif!(|q: Q| {
+                IndexMut::<Q>::index_mut(&mut self.qualifs, q)
+                    .visit_statement(stmt, location)
+            });
+        }
     }
 
     /// Check a whole const, static initializer or const fn.
@@ -1164,7 +1885,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
                     base: PlaceBase::Local(local),
                     projection: None,
                 } = *place {
-                    self.cx.per_local[NeedsDrop].remove(local);
+                    self.per_local[NeedsDrop].remove(local);
                 }
             }
             Operand::Copy(_) |
@@ -1439,7 +2160,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
                     // which happens even without the user requesting it.
                     // We can error out with a hard error if the argument is not
                     // constant here.
-                    if !IsNotPromotable::in_operand(self, arg) {
+                    if !get_qualif!(IsNotPromotable::in_operand(self, arg)) {
                         debug!("visit_terminator_kind: candidate={:?}", candidate);
                         self.promotion_candidates.push(candidate);
                     } else {
@@ -1479,7 +2200,13 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
                     base: PlaceBase::Local(local),
                     projection: None,
                 } = *place {
-                    if NeedsDrop::in_local(self, local) {
+                    trace!("NeedsDrop({:?})", local);
+                    let needs_drop = self.compare(
+                        |this| this.per_local[NeedsDrop].contains(local),
+                        |this| this.qualifs[NeedsDrop].contains(local),
+                    );
+
+                    if needs_drop {
                         Some(self.body.local_decls[local].source_info.span)
                     } else {
                         None
@@ -1548,6 +2275,12 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
             StatementKind::AscribeUserType(..) |
             StatementKind::Nop => {}
         }
+        self.update_resolvers(location);
+    }
+
+    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
+        self.super_terminator(terminator, location);
+        self.update_resolvers(location);
     }
 }
 
@@ -1570,7 +2303,8 @@ fn mir_const_qualif(tcx: TyCtxt<'_>, def_id: DefId) -> (u8, &BitSet<Local>) {
         return (1 << IsNotPromotable::IDX, tcx.arena.alloc(BitSet::new_empty(0)));
     }
 
-    Checker::new(tcx, def_id, body, Mode::Const).check_const()
+    let mut checker = Checker::new(tcx, def_id, body, Mode::Const);
+    checker.check_const()
 }
 
 pub struct QualifyAndPromoteConstants<'tcx> {
@@ -1626,6 +2360,7 @@ impl<'tcx> MirPass<'tcx> for QualifyAndPromoteConstants<'tcx> {
                         checker.check_const();
                     }
                 } else {
+                    trace!("promotion-checking {:?}", checker.def_id);
                     while let Some((bb, data)) = checker.rpo.next() {
                         checker.visit_basic_block_data(bb, data);
                     }
@@ -1764,3 +2499,22 @@ fn args_required_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option<FxHashSet<usize
     }
     Some(ret)
 }
+
+/// Seeds `per_local` with every local that is qualified by `Q` before execution begins.
+///
+/// The initial qualif of a local depends on its `LocalKind`: arguments, user variables, the
+/// return place, and temporaries each have their own initialization rule.
+fn initialize_qualifs<Q: Qualif>(
+    per_local: &mut BitSet<Local>,
+    cx: &ConstCx<'mir, 'tcx>,
+    temps: &IndexVec<Local, TempState>,
+) {
+    for local in cx.body.local_decls.indices() {
+        let in_local_initially = match cx.body.local_kind(local) {
+            LocalKind::Arg => Q::in_arg_initially(cx, local),
+            LocalKind::Var => Q::in_user_variable_initially(cx, local),
+            LocalKind::ReturnPointer => Q::in_return_place_initially(cx, local),
+            LocalKind::Temp => Q::in_temp_initially(cx, local, temps),
+        };
+
+        if in_local_initially {
+            per_local.insert(local);
+        }
+    }
+}