From 613769193397374485485fed2d9ba3ea2eee5077 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 27 Aug 2019 19:54:54 +0200 Subject: [PATCH 1/8] const interning: move mutability computation into intern_shallow, and always intern constants as immutable --- src/librustc_mir/interpret/intern.rs | 125 +++++++++--------- .../mutable_references_ice.stderr | 2 +- 2 files changed, 60 insertions(+), 67 deletions(-) diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index 4cbbc0ffe17cc..e05b31477e175 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -3,7 +3,7 @@ //! After a const evaluation has computed a value, before we destroy the const evaluator's session //! memory, we need to extract all memory allocations to the global memory pool so they stay around. -use rustc::ty::{Ty, TyCtxt, ParamEnv, self}; +use rustc::ty::{Ty, ParamEnv, self}; use rustc::mir::interpret::{InterpResult, ErrorHandled}; use rustc::hir; use rustc::hir::def_id::DefId; @@ -11,10 +11,9 @@ use super::validity::RefTracking; use rustc_data_structures::fx::FxHashSet; use syntax::ast::Mutability; -use syntax_pos::Span; use super::{ - ValueVisitor, MemoryKind, Pointer, AllocId, MPlaceTy, Scalar, + ValueVisitor, MemoryKind, AllocId, MPlaceTy, Scalar, }; use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext}; @@ -27,12 +26,10 @@ struct InternVisitor<'rt, 'mir, 'tcx> { /// for sanity assertions that will ICE when `const_qualif` screws up. mode: InternMode, /// This field stores the mutability of the value *currently* being checked. - /// It is set to mutable when an `UnsafeCell` is encountered - /// When recursing across a reference, we don't recurse but store the - /// value to be checked in `ref_tracking` together with the mutability at which we are checking - /// the value. - /// When encountering an immutable reference, we treat everything as immutable that is behind - /// it. + /// When encountering a mutable reference, we determine the pointee mutability + /// taking into account the mutability of the context: `& &mut i32` is entirely immutable, + /// despite the nested mutable reference! + /// The field gets updated when an `UnsafeCell` is encountered. mutability: Mutability, /// A list of all encountered relocations. After type-based interning, we traverse this list to /// also intern allocations that are only referenced by a raw pointer or inside a union. @@ -45,9 +42,10 @@ enum InternMode { /// `static`. In a `static mut` we start out as mutable and thus can also contain further `&mut` /// that will actually be treated as mutable. Static, - /// UnsafeCell is OK in the value of a constant, but not behind references in a constant + /// UnsafeCell is OK in the value of a constant: `const FOO = Cell::new(0)` creates + /// a new cell every time it is used. ConstBase, - /// `UnsafeCell` ICEs + /// `UnsafeCell` ICEs. Const, } @@ -56,26 +54,31 @@ enum InternMode { struct IsStaticOrFn; impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> { - /// Intern an allocation without looking at its children + /// Intern an allocation without looking at its children. + /// `mutablity` is the mutability of the place to be interned; even if that says + /// `immutable` things might become mutable if `ty` is not frozen. 
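(Reader aid, not part of the patch: the two observations in the doc comments above can be checked in surface Rust; the names FRESH and nested below are invented for the illustration.)

use std::cell::Cell;

// A `const` with interior mutability at its top level: every use site gets a
// fresh copy of the value, so "mutating" one copy is invisible elsewhere --
// which is why interning it as immutable is sound (the ConstBase case).
const FRESH: Cell<i32> = Cell::new(0);

fn main() {
    let first = FRESH;            // first copy of the constant
    first.set(42);
    assert_eq!(FRESH.get(), 0);   // a second, independent copy

    // `& &mut i32` is entirely immutable: the outer shared reference makes
    // the inner `&mut` unusable for writes.
    let mut x = 1;
    let nested: &&mut i32 = &&mut x;
    // **nested = 2;              // rejected: cannot assign through a `&` reference
    assert_eq!(**nested, 1);
}
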
fn intern_shallow( &mut self, - ptr: Pointer, + alloc_id: AllocId, mutability: Mutability, + ty: Option>, ) -> InterpResult<'tcx, Option> { trace!( "InternVisitor::intern {:?} with {:?}", - ptr, mutability, + alloc_id, mutability, ); // remove allocation let tcx = self.ecx.tcx; let memory = self.ecx.memory_mut(); - let (kind, mut alloc) = match memory.alloc_map.remove(&ptr.alloc_id) { + let (kind, mut alloc) = match memory.alloc_map.remove(&alloc_id) { Some(entry) => entry, None => { - // if the pointer is dangling (neither in local nor global memory), we leave it + // Pointer not found in local memory map. It is either a pointer to the global + // map, or dangling. + // If the pointer is dangling (neither in local nor global memory), we leave it // to validation to error. The `delay_span_bug` ensures that we don't forget such // a check in validation. - if tcx.alloc_map.lock().get(ptr.alloc_id).is_none() { + if tcx.alloc_map.lock().get(alloc_id).is_none() { tcx.sess.delay_span_bug(self.ecx.tcx.span, "tried to intern dangling pointer"); } // treat dangling pointers like other statics @@ -88,14 +91,35 @@ impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> { match kind { MemoryKind::Stack | MemoryKind::Vtable => {}, } - // Ensure llvm knows to only put this into immutable memory if the value is immutable either - // by being behind a reference or by being part of a static or const without interior - // mutability - alloc.mutability = mutability; + // Set allocation mutability as appropriate. This is used by LLVM to put things into + // read-only memory, and also by Miri when evluating other constants/statics that + // access this one. + if self.mode == InternMode::Static { + let frozen = ty.map_or(true, |ty| ty.is_freeze( + self.ecx.tcx.tcx, + self.param_env, + self.ecx.tcx.span, + )); + // For statics, allocation mutability is the combination of the place mutability and + // the type mutability. + // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere. + if mutability == Mutability::Immutable && frozen { + alloc.mutability = Mutability::Immutable; + } else { + // Just making sure we are not "upgrading" an immutable allocation to mutable. + assert_eq!(alloc.mutability, Mutability::Mutable); + } + } else { + // We *could* be non-frozen at `ConstBase`, for constants like `Cell::new(0)`. + // But we still intern that as immutable as the memory cannot be changed once the + // initial value was computed. + // Constants are never mutable. + alloc.mutability = Mutability::Immutable; + }; // link the alloc id to the actual allocation let alloc = tcx.intern_const_alloc(alloc); self.leftover_relocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc)); - tcx.alloc_map.lock().set_alloc_id_memory(ptr.alloc_id, alloc); + tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); Ok(None) } } @@ -119,14 +143,16 @@ for ) -> InterpResult<'tcx> { if let Some(def) = mplace.layout.ty.ty_adt_def() { if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() { - // We are crossing over an `UnsafeCell`, we can mutate again + // We are crossing over an `UnsafeCell`, we can mutate again. This means that + // References we encounter inside here are interned as pointing to mutable + // allocations. let old = std::mem::replace(&mut self.mutability, Mutability::Mutable); assert_ne!( self.mode, InternMode::Const, "UnsafeCells are not allowed behind references in constants. This should have \ been prevented statically by const qualification. 
If this were allowed one \ - would be able to change a constant at one use site and other use sites may \ - arbitrarily decide to change, too.", + would be able to change a constant at one use site and other use sites could \ + observe that mutation.", ); let walked = self.walk_aggregate(mplace, fields); self.mutability = old; @@ -150,7 +176,7 @@ for if let Ok(vtable) = meta.unwrap().to_ptr() { // explitly choose `Immutable` here, since vtables are immutable, even // if the reference of the fat pointer is mutable - self.intern_shallow(vtable, Mutability::Immutable)?; + self.intern_shallow(vtable.alloc_id, Mutability::Immutable, None)?; } } } @@ -195,21 +221,13 @@ for (Mutability::Mutable, hir::Mutability::MutMutable) => Mutability::Mutable, _ => Mutability::Immutable, }; - // Compute the mutability of the allocation - let intern_mutability = intern_mutability( - self.ecx.tcx.tcx, - self.param_env, - mplace.layout.ty, - self.ecx.tcx.span, - mutability, - ); // Recursing behind references changes the intern mode for constants in order to // cause assertions to trigger if we encounter any `UnsafeCell`s. let mode = match self.mode { InternMode::ConstBase => InternMode::Const, other => other, }; - match self.intern_shallow(ptr, intern_mutability)? { + match self.intern_shallow(ptr.alloc_id, mutability, Some(mplace.layout.ty))? { // No need to recurse, these are interned already and statics may have // cycles, so we don't want to recurse there Some(IsStaticOrFn) => {}, @@ -224,23 +242,6 @@ for } } -/// Figure out the mutability of the allocation. -/// Mutable if it has interior mutability *anywhere* in the type. -fn intern_mutability<'tcx>( - tcx: TyCtxt<'tcx>, - param_env: ParamEnv<'tcx>, - ty: Ty<'tcx>, - span: Span, - mutability: Mutability, -) -> Mutability { - let has_interior_mutability = !ty.is_freeze(tcx, param_env, span); - if has_interior_mutability { - Mutability::Mutable - } else { - mutability - } -} - pub fn intern_const_alloc_recursive( ecx: &mut CompileTimeEvalContext<'mir, 'tcx>, def_id: DefId, @@ -251,7 +252,7 @@ pub fn intern_const_alloc_recursive( ) -> InterpResult<'tcx> { let tcx = ecx.tcx; // this `mutability` is the mutability of the place, ignoring the type - let (mutability, base_intern_mode) = match tcx.static_mutability(def_id) { + let (base_mutability, base_intern_mode) = match tcx.static_mutability(def_id) { Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static), None => (Mutability::Immutable, InternMode::ConstBase), // `static mut` doesn't care about interior mutability, it's mutable anyway @@ -259,17 +260,9 @@ pub fn intern_const_alloc_recursive( }; // type based interning - let mut ref_tracking = RefTracking::new((ret, mutability, base_intern_mode)); + let mut ref_tracking = RefTracking::new((ret, base_mutability, base_intern_mode)); let leftover_relocations = &mut FxHashSet::default(); - // This mutability is the combination of the place mutability and the type mutability. If either - // is mutable, `alloc_mutability` is mutable. This exists because the entire allocation needs - // to be mutable if it contains an `UnsafeCell` anywhere. The other `mutability` exists so that - // the visitor does not treat everything outside the `UnsafeCell` as mutable. 
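(Illustration only, not part of the diff; SHARED and COUNTER are invented names.) The pattern the assertion guards against is a shared reference to interior-mutable data inside a `const`; const qualification rejects it before interning ever runs, and the sketch below shows why it has to:

use std::sync::atomic::{AtomicI32, Ordering};

// Rejected up front by const checking (E0492):
//
//     const SHARED: &std::cell::Cell<i32> = &std::cell::Cell::new(0);
//
// If it were accepted, every use of `SHARED` would point at the *same*
// interned allocation, so a write at one use site would be observable at all
// others -- the situation the assertion message describes.

// In a `static`, aliasing one allocation is the intent, and the non-frozen
// type keeps the allocation mutable (the InternMode::Static branch).
static COUNTER: AtomicI32 = AtomicI32::new(0);

fn main() {
    COUNTER.fetch_add(1, Ordering::Relaxed);
    assert_eq!(COUNTER.load(Ordering::Relaxed), 1);
}
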
- let alloc_mutability = intern_mutability( - tcx.tcx, param_env, ret.layout.ty, tcx.span, mutability, - ); - // start with the outermost allocation InternVisitor { ref_tracking: &mut ref_tracking, @@ -277,8 +270,8 @@ pub fn intern_const_alloc_recursive( mode: base_intern_mode, leftover_relocations, param_env, - mutability, - }.intern_shallow(ret.ptr.to_ptr()?, alloc_mutability)?; + mutability: base_mutability, + }.intern_shallow(ret.ptr.to_ptr()?.alloc_id, base_mutability, Some(ret.layout.ty))?; while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() { let interned = InternVisitor { @@ -312,8 +305,8 @@ pub fn intern_const_alloc_recursive( let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect(); while let Some(alloc_id) = todo.pop() { if let Some((_, alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) { - // We can't call the `intern` method here, as its logic is tailored to safe references. - // So we hand-roll the interning logic here again + // We can't call the `intern_shallow` method here, as its logic is tailored to safe + // references. So we hand-roll the interning logic here again. let alloc = tcx.intern_const_alloc(alloc); tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); for &(_, ((), reloc)) in alloc.relocations().iter() { diff --git a/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr b/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr index 82569e260143c..28cf3537d605a 100644 --- a/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr +++ b/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr @@ -6,7 +6,7 @@ LL | *MUH.x.get() = 99; thread 'rustc' panicked at 'assertion failed: `(left != right)` left: `Const`, - right: `Const`: UnsafeCells are not allowed behind references in constants. This should have been prevented statically by const qualification. If this were allowed one would be able to change a constant at one use site and other use sites may arbitrarily decide to change, too.', src/librustc_mir/interpret/intern.rs:LL:CC + right: `Const`: UnsafeCells are not allowed behind references in constants. This should have been prevented statically by const qualification. If this were allowed one would be able to change a constant at one use site and other use sites could observe that mutation.', src/librustc_mir/interpret/intern.rs:LL:CC note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace. 
error: internal compiler error: unexpected panic From 7b8693eff85d39d7b836fa45cdcf30507d7f8731 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 27 Aug 2019 20:59:34 +0200 Subject: [PATCH 2/8] all memory behind a constant must be immutable --- src/librustc_mir/interpret/intern.rs | 7 ++++- .../ui/consts/miri_unleashed/mutable_const.rs | 20 ++++++++++++++ .../miri_unleashed/mutable_const.stderr | 27 +++++++++++++++++++ 3 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 src/test/ui/consts/miri_unleashed/mutable_const.rs create mode 100644 src/test/ui/consts/miri_unleashed/mutable_const.stderr diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index e05b31477e175..6333fce2c47f2 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -304,9 +304,14 @@ pub fn intern_const_alloc_recursive( let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect(); while let Some(alloc_id) = todo.pop() { - if let Some((_, alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) { + if let Some((_, mut alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) { // We can't call the `intern_shallow` method here, as its logic is tailored to safe // references. So we hand-roll the interning logic here again. + if base_intern_mode != InternMode::Static { + // If it's not a static, it *must* be immutable. + // We cannot have mutable memory inside a constant. + alloc.mutability = Mutability::Immutable; + } let alloc = tcx.intern_const_alloc(alloc); tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); for &(_, ((), reloc)) in alloc.relocations().iter() { diff --git a/src/test/ui/consts/miri_unleashed/mutable_const.rs b/src/test/ui/consts/miri_unleashed/mutable_const.rs new file mode 100644 index 0000000000000..b476e04529a52 --- /dev/null +++ b/src/test/ui/consts/miri_unleashed/mutable_const.rs @@ -0,0 +1,20 @@ +// compile-flags: -Zunleash-the-miri-inside-of-you + +#![feature(const_raw_ptr_deref)] +#![deny(const_err)] + +use std::cell::UnsafeCell; + +// make sure we do not just intern this as mutable +const MUTABLE_BEHIND_RAW: *mut i32 = &UnsafeCell::new(42) as *const _ as *mut _; + +const MUTATING_BEHIND_RAW: () = { + // Test that `MUTABLE_BEHIND_RAW` is actually immutable, by doing this at const time. + unsafe { + *MUTABLE_BEHIND_RAW = 99 //~ WARN skipping const checks + //~^ ERROR any use of this value will cause an error + //~^^ tried to modify constant memory + } +}; + +fn main() {} diff --git a/src/test/ui/consts/miri_unleashed/mutable_const.stderr b/src/test/ui/consts/miri_unleashed/mutable_const.stderr new file mode 100644 index 0000000000000..507d4823a111d --- /dev/null +++ b/src/test/ui/consts/miri_unleashed/mutable_const.stderr @@ -0,0 +1,27 @@ +warning: skipping const checks + --> $DIR/mutable_const.rs:14:9 + | +LL | *MUTABLE_BEHIND_RAW = 99 + | ^^^^^^^^^^^^^^^^^^^^^^^^ + +error: any use of this value will cause an error + --> $DIR/mutable_const.rs:14:9 + | +LL | / const MUTATING_BEHIND_RAW: () = { +LL | | // Test that `MUTABLE_BEHIND_RAW` is actually immutable, by doing this at const time. +LL | | unsafe { +LL | | *MUTABLE_BEHIND_RAW = 99 + | | ^^^^^^^^^^^^^^^^^^^^^^^^ tried to modify constant memory +... 
| +LL | | } +LL | | }; + | |__- + | +note: lint level defined here + --> $DIR/mutable_const.rs:4:9 + | +LL | #![deny(const_err)] + | ^^^^^^^^^ + +error: aborting due to previous error + From 75c82b4dd8152562ff2056f2172b20aa9c46def2 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 27 Aug 2019 21:13:05 +0200 Subject: [PATCH 3/8] drop redundant ParamEnv, and avoid constructing InternVisitor without visiting --- src/librustc_mir/const_eval.rs | 6 +- src/librustc_mir/interpret/intern.rs | 170 +++++++++++++++------------ 2 files changed, 95 insertions(+), 81 deletions(-) diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 57ddaa4eff038..3f53f842f314f 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -134,9 +134,8 @@ fn eval_body_using_ecx<'mir, 'tcx>( ecx: &mut CompileTimeEvalContext<'mir, 'tcx>, cid: GlobalId<'tcx>, body: &'mir mir::Body<'tcx>, - param_env: ty::ParamEnv<'tcx>, ) -> InterpResult<'tcx, MPlaceTy<'tcx>> { - debug!("eval_body_using_ecx: {:?}, {:?}", cid, param_env); + debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env); let tcx = ecx.tcx.tcx; let layout = ecx.layout_of(body.return_ty().subst(tcx, cid.instance.substs))?; assert!(!layout.is_unsized()); @@ -162,7 +161,6 @@ fn eval_body_using_ecx<'mir, 'tcx>( ecx, cid.instance.def_id(), ret, - param_env, )?; debug!("eval_body_using_ecx done: {:?}", *ret); @@ -658,7 +656,7 @@ pub fn const_eval_raw_provider<'tcx>( let res = ecx.load_mir(cid.instance.def, cid.promoted); res.and_then( - |body| eval_body_using_ecx(&mut ecx, cid, body, key.param_env) + |body| eval_body_using_ecx(&mut ecx, cid, body) ).and_then(|place| { Ok(RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index 6333fce2c47f2..b657a33db516e 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -3,7 +3,7 @@ //! After a const evaluation has computed a value, before we destroy the const evaluator's session //! memory, we need to extract all memory allocations to the global memory pool so they stay around. -use rustc::ty::{Ty, ParamEnv, self}; +use rustc::ty::{Ty, self}; use rustc::mir::interpret::{InterpResult, ErrorHandled}; use rustc::hir; use rustc::hir::def_id::DefId; @@ -18,10 +18,10 @@ use super::{ use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext}; struct InternVisitor<'rt, 'mir, 'tcx> { - /// previously encountered safe references - ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>, + /// The ectx from which we intern. ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>, - param_env: ParamEnv<'tcx>, + /// Previously encountered safe references. + ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>, /// The root node of the value that we're looking at. This field is never mutated and only used /// for sanity assertions that will ICE when `const_qualif` screws up. mode: InternMode, @@ -53,74 +53,93 @@ enum InternMode { /// into the memory of other constants or statics struct IsStaticOrFn; +/// Intern an allocation without looking at its children. +/// `mode` is the mode of the environment where we found this pointer. +/// `mutablity` is the mutability of the place to be interned; even if that says +/// `immutable` things might become mutable if `ty` is not frozen. 
+fn intern_shallow<'rt, 'mir, 'tcx>( + ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>, + leftover_relocations: &'rt mut FxHashSet, + mode: InternMode, + alloc_id: AllocId, + mutability: Mutability, + ty: Option>, +) -> InterpResult<'tcx, Option> { + trace!( + "InternVisitor::intern {:?} with {:?}", + alloc_id, mutability, + ); + // remove allocation + let tcx = ecx.tcx; + let memory = ecx.memory_mut(); + let (kind, mut alloc) = match memory.alloc_map.remove(&alloc_id) { + Some(entry) => entry, + None => { + // Pointer not found in local memory map. It is either a pointer to the global + // map, or dangling. + // If the pointer is dangling (neither in local nor global memory), we leave it + // to validation to error. The `delay_span_bug` ensures that we don't forget such + // a check in validation. + if tcx.alloc_map.lock().get(alloc_id).is_none() { + tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer"); + } + // treat dangling pointers like other statics + // just to stop trying to recurse into them + return Ok(Some(IsStaticOrFn)); + }, + }; + // This match is just a canary for future changes to `MemoryKind`, which most likely need + // changes in this function. + match kind { + MemoryKind::Stack | MemoryKind::Vtable => {}, + } + // Set allocation mutability as appropriate. This is used by LLVM to put things into + // read-only memory, and also by Miri when evluating other constants/statics that + // access this one. + if mode == InternMode::Static { + let frozen = ty.map_or(true, |ty| ty.is_freeze( + ecx.tcx.tcx, + ecx.param_env, + ecx.tcx.span, + )); + // For statics, allocation mutability is the combination of the place mutability and + // the type mutability. + // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere. + if mutability == Mutability::Immutable && frozen { + alloc.mutability = Mutability::Immutable; + } else { + // Just making sure we are not "upgrading" an immutable allocation to mutable. + assert_eq!(alloc.mutability, Mutability::Mutable); + } + } else { + // We *could* be non-frozen at `ConstBase`, for constants like `Cell::new(0)`. + // But we still intern that as immutable as the memory cannot be changed once the + // initial value was computed. + // Constants are never mutable. + alloc.mutability = Mutability::Immutable; + }; + // link the alloc id to the actual allocation + let alloc = tcx.intern_const_alloc(alloc); + leftover_relocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc)); + tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); + Ok(None) +} + impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> { - /// Intern an allocation without looking at its children. - /// `mutablity` is the mutability of the place to be interned; even if that says - /// `immutable` things might become mutable if `ty` is not frozen. fn intern_shallow( &mut self, alloc_id: AllocId, mutability: Mutability, ty: Option>, ) -> InterpResult<'tcx, Option> { - trace!( - "InternVisitor::intern {:?} with {:?}", - alloc_id, mutability, - ); - // remove allocation - let tcx = self.ecx.tcx; - let memory = self.ecx.memory_mut(); - let (kind, mut alloc) = match memory.alloc_map.remove(&alloc_id) { - Some(entry) => entry, - None => { - // Pointer not found in local memory map. It is either a pointer to the global - // map, or dangling. - // If the pointer is dangling (neither in local nor global memory), we leave it - // to validation to error. 
The `delay_span_bug` ensures that we don't forget such - // a check in validation. - if tcx.alloc_map.lock().get(alloc_id).is_none() { - tcx.sess.delay_span_bug(self.ecx.tcx.span, "tried to intern dangling pointer"); - } - // treat dangling pointers like other statics - // just to stop trying to recurse into them - return Ok(Some(IsStaticOrFn)); - }, - }; - // This match is just a canary for future changes to `MemoryKind`, which most likely need - // changes in this function. - match kind { - MemoryKind::Stack | MemoryKind::Vtable => {}, - } - // Set allocation mutability as appropriate. This is used by LLVM to put things into - // read-only memory, and also by Miri when evluating other constants/statics that - // access this one. - if self.mode == InternMode::Static { - let frozen = ty.map_or(true, |ty| ty.is_freeze( - self.ecx.tcx.tcx, - self.param_env, - self.ecx.tcx.span, - )); - // For statics, allocation mutability is the combination of the place mutability and - // the type mutability. - // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere. - if mutability == Mutability::Immutable && frozen { - alloc.mutability = Mutability::Immutable; - } else { - // Just making sure we are not "upgrading" an immutable allocation to mutable. - assert_eq!(alloc.mutability, Mutability::Mutable); - } - } else { - // We *could* be non-frozen at `ConstBase`, for constants like `Cell::new(0)`. - // But we still intern that as immutable as the memory cannot be changed once the - // initial value was computed. - // Constants are never mutable. - alloc.mutability = Mutability::Immutable; - }; - // link the alloc id to the actual allocation - let alloc = tcx.intern_const_alloc(alloc); - self.leftover_relocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc)); - tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); - Ok(None) + intern_shallow( + self.ecx, + self.leftover_relocations, + self.mode, + alloc_id, + mutability, + ty, + ) } } @@ -171,7 +190,8 @@ for // Handle trait object vtables if let Ok(meta) = value.to_meta() { if let ty::Dynamic(..) = - self.ecx.tcx.struct_tail_erasing_lifetimes(referenced_ty, self.param_env).sty + self.ecx.tcx.struct_tail_erasing_lifetimes( + referenced_ty, self.ecx.param_env).sty { if let Ok(vtable) = meta.unwrap().to_ptr() { // explitly choose `Immutable` here, since vtables are immutable, even @@ -203,7 +223,7 @@ for (InternMode::Const, hir::Mutability::MutMutable) => { match referenced_ty.sty { ty::Array(_, n) - if n.eval_usize(self.ecx.tcx.tcx, self.param_env) == 0 => {} + if n.eval_usize(self.ecx.tcx.tcx, self.ecx.param_env) == 0 => {} ty::Slice(_) if value.to_meta().unwrap().unwrap().to_usize(self.ecx)? == 0 => {} _ => bug!("const qualif failed to prevent mutable references"), @@ -246,9 +266,6 @@ pub fn intern_const_alloc_recursive( ecx: &mut CompileTimeEvalContext<'mir, 'tcx>, def_id: DefId, ret: MPlaceTy<'tcx>, - // FIXME(oli-obk): can we scrap the param env? I think we can, the final value of a const eval - // must always be monomorphic, right? 
- param_env: ty::ParamEnv<'tcx>, ) -> InterpResult<'tcx> { let tcx = ecx.tcx; // this `mutability` is the mutability of the place, ignoring the type @@ -264,14 +281,14 @@ pub fn intern_const_alloc_recursive( let leftover_relocations = &mut FxHashSet::default(); // start with the outermost allocation - InternVisitor { - ref_tracking: &mut ref_tracking, + intern_shallow( ecx, - mode: base_intern_mode, leftover_relocations, - param_env, - mutability: base_mutability, - }.intern_shallow(ret.ptr.to_ptr()?.alloc_id, base_mutability, Some(ret.layout.ty))?; + base_intern_mode, + ret.ptr.to_ptr()?.alloc_id, + base_mutability, + Some(ret.layout.ty) + )?; while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() { let interned = InternVisitor { @@ -279,7 +296,6 @@ pub fn intern_const_alloc_recursive( ecx, mode, leftover_relocations, - param_env, mutability, }.visit_value(mplace); if let Err(error) = interned { From b1d4d2bfeaf45c9f3132b863867888ec89d1d40e Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 27 Aug 2019 21:15:55 +0200 Subject: [PATCH 4/8] relocations -> allocations --- src/librustc_mir/interpret/intern.rs | 31 ++++++++++++++++------------ 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index b657a33db516e..85bc88c86c277 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -22,6 +22,9 @@ struct InternVisitor<'rt, 'mir, 'tcx> { ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>, /// Previously encountered safe references. ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>, + /// A list of all encountered allocations. After type-based interning, we traverse this list to + /// also intern allocations that are only referenced by a raw pointer or inside a union. + leftover_allocations: &'rt mut FxHashSet, /// The root node of the value that we're looking at. This field is never mutated and only used /// for sanity assertions that will ICE when `const_qualif` screws up. mode: InternMode, @@ -31,9 +34,6 @@ struct InternVisitor<'rt, 'mir, 'tcx> { /// despite the nested mutable reference! /// The field gets updated when an `UnsafeCell` is encountered. mutability: Mutability, - /// A list of all encountered relocations. After type-based interning, we traverse this list to - /// also intern allocations that are only referenced by a raw pointer or inside a union. - leftover_relocations: &'rt mut FxHashSet, } #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)] @@ -59,7 +59,7 @@ struct IsStaticOrFn; /// `immutable` things might become mutable if `ty` is not frozen. 
fn intern_shallow<'rt, 'mir, 'tcx>( ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>, - leftover_relocations: &'rt mut FxHashSet, + leftover_allocations: &'rt mut FxHashSet, mode: InternMode, alloc_id: AllocId, mutability: Mutability, @@ -120,7 +120,7 @@ fn intern_shallow<'rt, 'mir, 'tcx>( }; // link the alloc id to the actual allocation let alloc = tcx.intern_const_alloc(alloc); - leftover_relocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc)); + leftover_allocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc)); tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); Ok(None) } @@ -134,7 +134,7 @@ impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> { ) -> InterpResult<'tcx, Option> { intern_shallow( self.ecx, - self.leftover_relocations, + self.leftover_allocations, self.mode, alloc_id, mutability, @@ -276,14 +276,18 @@ pub fn intern_const_alloc_recursive( Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static), }; - // type based interning + // Type based interning. + // `ref_tracking` tracks typed references we have seen and still need to crawl for + // more typed information inside them. + // `leftover_allocations` collects *all* allocations we see, because some might not + // be available in a typed way. They get interned at the end. let mut ref_tracking = RefTracking::new((ret, base_mutability, base_intern_mode)); - let leftover_relocations = &mut FxHashSet::default(); + let leftover_allocations = &mut FxHashSet::default(); // start with the outermost allocation intern_shallow( ecx, - leftover_relocations, + leftover_allocations, base_intern_mode, ret.ptr.to_ptr()?.alloc_id, base_mutability, @@ -295,7 +299,7 @@ pub fn intern_const_alloc_recursive( ref_tracking: &mut ref_tracking, ecx, mode, - leftover_relocations, + leftover_allocations, mutability, }.visit_value(mplace); if let Err(error) = interned { @@ -318,11 +322,12 @@ pub fn intern_const_alloc_recursive( // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw // pointers, ... So we can't intern them according to their type rules - let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect(); + let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect(); while let Some(alloc_id) = todo.pop() { if let Some((_, mut alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) { // We can't call the `intern_shallow` method here, as its logic is tailored to safe - // references. So we hand-roll the interning logic here again. + // references and a `leftover_allocations` set (where we only have a todo-list here). + // So we hand-roll the interning logic here again. if base_intern_mode != InternMode::Static { // If it's not a static, it *must* be immutable. // We cannot have mutable memory inside a constant. 
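(Sketch for orientation, not part of the patch; BEHIND_RAW, Pun, and IN_UNION are invented names.) The allocations that only this final loop reaches are the ones the type-based visitor cannot see through, i.e. targets of raw pointers and references hidden in unions:

// The allocation holding `42` sits behind a raw pointer, so the type-based
// visitor does not follow it; its alloc id is only collected from the outer
// allocation's relocations and interned by the leftover loop -- and, since
// this is a const, forced to immutable there.
const BEHIND_RAW: *const i32 = &42 as *const i32;

// Inside a union the visitor cannot tell which field is live, so the
// reference's target is likewise only reached via leftover_allocations.
union Pun {
    ptr: &'static i32,
    addr: usize,
}

const IN_UNION: Pun = Pun { ptr: &7 };

fn main() {
    unsafe {
        assert_eq!(*BEHIND_RAW, 42);
        assert_eq!(*IN_UNION.ptr, 7);
    }
}
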
@@ -331,7 +336,7 @@ pub fn intern_const_alloc_recursive( let alloc = tcx.intern_const_alloc(alloc); tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); for &(_, ((), reloc)) in alloc.relocations().iter() { - if leftover_relocations.insert(reloc) { + if leftover_allocations.insert(reloc) { todo.push(reloc); } } From 14e3506738e16ffdd1d913c371c9691e36e85b13 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 27 Aug 2019 21:24:30 +0200 Subject: [PATCH 5/8] note a FIXME --- src/librustc_mir/interpret/intern.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index 85bc88c86c277..cf4f3fda090f3 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -331,6 +331,8 @@ pub fn intern_const_alloc_recursive( if base_intern_mode != InternMode::Static { // If it's not a static, it *must* be immutable. // We cannot have mutable memory inside a constant. + // FIXME: ideally we would assert that they already are immutable, to double- + // check our static checks. alloc.mutability = Mutability::Immutable; } let alloc = tcx.intern_const_alloc(alloc); From 342481185255acef5e44af8023bab372314afe51 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 28 Aug 2019 08:31:51 +0200 Subject: [PATCH 6/8] assert that nobody asks for mutable constants --- src/librustc_mir/interpret/intern.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index cf4f3fda090f3..606f506434579 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -116,6 +116,10 @@ fn intern_shallow<'rt, 'mir, 'tcx>( // But we still intern that as immutable as the memory cannot be changed once the // initial value was computed. // Constants are never mutable. + assert_eq!( + mutability, Mutability::Immutable, + "Something went very wrong: mutability requested for a constant" + ); alloc.mutability = Mutability::Immutable; }; // link the alloc id to the actual allocation From 224e2e5e9e1d03db1a80455d08bb1d6d8686ec3a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 28 Aug 2019 09:44:48 +0200 Subject: [PATCH 7/8] explain ty == None --- src/librustc_mir/interpret/intern.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index 606f506434579..0031dbc4d0d40 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -57,6 +57,8 @@ struct IsStaticOrFn; /// `mode` is the mode of the environment where we found this pointer. /// `mutablity` is the mutability of the place to be interned; even if that says /// `immutable` things might become mutable if `ty` is not frozen. +/// `ty` can be `None` if there is no potential interior mutability +/// to account for (e.g. for vtables). fn intern_shallow<'rt, 'mir, 'tcx>( ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>, leftover_allocations: &'rt mut FxHashSet, @@ -97,6 +99,7 @@ fn intern_shallow<'rt, 'mir, 'tcx>( // read-only memory, and also by Miri when evluating other constants/statics that // access this one. if mode == InternMode::Static { + // When `ty` is `None`, we assume no interior mutability. 
let frozen = ty.map_or(true, |ty| ty.is_freeze( ecx.tcx.tcx, ecx.param_env, From 5462ecb4b131a513b2d821d52b9af491781bd898 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 28 Aug 2019 09:54:30 +0200 Subject: [PATCH 8/8] update intern classification comment --- src/librustc_mir/interpret/intern.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index 0031dbc4d0d40..95647ce642c5b 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -278,9 +278,10 @@ pub fn intern_const_alloc_recursive( // this `mutability` is the mutability of the place, ignoring the type let (base_mutability, base_intern_mode) = match tcx.static_mutability(def_id) { Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static), - None => (Mutability::Immutable, InternMode::ConstBase), // `static mut` doesn't care about interior mutability, it's mutable anyway Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static), + // consts, promoteds. FIXME: what about array lengths, array initializers? + None => (Mutability::Immutable, InternMode::ConstBase), }; // Type based interning.
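
(A small sketch to close out, not part of the patch; `promoted` is an invented name.) The `None => ConstBase` arm covers values that are not statics at all, e.g. ordinary constants and promoteds, which are therefore always interned immutably:

// `&42` in a runtime function gets promoted to a 'static allocation. It has no
// static item, so `tcx.static_mutability` returns `None` and
// `intern_const_alloc_recursive` interns the promoted memory as immutable via
// the ConstBase path.
fn promoted() -> &'static i32 {
    &42
}

fn main() {
    assert_eq!(*promoted(), 42);
}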