diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs index 4f691c0d3948..2dfb2d0bffc6 100644 --- a/src/librustc/ich/impls_ty.rs +++ b/src/librustc/ich/impls_ty.rs @@ -417,10 +417,15 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::Allocation { } self.undef_mask.hash_stable(hcx, hasher); self.align.hash_stable(hcx, hasher); - self.mutable.hash_stable(hcx, hasher); + self.runtime_mutability.hash_stable(hcx, hasher); } } +impl_stable_hash_for!(enum ::syntax::ast::Mutability { + Immutable, + Mutable +}); + impl_stable_hash_for!(struct mir::interpret::Pointer{primval}); impl_stable_hash_for!(enum mir::interpret::PrimVal { diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs index c395be549a39..b621f3f3731a 100644 --- a/src/librustc/mir/interpret/mod.rs +++ b/src/librustc/mir/interpret/mod.rs @@ -19,6 +19,7 @@ use ty; use ty::layout::{self, Align, HasDataLayout}; use middle::region; use std::iter; +use syntax::ast::Mutability; #[derive(Clone, Debug, PartialEq)] pub enum Lock { @@ -169,8 +170,10 @@ pub struct Allocation { pub undef_mask: UndefMask, /// The alignment of the allocation to detect unaligned reads. 
pub align: Align, - /// Whether the allocation should be put into mutable memory when translating via llvm - pub mutable: bool, + /// Whether the allocation (of a static) should be put into mutable memory when translating + /// + /// Only happens for `static mut` or `static` with interior mutability + pub runtime_mutability: Mutability, } impl Allocation { @@ -182,7 +185,7 @@ impl Allocation { relocations: BTreeMap::new(), undef_mask, align: Align::from_bytes(1, 1).unwrap(), - mutable: false, + runtime_mutability: Mutability::Immutable, } } } diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs index a22158c73345..e67c36779111 100644 --- a/src/librustc_mir/interpret/memory.rs +++ b/src/librustc_mir/interpret/memory.rs @@ -105,7 +105,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> { relocations: BTreeMap::new(), undef_mask: UndefMask::new(size), align, - mutable: false, + runtime_mutability: Mutability::Immutable, }; let id = self.tcx.interpret_interner.reserve(); M::add_lock(self, id); @@ -544,7 +544,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> { let uninit = self.uninitialized_statics.remove(&alloc_id); if let Some(mut alloc) = alloc.or(uninit) { // ensure llvm knows not to put this into immutable memroy - alloc.mutable = mutability == Mutability::Mutable; + alloc.runtime_mutability = mutability; let alloc = self.tcx.intern_const_alloc(alloc); self.tcx.interpret_interner.intern_at_reserved(alloc_id, alloc); // recurse into inner allocations diff --git a/src/librustc_trans/mir/constant.rs b/src/librustc_trans/mir/constant.rs index 1b470665cd9c..c7a0724c1e72 100644 --- a/src/librustc_trans/mir/constant.rs +++ b/src/librustc_trans/mir/constant.rs @@ -24,6 +24,7 @@ use common::{C_bytes, C_struct, C_uint_big, C_undef, C_usize}; use consts; use type_of::LayoutLlvmExt; use type_::Type; +use syntax::ast::Mutability; use super::super::callee; use super::FunctionCx; @@ -57,7 +58,7 
@@ pub fn primval_to_llvm(cx: &CodegenCx, } else if let Some(alloc) = cx.tcx.interpret_interner .get_alloc(ptr.alloc_id) { let init = global_initializer(cx, alloc); - if alloc.mutable { + if alloc.runtime_mutability == Mutability::Mutable { consts::addr_of_mut(cx, init, alloc.align, "byte_str") } else { consts::addr_of(cx, init, alloc.align, "byte_str")