@@ -61,26 +61,13 @@ impl HasStaticRootDefId for const_eval::CompileTimeInterpreter<'_, '_> {
 fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     alloc_id: AllocId,
-    mutability: Mutability,
 ) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
     trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
     // FIXME(#120456) - is `swap_remove` correct?
-    let Some((_kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
+    let Some((_kind, alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
         return Err(());
     };
-    // Set allocation mutability as appropriate. This is used by LLVM to put things into
-    // read-only memory, and also by Miri when evaluating other globals that
-    // access this one.
-    match mutability {
-        Mutability::Not => {
-            alloc.mutability = Mutability::Not;
-        }
-        Mutability::Mut => {
-            // This must be already mutable, we won't "un-freeze" allocations ever.
-            assert_eq!(alloc.mutability, Mutability::Mut);
-        }
-    }
     // link the alloc id to the actual allocation
     let alloc = ecx.tcx.mk_const_alloc(alloc);
     if let Some(static_id) = ecx.machine.static_def_id() {
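With the `mutability` parameter gone, `intern_shallow` no longer decides whether an allocation is frozen; it only moves whatever is currently in `alloc_map` into `tcx`. Callers are now expected to set the mutability on the live allocation first. A condensed caller-side sketch of that contract, mirroring the const-prop hunk further down (not itself part of this patch):

    // Sketch: freeze the allocation up front, then shallow-intern it.
    // `intern_shallow` yields any nested provenance it found; callers that do
    // not recurse must not encounter any.
    ecx.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
    for _prov in intern_shallow(ecx, alloc_id).map_err(|()| err_ub!(DeadLocal))? {
        // nested allocations would require the recursive interning path
    }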
@@ -122,14 +109,9 @@ pub enum InternKind {
     Promoted,
 }
 
-/// Intern `ret` and everything it references.
-///
-/// This *cannot raise an interpreter error*. Doing so is left to validation, which
-/// tracks where in the value we are and thus can show much better error messages.
-///
-/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
-#[instrument(level = "debug", skip(ecx))]
-pub fn intern_const_alloc_recursive<
+/// Now that evaluation is finished, and we are not going to modify allocations anymore,
+/// recursively mark all allocations as immutable if the item kind calls for it (const/promoted/immut static).
+pub fn patch_mutability_of_allocs<
     'mir,
     'tcx: 'mir,
     M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
@@ -140,12 +122,12 @@ pub fn intern_const_alloc_recursive<
 ) -> Result<(), ErrorGuaranteed> {
     // We are interning recursively, and for mutability we are distinguishing the "root" allocation
     // that we are starting in, and all other allocations that we are encountering recursively.
-    let (base_mutability, inner_mutability, is_static) = match intern_kind {
+    let (base_mutability, inner_mutability) = match intern_kind {
         InternKind::Constant | InternKind::Promoted => {
             // Completely immutable. Interning anything mutably here can only lead to unsoundness,
             // since all consts are conceptually independent values but share the same underlying
             // memory.
-            (Mutability::Not, Mutability::Not, false)
+            (Mutability::Not, Mutability::Not)
         }
         InternKind::Static(Mutability::Not) => {
             (
@@ -158,30 +140,79 @@ pub fn intern_const_alloc_recursive<
                 // Inner allocations are never mutable. They can only arise via the "tail
                 // expression" / "outer scope" rule, and we treat them consistently with `const`.
                 Mutability::Not,
-                true,
             )
         }
         InternKind::Static(Mutability::Mut) => {
             // Just make everything mutable. We accept code like
             // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
-            (Mutability::Mut, Mutability::Mut, true)
+            (Mutability::Mut, Mutability::Mut)
         }
     };
+    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
+    let mut todo: Vec<_> = {
+        let base_alloc = &mut ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap().1;
+        base_alloc.mutability = base_mutability;
+        base_alloc.provenance().ptrs().iter().copied().collect()
+    };
+    let mut seen = FxHashSet::default();
+    seen.insert(base_alloc_id);
+    while let Some((_, prov)) = todo.pop() {
+        if !seen.insert(prov.alloc_id()) {
+            // Already processed
+            continue;
+        }
+        let Some((_, alloc)) = &mut ecx.memory.alloc_map.get_mut(&prov.alloc_id()) else {
+            continue;
+        };
+        // We always intern with `inner_mutability`, and furthermore we ensured above that if
+        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
+        // interned memory -- justifying that we can indeed intern immutably. However this also
+        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
+        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
+        // we'd have to somehow check that they are *all* immutable before deciding that this
+        // allocation can be made immutable. In the future we could consider analyzing all
+        // pointers before deciding which allocations can be made immutable; but for now we are
+        // okay with losing some potential for immutability here. This can anyway only affect
+        // `static mut`.
+        alloc.mutability = inner_mutability;
+        todo.extend(alloc.provenance().ptrs().iter().copied());
+    }
+    Ok(())
+}
+
+/// Intern `ret` and everything it references.
+///
+/// This *cannot raise an interpreter error*. Doing so is left to validation, which
+/// tracks where in the value we are and thus can show much better error messages.
+///
+/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
+#[instrument(level = "debug", skip(ecx))]
+pub fn intern_const_alloc_recursive<
+    'mir,
+    'tcx: 'mir,
+    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
+>(
+    ecx: &mut InterpCx<'mir, 'tcx, M>,
+    intern_kind: InternKind,
+    ret: &MPlaceTy<'tcx>,
+) -> Result<(), ErrorGuaranteed> {
+    let (inner_mutability, is_static) = match intern_kind {
+        InternKind::Constant | InternKind::Promoted => (Mutability::Not, false),
+        InternKind::Static(mutability) => (mutability, true),
+    };
 
     // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
-    trace!(?base_alloc_id, ?base_mutability);
+    trace!(?base_alloc_id);
     // First we intern the base allocation, as it requires a different mutability.
     // This gives us the initial set of nested allocations, which will then all be processed
     // recursively in the loop below.
     let mut todo: Vec<_> = if is_static {
         // Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
-        // But still change its mutability to match the requested one.
-        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
-        alloc.1.mutability = base_mutability;
+        let alloc = ecx.memory.alloc_map.get(&base_alloc_id).unwrap();
         alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
     } else {
-        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect()
+        intern_shallow(ecx, base_alloc_id).unwrap().collect()
     };
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
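The new `patch_mutability_of_allocs` is a plain worklist traversal over the provenance graph: set the root's mutability to `base_mutability`, then visit every allocation still reachable in `alloc_map` and apply `inner_mutability`, skipping anything already seen or already interned. A self-contained sketch of that pattern using std collections in place of the interpreter types (all names below are illustrative stand-ins, not rustc API):

    use std::collections::{HashMap, HashSet};

    #[derive(Clone, Copy)]
    enum Mutability {
        Not,
        Mut,
    }

    struct Alloc {
        mutability: Mutability,
        // Stand-in for provenance: ids of allocations this one points to.
        pointees: Vec<u64>,
    }

    fn patch_mutability(
        allocs: &mut HashMap<u64, Alloc>,
        base: u64,
        base_mutability: Mutability,
        inner_mutability: Mutability,
    ) {
        // Mark the root, and seed the worklist with everything it points to.
        let mut todo: Vec<u64> = {
            let base_alloc = allocs.get_mut(&base).unwrap();
            base_alloc.mutability = base_mutability;
            base_alloc.pointees.clone()
        };
        let mut seen = HashSet::new();
        seen.insert(base);
        while let Some(id) = todo.pop() {
            if !seen.insert(id) {
                continue; // already processed
            }
            // Allocations no longer in the map (already interned) are skipped.
            let Some(alloc) = allocs.get_mut(&id) else { continue };
            alloc.mutability = inner_mutability;
            todo.extend(alloc.pointees.iter().copied());
        }
    }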
@@ -247,17 +278,7 @@ pub fn intern_const_alloc_recursive<
             continue;
         }
         just_interned.insert(alloc_id);
-        // We always intern with `inner_mutability`, and furthermore we ensured above that if
-        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
-        // interned memory -- justifying that we can indeed intern immutably. However this also
-        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
-        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
-        // we'd have to somehow check that they are *all* immutable before deciding that this
-        // allocation can be made immutable. In the future we could consider analyzing all
-        // pointers before deciding which allocations can be made immutable; but for now we are
-        // okay with losing some potential for immutability here. This can anyway only affect
-        // `static mut`.
-        todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
+        todo.extend(intern_shallow(ecx, alloc_id).map_err(|()| {
             ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
         })?);
     }
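With the mutability-patching split out, the recursive interning loop only moves allocations into `tcx`; it relies on their mutability already being final. The call sites are not part of this excerpt, but the presumable intended ordering is to patch first, then intern. A hypothetical call-site sketch under that assumption:

    // Hypothetical sketch (the real call sites live outside this diff):
    // once evaluation is done, first fix up mutability, then intern.
    patch_mutability_of_allocs(&mut ecx, intern_kind, &ret)?;
    intern_const_alloc_recursive(&mut ecx, intern_kind, &ret)?;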
@@ -287,7 +308,8 @@ pub fn intern_const_alloc_for_constprop<
         return Ok(());
     }
     // Move allocation to `tcx`.
-    for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
+    ecx.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
+    for _ in intern_shallow(ecx, alloc_id).map_err(|()| err_ub!(DeadLocal))? {
         // We are not doing recursive interning, so we don't currently support provenance.
         // (If this assertion ever triggers, we should just implement a
         // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
@@ -314,7 +336,8 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
         let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
-        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
+        self.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
+        for prov in intern_shallow(self, alloc_id).unwrap() {
             // We are not doing recursive interning, so we don't currently support provenance.
             // (If this assertion ever triggers, we should just implement a
             // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.