@@ -89,6 +89,7 @@ use std::borrow::Cow;
 use either::Either;
 use itertools::Itertools as _;
 use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
+use rustc_arena::DroplessArena;
 use rustc_const_eval::const_eval::DummyMachine;
 use rustc_const_eval::interpret::{
     ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
@@ -127,7 +128,9 @@ impl<'tcx> crate::MirPass<'tcx> for GVN {
         // Clone dominators because we need them while mutating the body.
         let dominators = body.basic_blocks.dominators().clone();

-        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);
+        let arena = DroplessArena::default();
+        let mut state =
+            VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls, &arena);

         for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
             let opaque = state.new_opaque(body.local_decls[local].ty);
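The hunk above creates the `DroplessArena` in `run_pass` and only lends it to `VnState`, so anything allocated from it can outlive the state and be borrowed by the interned values for the whole `'a` lifetime. Below is a minimal sketch of that ownership shape, using the third-party `bumpalo` crate as a stand-in for `rustc_arena::DroplessArena` (which only builds inside the rustc workspace); the `Interner` type and `intern` method are invented for illustration.

```rust
// Sketch only: `bumpalo::Bump` stands in for `rustc_arena::DroplessArena`,
// and `Interner` is a made-up miniature of the `VnState` shape in this diff.
use bumpalo::Bump;

struct Interner<'a> {
    /// Borrowed arena: slices allocated from it outlive the `Interner` itself.
    arena: &'a Bump,
    /// Interned slices point into the arena, not into `Interner`.
    values: Vec<&'a [u32]>,
}

impl<'a> Interner<'a> {
    fn new(arena: &'a Bump) -> Self {
        Interner { arena, values: Vec::new() }
    }

    /// Copy the caller's data into the arena and keep only a borrowed slice.
    fn intern(&mut self, xs: &[u32]) -> &'a [u32] {
        let slice: &'a [u32] = self.arena.alloc_slice_copy(xs);
        self.values.push(slice);
        slice
    }
}

fn main() {
    // The caller owns the arena, mirroring `run_pass` owning the `DroplessArena`.
    let arena = Bump::new();
    let mut interner = Interner::new(&arena);
    let s = interner.intern(&[1, 2, 3]);
    drop(interner); // the slice stays valid: it lives in `arena`, not in `interner`
    assert_eq!(s, &[1, 2, 3]);
}
```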
@@ -161,8 +164,8 @@ enum AddressKind {
     Address(RawPtrKind),
 }

-#[derive(Debug, PartialEq, Eq, Hash)]
-enum Value<'tcx> {
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+enum Value<'a, 'tcx> {
     // Root values.
     /// Used to represent values we know nothing about.
     /// The `usize` is a counter incremented by `new_opaque`.
@@ -177,7 +180,7 @@ enum Value<'tcx> {
     },
     /// An aggregate value, either tuple/closure/struct/enum.
     /// This does not contain unions, as we cannot reason with the value.
-    Aggregate(VariantIdx, Vec<VnIndex>),
+    Aggregate(VariantIdx, &'a [VnIndex]),
     /// A raw pointer aggregate built from a thin pointer and metadata.
     RawPtr {
         /// Thin pointer component. This is field 0 in MIR.
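Replacing the owned `Vec<VnIndex>` with an arena-backed `&'a [VnIndex]` is what makes the `Copy` derive possible, and that in turn lets `get` (further down in the diff) return `Value` by value instead of by reference. A self-contained toy illustration of the payoff, with invented types that only mimic the shapes in this diff:

```rust
// Toy reduction of the change: an enum that borrows its aggregate fields as a
// slice is `Copy`, so lookups can hand it out by value.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum Value<'a> {
    Opaque(usize),
    Aggregate(u32, &'a [u32]),
}

struct Table<'a> {
    values: Vec<Value<'a>>,
}

impl<'a> Table<'a> {
    // Returning `Value<'a>` by value is cheap to copy and, crucially, does not
    // keep `self` borrowed, so `&mut self` methods can be called right after.
    fn get(&self, index: usize) -> Value<'a> {
        self.values[index]
    }
}

fn main() {
    let fields = [1, 2, 3];
    let table = Table { values: vec![Value::Opaque(0), Value::Aggregate(0, &fields)] };
    // No `*` deref needed when matching, unlike the old `&Value<'tcx>` getter.
    if let Value::Aggregate(variant, fs) = table.get(1) {
        assert_eq!((variant, fs.len()), (0, 3));
    }
}
```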
@@ -211,7 +214,7 @@ enum Value<'tcx> {
     },
 }

-struct VnState<'body, 'tcx> {
+struct VnState<'body, 'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     ecx: InterpCx<'tcx, DummyMachine>,
     local_decls: &'body LocalDecls<'tcx>,
@@ -221,7 +224,7 @@ struct VnState<'body, 'tcx> {
     /// Locals that are assigned that value.
     // This vector does not hold all the values of `VnIndex` that we create.
     rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
-    values: FxIndexSet<(Value<'tcx>, Ty<'tcx>)>,
+    values: FxIndexSet<(Value<'a, 'tcx>, Ty<'tcx>)>,
     /// Values evaluated as constants if possible.
     evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
     /// Counter to generate different values.
@@ -231,16 +234,18 @@ struct VnState<'body, 'tcx> {
     ssa: &'body SsaLocals,
     dominators: Dominators<BasicBlock>,
     reused_locals: DenseBitSet<Local>,
+    arena: &'a DroplessArena,
 }

-impl<'body, 'tcx> VnState<'body, 'tcx> {
+impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
     fn new(
         tcx: TyCtxt<'tcx>,
         body: &Body<'tcx>,
         typing_env: ty::TypingEnv<'tcx>,
         ssa: &'body SsaLocals,
         dominators: Dominators<BasicBlock>,
         local_decls: &'body LocalDecls<'tcx>,
+        arena: &'a DroplessArena,
     ) -> Self {
         // Compute a rough estimate of the number of values in the body from the number of
         // statements. This is meant to reduce the number of allocations, but it's all right if
@@ -263,6 +268,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             ssa,
             dominators,
             reused_locals: DenseBitSet::new_empty(local_decls.len()),
+            arena,
         }
     }

@@ -271,7 +277,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
     }

     #[instrument(level = "trace", skip(self), ret)]
-    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex {
+    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> VnIndex {
         let (index, new) = self.values.insert_full((value, ty));
         let index = VnIndex::from_usize(index);
         if new {
@@ -314,8 +320,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
     }

     #[inline]
-    fn get(&self, index: VnIndex) -> &Value<'tcx> {
-        &self.values.get_index(index.as_usize()).unwrap().0
+    fn get(&self, index: VnIndex) -> Value<'a, 'tcx> {
+        self.values.get_index(index.as_usize()).unwrap().0
     }

     #[inline]
@@ -360,8 +366,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         self.insert(ty, Value::Constant { value, disambiguator: 0 })
     }

-    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex {
-        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, values))
+    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: &[VnIndex]) -> VnIndex {
+        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, self.arena.alloc_slice(values)))
     }

     fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
@@ -387,7 +393,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         } else {
             return None;
         };
-        let op = match *self.get(value) {
+        let op = match self.get(value) {
             _ if ty.is_zst() => ImmTy::uninit(ty).into(),

             Opaque(_) => return None,
@@ -602,7 +608,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
                 if let Value::Aggregate(_, fields) = self.get(value) {
                     return Some((projection_ty, fields[f.as_usize()]));
                 } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value)
-                    && let Value::Aggregate(written_variant, fields) = self.get(*outer_value)
+                    && let Value::Aggregate(written_variant, fields) = self.get(outer_value)
                     // This pass is not aware of control-flow, so we do not know whether the
                     // replacement we are doing is actually reachable. We could be in any arm of
                     // ```
@@ -627,15 +633,15 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             ProjectionElem::Index(idx) => {
                 if let Value::Repeat(inner, _) = self.get(value) {
                     *from_non_ssa_index |= self.locals[idx].is_none();
-                    return Some((projection_ty, *inner));
+                    return Some((projection_ty, inner));
                 }
                 let idx = self.locals[idx]?;
                 ProjectionElem::Index(idx)
             }
             ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                 match self.get(value) {
                     Value::Repeat(inner, _) => {
-                        return Some((projection_ty, *inner));
+                        return Some((projection_ty, inner));
                     }
                     Value::Aggregate(_, operands) => {
                         let offset = if from_end {
@@ -725,8 +731,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
         let mut from_non_ssa_index = false;
         for (index, proj) in place.projection.iter().enumerate() {
-            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
-                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
+            if let Value::Projection(pointer, ProjectionElem::Deref) = self.get(value)
+                && let Value::Address { place: mut pointee, kind, .. } = self.get(pointer)
                 && let AddressKind::Ref(BorrowKind::Shared) = kind
                 && let Some(v) = self.simplify_place_value(&mut pointee, location)
             {
@@ -749,8 +755,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             (place_ty, value) = self.project(place_ty, value, proj, &mut from_non_ssa_index)?;
         }

-        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
-            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
+        if let Value::Projection(pointer, ProjectionElem::Deref) = self.get(value)
+            && let Value::Address { place: mut pointee, kind, .. } = self.get(pointer)
             && let AddressKind::Ref(BorrowKind::Shared) = kind
             && let Some(v) = self.simplify_place_value(&mut pointee, location)
         {
@@ -861,7 +867,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
     fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
         let enum_ty = self.ty(place);
         if enum_ty.is_enum()
-            && let Value::Aggregate(variant, _) = *self.get(place)
+            && let Value::Aggregate(variant, _) = self.get(place)
         {
             let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
             return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
@@ -893,11 +899,11 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         fields: &[VnIndex],
     ) -> Option<VnIndex> {
         let Some(&first_field) = fields.first() else { return None };
-        let Value::Projection(copy_from_value, _) = *self.get(first_field) else { return None };
+        let Value::Projection(copy_from_value, _) = self.get(first_field) else { return None };

         // All fields must correspond one-to-one and come from the same aggregate value.
         if fields.iter().enumerate().any(|(index, &v)| {
-            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
+            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = self.get(v)
                 && copy_from_value == pointer
                 && from_index.index() == index
             {
@@ -909,7 +915,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         }

         let mut copy_from_local_value = copy_from_value;
-        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
+        if let Value::Projection(pointer, proj) = self.get(copy_from_value)
             && let ProjectionElem::Downcast(_, read_variant) = proj
         {
             if variant_index == read_variant {
@@ -954,13 +960,10 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             }
         }

-        let fields: Vec<_> = field_ops
-            .iter_mut()
-            .map(|op| {
-                self.simplify_operand(op, location)
-                    .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
-            })
-            .collect();
+        let fields = self.arena.alloc_from_iter(field_ops.iter_mut().map(|op| {
+            self.simplify_operand(op, location)
+                .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
+        }));

         let variant_index = match *kind {
             AggregateKind::Array(..) | AggregateKind::Tuple => {
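The hunk above drops the intermediate `Vec<_>` and writes the simplified field values straight into the arena via `alloc_from_iter`. A hedged sketch of the same move, again with `bumpalo` standing in for `DroplessArena` (the `simplify` helper and the `u32` stand-ins are invented for illustration):

```rust
// Sketch only: `alloc_slice_fill_iter` plays the role of
// `DroplessArena::alloc_from_iter`, building an arena-backed slice from an
// iterator without collecting into a temporary `Vec` first.
use bumpalo::Bump;

/// Stand-in for `simplify_operand`: pretend odd operands cannot be simplified.
fn simplify(op: u32) -> Option<u32> {
    (op % 2 == 0).then_some(op / 2)
}

fn intern_fields<'a>(arena: &'a Bump, field_ops: &[u32]) -> &'a [u32] {
    arena.alloc_slice_fill_iter(
        // Mirrors `simplify_operand(..).unwrap_or_else(|| new_opaque(..))`:
        // fall back to a sentinel when simplification fails.
        field_ops.iter().map(|&op| simplify(op).unwrap_or(u32::MAX)),
    )
}

fn main() {
    let arena = Bump::new();
    let fields = intern_fields(&arena, &[4, 7, 10]);
    assert_eq!(fields, &[2, u32::MAX, 5]);
}
```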
@@ -981,12 +984,12 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             let mut was_updated = false;
             while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                 self.get(pointer)
-                && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(*cast_value).kind()
+                && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(cast_value).kind()
                 && let ty::RawPtr(_, output_mtbl) = ty.kind()
                 && from_mtbl == output_mtbl
                 && from_pointee_ty.is_sized(self.tcx, self.typing_env())
             {
-                pointer = *cast_value;
+                pointer = cast_value;
                 was_updated = true;
             }

@@ -1051,16 +1054,16 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
             // it's fine to get a projection as the type.
             Value::Cast { kind: CastKind::PtrToPtr, value: inner }
-                if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) =>
+                if self.pointers_have_same_metadata(self.ty(inner), arg_ty) =>
             {
-                *inner
+                inner
             }

             // We have an unsizing cast, which assigns the length to wide pointer metadata.
             Value::Cast {
                 kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                 value: from,
-            } if let Some(from) = self.ty(*from).builtin_deref(true)
+            } if let Some(from) = self.ty(from).builtin_deref(true)
                 && let ty::Array(_, len) = from.kind()
                 && let Some(to) = self.ty(arg_index).builtin_deref(true)
                 && let ty::Slice(..) = to.kind() =>
@@ -1088,15 +1091,15 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         }

         let value = match (op, self.get(arg_index)) {
-            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
-            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
+            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(inner),
+            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(inner),
             (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
-                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
+                Value::BinaryOp(BinOp::Ne, lhs, rhs)
             }
             (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
-                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
+                Value::BinaryOp(BinOp::Eq, lhs, rhs)
             }
-            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(*metadata),
+            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(metadata),
             // We have an unsizing cast, which assigns the length to wide pointer metadata.
             (
                 UnOp::PtrMetadata,
@@ -1105,7 +1108,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
                     value: inner,
                 },
             ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
-                && let ty::Array(_, len) = self.ty(*inner).builtin_deref(true).unwrap().kind() =>
+                && let ty::Array(_, len) = self.ty(inner).builtin_deref(true).unwrap().kind() =>
             {
                 return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
             }
@@ -1138,12 +1141,12 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             && lhs_ty.is_any_ptr()
             && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
             && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
-            && let lhs_from = self.ty(*lhs_value)
-            && lhs_from == self.ty(*rhs_value)
+            && let lhs_from = self.ty(lhs_value)
+            && lhs_from == self.ty(rhs_value)
             && self.pointers_have_same_metadata(lhs_from, lhs_ty)
         {
-            lhs = *lhs_value;
-            rhs = *rhs_value;
+            lhs = lhs_value;
+            rhs = rhs_value;
             if let Some(lhs_op) = self.try_as_operand(lhs, location)
                 && let Some(rhs_op) = self.try_as_operand(rhs, location)
             {
@@ -1277,7 +1280,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         if op.is_overflowing() {
             let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
             let false_val = self.insert_bool(false);
-            Some(self.insert_tuple(ty, vec![result, false_val]))
+            Some(self.insert_tuple(ty, &[result, false_val]))
         } else {
             Some(result)
         }
@@ -1330,11 +1333,11 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             && let ty::RawPtr(to_pointee, _) = to.kind()
             && to_pointee.is_sized(self.tcx, self.typing_env())
         {
-            from = self.ty(*pointer);
-            value = *pointer;
+            from = self.ty(pointer);
+            value = pointer;
             was_updated_this_iteration = true;
             if from == to {
-                return Some(*pointer);
+                return Some(pointer);
             }
         }

@@ -1343,7 +1346,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         if let Transmute = kind
             && let Value::Aggregate(variant_idx, field_values) = self.get(value)
             && let Some((field_idx, field_ty)) =
-                self.value_is_all_in_one_field(from, *variant_idx)
+                self.value_is_all_in_one_field(from, variant_idx)
         {
             from = field_ty;
             value = field_values[field_idx.as_usize()];
@@ -1354,7 +1357,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
         }

         // Various cast-then-cast cases can be simplified.
-        if let Value::Cast { kind: inner_kind, value: inner_value } = *self.get(value) {
+        if let Value::Cast { kind: inner_kind, value: inner_value } = self.get(value) {
             let inner_from = self.ty(inner_value);
             let new_kind = match (inner_kind, kind) {
                 // Even if there's a narrowing cast in here that's fine, because
@@ -1553,7 +1556,7 @@ fn op_to_prop_const<'tcx>(
     None
 }

-impl<'tcx> VnState<'_, 'tcx> {
+impl<'tcx> VnState<'_, '_, 'tcx> {
     /// If either [`Self::try_as_constant`] as [`Self::try_as_place`] succeeds,
     /// returns that result as an [`Operand`].
     fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
@@ -1572,7 +1575,7 @@ impl<'tcx> VnState<'_, 'tcx> {
         // This was already constant in MIR, do not change it. If the constant is not
         // deterministic, adding an additional mention of it in MIR will not give the same value as
         // the former mention.
-        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
+        if let Value::Constant { value, disambiguator: 0 } = self.get(index) {
             debug_assert!(value.is_deterministic());
             return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
         }
@@ -1616,7 +1619,7 @@ impl<'tcx> VnState<'_, 'tcx> {
             // If we are here, we failed to find a local, and we already have a `Deref`.
             // Trying to add projections will only result in an ill-formed place.
             return None;
-        } else if let Value::Projection(pointer, proj) = *self.get(index)
+        } else if let Value::Projection(pointer, proj) = self.get(index)
             && (allow_complex_projection || proj.is_stable_offset())
             && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
         {
@@ -1639,7 +1642,7 @@ impl<'tcx> VnState<'_, 'tcx> {
     }
 }

-impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
+impl<'tcx> MutVisitor<'tcx> for VnState<'_, '_, 'tcx> {
     fn tcx(&self) -> TyCtxt<'tcx> {
         self.tcx
     }