@@ -175,7 +175,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     | MemorySemantics::SEQUENTIALLY_CONSISTENT
             }
         };
-        let semantics = self.constant_u32(self.span(), semantics.bits());
+        let semantics = self.constant_bit32(self.span(), semantics.bits());
         if invalid_seq_cst {
             self.zombie(
                 semantics.def(self),
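
Note: the change in this and every following hunk is the same mechanical rename. rspirv now names literal operands for their bit width rather than an assumed integer type — `Operand::LiteralInt32`/`LiteralInt64` became `Operand::LiteralBit32`/`LiteralBit64`, presumably because a 32- or 64-bit literal word is just a bit pattern that may encode an integer or a float — and the codegen helpers `constant_u32`/`constant_u64` were renamed to `constant_bit32`/`constant_bit64` to match. A standalone sketch of the idea behind the naming (plain std Rust, not rust-gpu API):

    // A 32-bit "literal" is just a bit pattern; the same word can hold
    // a float's bits or a signed integer's bits.
    fn main() {
        let float_word: u32 = 1.5f32.to_bits();
        assert_eq!(float_word, 0x3FC0_0000);
        let signed_word: u32 = -1i32 as u32;
        assert_eq!(signed_word, 0xFFFF_FFFF);
    }
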
@@ -196,10 +196,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 .constant_u16(self.span(), memset_fill_u16(fill_byte))
                 .def(self),
             32 => self
-                .constant_u32(self.span(), memset_fill_u32(fill_byte))
+                .constant_bit32(self.span(), memset_fill_u32(fill_byte))
                 .def(self),
             64 => self
-                .constant_u64(self.span(), memset_fill_u64(fill_byte))
+                .constant_bit64(self.span(), memset_fill_u64(fill_byte))
                 .def(self),
             _ => self.fatal(format!(
                 "memset on integer width {width} not implemented yet"
@@ -314,7 +314,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             self.store(pat, ptr, Align::from_bytes(0).unwrap());
         } else {
             for index in 0..count {
-                let const_index = self.constant_u32(self.span(), index as u32);
+                let const_index = self.constant_bit32(self.span(), index as u32);
                 let gep_ptr = self.gep(pat.ty, ptr, &[const_index]);
                 self.store(pat, gep_ptr, Align::from_bytes(0).unwrap());
             }
@@ -428,7 +428,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         } else {
             let indices = indices
                 .into_iter()
-                .map(|idx| self.constant_u32(self.span(), idx).def(self))
+                .map(|idx| self.constant_bit32(self.span(), idx).def(self))
                 .collect::<Vec<_>>();
             self.emit()
                 .access_chain(leaf_ptr_ty, None, ptr.def(self), indices)
@@ -904,9 +904,9 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                 ))
             } else if signed {
                 // this cast chain can probably be collapsed, but, whatever, be safe
-                Operand::LiteralInt32(v as u8 as i8 as i32 as u32)
+                Operand::LiteralBit32(v as u8 as i8 as i32 as u32)
             } else {
-                Operand::LiteralInt32(v as u8 as u32)
+                Operand::LiteralBit32(v as u8 as u32)
             }
         }
         fn construct_16(self_: &Builder<'_, '_>, signed: bool, v: u128) -> Operand {
@@ -915,9 +915,9 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                     "Switches to values above u16::MAX not supported: {v:?}"
                 ))
             } else if signed {
-                Operand::LiteralInt32(v as u16 as i16 as i32 as u32)
+                Operand::LiteralBit32(v as u16 as i16 as i32 as u32)
             } else {
-                Operand::LiteralInt32(v as u16 as u32)
+                Operand::LiteralBit32(v as u16 as u32)
             }
         }
         fn construct_32(self_: &Builder<'_, '_>, _signed: bool, v: u128) -> Operand {
@@ -926,7 +926,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                     "Switches to values above u32::MAX not supported: {v:?}"
                 ))
             } else {
-                Operand::LiteralInt32(v as u32)
+                Operand::LiteralBit32(v as u32)
             }
         }
         fn construct_64(self_: &Builder<'_, '_>, _signed: bool, v: u128) -> Operand {
@@ -935,7 +935,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                     "Switches to values above u64::MAX not supported: {v:?}"
                 ))
             } else {
-                Operand::LiteralInt64(v as u64)
+                Operand::LiteralBit64(v as u64)
             }
         }
         // pass in signed into the closure to be able to unify closure types
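
Note: the `construct_*` helpers build the literal operands for `OpSwitch` arms, and the cast chains are what make signed selectors compare correctly — a negative 8- or 16-bit value must be sign-extended into the 32-bit literal word, while unsigned values are zero-extended. Checking the chains from the code above in isolation (plain Rust):

    fn main() {
        // construct_8, signed: truncate to u8, reinterpret as i8,
        // sign-extend to i32, then take the raw 32-bit word.
        let v: u128 = 0xFF; // an 8-bit -1
        assert_eq!(v as u8 as i8 as i32 as u32, 0xFFFF_FFFF);

        // construct_16, signed: the same dance at 16 bits.
        let v: u128 = 0x8000; // i16::MIN
        assert_eq!(v as u16 as i16 as i32 as u32, 0xFFFF_8000);

        // Unsigned selectors are zero-extended instead.
        assert_eq!(0xFFu128 as u8 as u32, 0x0000_00FF);
    }
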
@@ -1217,7 +1217,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         let (ptr, access_ty) = self.adjust_pointer_for_typed_access(ptr, ty);

         // TODO: Default to device scope
-        let memory = self.constant_u32(self.span(), Scope::Device as u32);
+        let memory = self.constant_bit32(self.span(), Scope::Device as u32);
         let semantics = self.ordering_to_semantics_def(order);
         let result = self
             .emit()
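
Note: SPIR-V atomics do not take scope or memory semantics as immediate literals; they take `<id>` operands that must refer to 32-bit integer constants, which is why `Scope::Device` and the mapped `MemorySemantics` bits are materialized through `constant_bit32` before being handed to the instruction. The numeric values are fixed by the SPIR-V spec; a small standalone check, assuming the rspirv crate is available:

    use rspirv::spirv::{MemorySemantics, Scope};

    fn main() {
        // Values per the SPIR-V spec; these become OpConstant payloads.
        assert_eq!(Scope::Device as u32, 1);
        assert_eq!(MemorySemantics::ACQUIRE.bits(), 0x2);
        assert_eq!(MemorySemantics::SEQUENTIALLY_CONSISTENT.bits(), 0x10);
    }
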
@@ -1347,7 +1347,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         let val = self.bitcast(val, access_ty);

         // TODO: Default to device scope
-        let memory = self.constant_u32(self.span(), Scope::Device as u32);
+        let memory = self.constant_bit32(self.span(), Scope::Device as u32);
         let semantics = self.ordering_to_semantics_def(order);
         self.validate_atomic(val.ty, ptr.def(self));
         self.emit()
@@ -1413,7 +1413,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
             let original_ptr = ptr.def(self);
             let indices = indices
                 .into_iter()
-                .map(|idx| self.constant_u32(self.span(), idx).def(self))
+                .map(|idx| self.constant_bit32(self.span(), idx).def(self))
                 .collect::<Vec<_>>();
             return self
                 .emit()
@@ -1433,7 +1433,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         if idx > u32::MAX as u64 {
             self.fatal("struct_gep bigger than u32::MAX");
         }
-        let index_const = self.constant_u32(self.span(), idx as u32).def(self);
+        let index_const = self.constant_bit32(self.span(), idx as u32).def(self);
         self.emit()
             .access_chain(
                 result_type,
@@ -1741,7 +1741,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         ) {
             let indices = indices
                 .into_iter()
-                .map(|idx| self.constant_u32(self.span(), idx).def(self))
+                .map(|idx| self.constant_bit32(self.span(), idx).def(self))
                 .collect::<Vec<_>>();
             self.emit()
                 .access_chain(dest_ty, None, ptr.def(self), indices)
@@ -2292,7 +2292,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {

         self.validate_atomic(access_ty, dst.def(self));
         // TODO: Default to device scope
-        let memory = self.constant_u32(self.span(), Scope::Device as u32);
+        let memory = self.constant_bit32(self.span(), Scope::Device as u32);
         let semantics_equal = self.ordering_to_semantics_def(order);
         let semantics_unequal = self.ordering_to_semantics_def(failure_order);
         // Note: OpAtomicCompareExchangeWeak is deprecated, and has the same semantics
@@ -2328,7 +2328,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         self.validate_atomic(access_ty, dst.def(self));
         // TODO: Default to device scope
         let memory = self
-            .constant_u32(self.span(), Scope::Device as u32)
+            .constant_bit32(self.span(), Scope::Device as u32)
             .def(self);
         let semantics = self.ordering_to_semantics_def(order).def(self);
         use AtomicRmwBinOp::*;
@@ -2424,7 +2424,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         // Ignore sync scope (it only has "single thread" and "cross thread")
         // TODO: Default to device scope
         let memory = self
-            .constant_u32(self.span(), Scope::Device as u32)
+            .constant_bit32(self.span(), Scope::Device as u32)
             .def(self);
         let semantics = self.ordering_to_semantics_def(order).def(self);
         self.emit().memory_barrier(memory, semantics).unwrap();
@@ -2697,7 +2697,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {

         // HACK(eddyb) avoid the logic below that assumes only ID operands
         if inst.class.opcode == Op::CompositeExtract {
-            if let (Some(r), &[Operand::IdRef(x), Operand::LiteralInt32(i)]) =
+            if let (Some(r), &[Operand::IdRef(x), Operand::LiteralBit32(i)]) =
                 (inst.result_id, &inst.operands[..])
             {
                 return Some(Inst::CompositeExtract(r, x, i));
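
Note: this last hunk is the read side of the rename — pattern-matching the operands of an already-emitted instruction. A self-contained sketch of the same match against rspirv's data representation (`Inst` is this file's internal enum; the stand-in below is hypothetical):

    use rspirv::dr::{Instruction, Operand};
    use rspirv::spirv::{Op, Word};

    // Hypothetical stand-in for the file's internal `Inst` enum.
    enum Inst {
        CompositeExtract(Word, Word, u32),
    }

    fn parse_composite_extract(inst: &Instruction) -> Option<Inst> {
        if inst.class.opcode == Op::CompositeExtract {
            // The extract index is a raw 32-bit literal, hence `LiteralBit32`.
            if let (Some(r), &[Operand::IdRef(x), Operand::LiteralBit32(i)]) =
                (inst.result_id, &inst.operands[..])
            {
                return Some(Inst::CompositeExtract(r, x, i));
            }
        }
        None
    }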