@@ -175,7 +175,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     | MemorySemantics::SEQUENTIALLY_CONSISTENT
             }
         };
-        let semantics = self.constant_u32(self.span(), semantics.bits());
+        let semantics = self.constant_bit32(self.span(), semantics.bits());
         if invalid_seq_cst {
             self.zombie(
                 semantics.def(self),
@@ -196,10 +196,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     .constant_u16(self.span(), memset_fill_u16(fill_byte))
                     .def(self),
                 32 => self
-                    .constant_u32(self.span(), memset_fill_u32(fill_byte))
+                    .constant_bit32(self.span(), memset_fill_u32(fill_byte))
                     .def(self),
                 64 => self
-                    .constant_u64(self.span(), memset_fill_u64(fill_byte))
+                    .constant_bit64(self.span(), memset_fill_u64(fill_byte))
                     .def(self),
                 _ => self.fatal(format!(
                     "memset on integer width {width} not implemented yet"
@@ -314,7 +314,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             self.store(pat, ptr, Align::from_bytes(0).unwrap());
         } else {
             for index in 0..count {
-                let const_index = self.constant_u32(self.span(), index as u32);
+                let const_index = self.constant_bit32(self.span(), index as u32);
                 let gep_ptr = self.gep(pat.ty, ptr, &[const_index]);
                 self.store(pat, gep_ptr, Align::from_bytes(0).unwrap());
             }
@@ -431,7 +431,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         } else {
             let indices = indices
                 .into_iter()
-                .map(|idx| self.constant_u32(self.span(), idx).def(self))
+                .map(|idx| self.constant_bit32(self.span(), idx).def(self))
                 .collect::<Vec<_>>();
             self.emit()
                 .in_bounds_access_chain(leaf_ptr_ty, None, ptr.def(self), indices)
@@ -614,7 +614,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         };
         let indices = base_indices
             .into_iter()
-            .map(|idx| self.constant_u32(self.span(), idx).def(self))
+            .map(|idx| self.constant_bit32(self.span(), idx).def(self))
             .chain(indices)
             .collect();
         return self.emit_access_chain(
@@ -1106,9 +1106,9 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                 ))
             } else if signed {
                 // this cast chain can probably be collapsed, but, whatever, be safe
-                Operand::LiteralInt32(v as u8 as i8 as i32 as u32)
+                Operand::LiteralBit32(v as u8 as i8 as i32 as u32)
             } else {
-                Operand::LiteralInt32(v as u8 as u32)
+                Operand::LiteralBit32(v as u8 as u32)
             }
         }
         fn construct_16(self_: &Builder<'_, '_>, signed: bool, v: u128) -> Operand {
@@ -1117,9 +1117,9 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                     "Switches to values above u16::MAX not supported: {v:?}"
                 ))
             } else if signed {
-                Operand::LiteralInt32(v as u16 as i16 as i32 as u32)
+                Operand::LiteralBit32(v as u16 as i16 as i32 as u32)
             } else {
-                Operand::LiteralInt32(v as u16 as u32)
+                Operand::LiteralBit32(v as u16 as u32)
             }
         }
         fn construct_32(self_: &Builder<'_, '_>, _signed: bool, v: u128) -> Operand {
@@ -1128,7 +1128,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                     "Switches to values above u32::MAX not supported: {v:?}"
                 ))
             } else {
-                Operand::LiteralInt32(v as u32)
+                Operand::LiteralBit32(v as u32)
             }
         }
         fn construct_64(self_: &Builder<'_, '_>, _signed: bool, v: u128) -> Operand {
@@ -1137,7 +1137,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                     "Switches to values above u64::MAX not supported: {v:?}"
                 ))
             } else {
-                Operand::LiteralInt64(v as u64)
+                Operand::LiteralBit64(v as u64)
             }
         }
         // pass in signed into the closure to be able to unify closure types
@@ -1478,7 +1478,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         let (ptr, access_ty) = self.adjust_pointer_for_typed_access(ptr, ty);
 
         // TODO: Default to device scope
-        let memory = self.constant_u32(self.span(), Scope::Device as u32);
+        let memory = self.constant_bit32(self.span(), Scope::Device as u32);
         let semantics = self.ordering_to_semantics_def(order);
         let result = self
             .emit()
@@ -1611,7 +1611,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         let val = self.bitcast(val, access_ty);
 
         // TODO: Default to device scope
-        let memory = self.constant_u32(self.span(), Scope::Device as u32);
+        let memory = self.constant_bit32(self.span(), Scope::Device as u32);
         let semantics = self.ordering_to_semantics_def(order);
         self.validate_atomic(val.ty, ptr.def(self));
         self.emit()
@@ -1944,7 +1944,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
     ) {
         let indices = indices
             .into_iter()
-            .map(|idx| self.constant_u32(self.span(), idx).def(self))
+            .map(|idx| self.constant_bit32(self.span(), idx).def(self))
             .collect::<Vec<_>>();
         self.emit()
             .in_bounds_access_chain(dest_ty, None, ptr.def(self), indices)
@@ -2495,7 +2495,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
 
         self.validate_atomic(access_ty, dst.def(self));
         // TODO: Default to device scope
-        let memory = self.constant_u32(self.span(), Scope::Device as u32);
+        let memory = self.constant_bit32(self.span(), Scope::Device as u32);
         let semantics_equal = self.ordering_to_semantics_def(order);
         let semantics_unequal = self.ordering_to_semantics_def(failure_order);
         // Note: OpAtomicCompareExchangeWeak is deprecated, and has the same semantics
@@ -2535,7 +2535,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         self.validate_atomic(access_ty, dst.def(self));
         // TODO: Default to device scope
         let memory = self
-            .constant_u32(self.span(), Scope::Device as u32)
+            .constant_bit32(self.span(), Scope::Device as u32)
             .def(self);
         let semantics = self.ordering_to_semantics_def(order).def(self);
         use AtomicRmwBinOp::*;
@@ -2631,7 +2631,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         // Ignore sync scope (it only has "single thread" and "cross thread")
         // TODO: Default to device scope
         let memory = self
-            .constant_u32(self.span(), Scope::Device as u32)
+            .constant_bit32(self.span(), Scope::Device as u32)
             .def(self);
         let semantics = self.ordering_to_semantics_def(order).def(self);
         self.emit().memory_barrier(memory, semantics).unwrap();
@@ -2915,7 +2915,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
 
             // HACK(eddyb) avoid the logic below that assumes only ID operands
             if inst.class.opcode == Op::CompositeExtract {
-                if let (Some(r), &[Operand::IdRef(x), Operand::LiteralInt32(i)]) =
+                if let (Some(r), &[Operand::IdRef(x), Operand::LiteralBit32(i)]) =
                     (inst.result_id, &inst.operands[..])
                 {
                     return Some(Inst::CompositeExtract(r, x, i));