@@ -366,7 +366,15 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
     #[instrument(level = "trace", skip(self), ret)]
     fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
         use Value::*;
-        let op = match *self.get(value) {
+        // LLVM optimizes the load of `sizeof(size_t) * 2` as a single `mov`,
+        // which is cheap. Bigger values generate more `mov` instructions.
+        // After GVN, it becomes a single load (`lea`) of an address in `.rodata`.
+        // But to avoid blessing differences between 32-bit and 64-bit targets,
+        // let's choose `size_t = u64`.
+        const STACK_THRESHOLD: u64 = std::mem::size_of::<u64>() as u64 * 2;
+        let vvalue = self.get(value);
+        debug!(?vvalue);
+        let op = match *vvalue {
             Opaque(_) => return None,
             // Do not bother evaluating repeat expressions. This would uselessly consume memory.
             Repeat(..) => return None,
@@ -381,10 +389,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
                     .collect::<Option<Vec<_>>>()?;
                 let ty = match kind {
                     AggregateTy::Array => {
-                        let [field, ..] = fields.as_slice() else {
-                            bug!("fields.len() == 0");
-                        };
-                        let field_ty = field.layout.ty;
+                        assert!(fields.len() > 0);
+                        let field_ty = fields[0].layout.ty;
                         // Ignore nested array
                         if field_ty.is_array() {
                             trace!(
@@ -418,6 +424,9 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
                     let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                     ImmTy::from_immediate(ptr_imm, ty).into()
                 } else if matches!(kind, AggregateTy::Array) {
+                    if ty.layout.size().bytes() <= STACK_THRESHOLD {
+                        return None;
+                    }
                     let mut mplace = None;
                     let alloc_id = self
                         .ecx
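
To see what the new threshold does in isolation, here is a minimal standalone sketch (not compiler code; the helper name `would_promote_to_rodata` is hypothetical) that reproduces the `STACK_THRESHOLD` constant from the first hunk and the size check from the third hunk. With `size_t` pinned to `u64`, the threshold is 16 bytes on every target, so arrays at or under 16 bytes make `eval_to_const` return `None` and stay as cheap `mov`s instead of becoming `.rodata` constants.

```rust
// Standalone sketch of the threshold arithmetic above; not part of rustc.
const STACK_THRESHOLD: u64 = std::mem::size_of::<u64>() as u64 * 2;

// Hypothetical helper mirroring the new check in `eval_to_const`: only
// arrays strictly larger than the threshold are worth materializing as a
// single load of a `.rodata` constant.
fn would_promote_to_rodata(array_size_bytes: u64) -> bool {
    array_size_bytes > STACK_THRESHOLD
}

fn main() {
    assert_eq!(STACK_THRESHOLD, 16);
    assert!(!would_promote_to_rodata(16)); // e.g. [u32; 4]: left as movs
    assert!(would_promote_to_rodata(24)); // e.g. [u64; 3]: promoted
}
```

Pinning the constant to `u64` rather than `usize` means the cutoff, and therefore the blessed test output, is identical on 32-bit and 64-bit hosts.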