@@ -353,7 +353,7 @@ void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm,
   // The rest is saved with the optimized path
-  uint num_saved_regs = 4 + (dst != rax ? 1 : 0) + 4;
+  uint num_saved_regs = 4 + (dst != rax ? 1 : 0) + 4 + (UseAPX ? 16 : 0);
   __ subptr(rsp, num_saved_regs * wordSize);
   uint slot = num_saved_regs;
   if (dst != rax) {
@@ -367,6 +367,25 @@ void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm,
   __ movptr(Address(rsp, (--slot) * wordSize), r9);
   __ movptr(Address(rsp, (--slot) * wordSize), r10);
   __ movptr(Address(rsp, (--slot) * wordSize), r11);
+  // Save APX extended registers r16-r31 if enabled
+  if (UseAPX) {
+    __ movptr(Address(rsp, (--slot) * wordSize), r16);
+    __ movptr(Address(rsp, (--slot) * wordSize), r17);
+    __ movptr(Address(rsp, (--slot) * wordSize), r18);
+    __ movptr(Address(rsp, (--slot) * wordSize), r19);
+    __ movptr(Address(rsp, (--slot) * wordSize), r20);
+    __ movptr(Address(rsp, (--slot) * wordSize), r21);
+    __ movptr(Address(rsp, (--slot) * wordSize), r22);
+    __ movptr(Address(rsp, (--slot) * wordSize), r23);
+    __ movptr(Address(rsp, (--slot) * wordSize), r24);
+    __ movptr(Address(rsp, (--slot) * wordSize), r25);
+    __ movptr(Address(rsp, (--slot) * wordSize), r26);
+    __ movptr(Address(rsp, (--slot) * wordSize), r27);
+    __ movptr(Address(rsp, (--slot) * wordSize), r28);
+    __ movptr(Address(rsp, (--slot) * wordSize), r29);
+    __ movptr(Address(rsp, (--slot) * wordSize), r30);
+    __ movptr(Address(rsp, (--slot) * wordSize), r31);
+  }
   // r12-r15 are callee saved in all calling conventions
   assert(slot == 0, "must use all slots");
@@ -398,6 +417,25 @@ void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm,
     __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom), arg0, arg1);
   }
+  // Restore APX extended registers r31-r16 if previously saved
+  if (UseAPX) {
+    __ movptr(r31, Address(rsp, (slot++) * wordSize));
+    __ movptr(r30, Address(rsp, (slot++) * wordSize));
+    __ movptr(r29, Address(rsp, (slot++) * wordSize));
+    __ movptr(r28, Address(rsp, (slot++) * wordSize));
+    __ movptr(r27, Address(rsp, (slot++) * wordSize));
+    __ movptr(r26, Address(rsp, (slot++) * wordSize));
+    __ movptr(r25, Address(rsp, (slot++) * wordSize));
+    __ movptr(r24, Address(rsp, (slot++) * wordSize));
+    __ movptr(r23, Address(rsp, (slot++) * wordSize));
+    __ movptr(r22, Address(rsp, (slot++) * wordSize));
+    __ movptr(r21, Address(rsp, (slot++) * wordSize));
+    __ movptr(r20, Address(rsp, (slot++) * wordSize));
+    __ movptr(r19, Address(rsp, (slot++) * wordSize));
+    __ movptr(r18, Address(rsp, (slot++) * wordSize));
+    __ movptr(r17, Address(rsp, (slot++) * wordSize));
+    __ movptr(r16, Address(rsp, (slot++) * wordSize));
+  }
   __ movptr(r11, Address(rsp, (slot++) * wordSize));
   __ movptr(r10, Address(rsp, (slot++) * wordSize));
   __ movptr(r9, Address(rsp, (slot++) * wordSize));
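
A quick note on the discipline this patch extends: the stub reserves the whole save area once with `subptr(rsp, num_saved_regs * wordSize)`, stores registers at descending slot indices via `(--slot)`, and reloads them at ascending indices via `(slot++)` in the exact reverse register order, which is why `assert(slot == 0, "must use all slots")` holds between the two phases and why the restore block must run r31 down to r16. With `UseAPX` and `dst != rax`, `num_saved_regs = 4 + 1 + 4 + 16 = 25`. The standalone C++ sketch below models only this bookkeeping; the flags and the placeholder register labels (`g0`-`g3`, `dst`, `r8`) are illustrative assumptions, not HotSpot code.

```cpp
// Standalone model of the stub's slot bookkeeping (illustrative; not HotSpot code).
// Register names g0-g3/dst/r8-r11 are placeholders for the registers the stub saves.
#include <cassert>
#include <cstdio>
#include <string>
#include <vector>

int main() {
  const bool use_apx    = true;  // models the UseAPX flag
  const bool dst_is_rax = false; // models the (dst != rax) special case

  // Mirrors: 4 + (dst != rax ? 1 : 0) + 4 + (UseAPX ? 16 : 0)
  const unsigned num_saved_regs = 4 + (dst_is_rax ? 0 : 1) + 4 + (use_apx ? 16 : 0);

  // Save phase: one frame reservation, then stores at descending indices (--slot).
  std::vector<std::string> save_order = {"g0", "g1", "g2", "g3"};
  if (!dst_is_rax) save_order.push_back("dst");
  save_order.insert(save_order.end(), {"r8", "r9", "r10", "r11"});
  for (int r = 16; use_apx && r <= 31; ++r) save_order.push_back("r" + std::to_string(r));

  std::vector<std::string> stack(num_saved_regs); // the subptr'd save area
  unsigned slot = num_saved_regs;
  for (const std::string& reg : save_order) stack[--slot] = reg;
  assert(slot == 0 && "must use all slots"); // same invariant the stub asserts

  // Restore phase: ascending indices (slot++) in exact reverse register order,
  // i.e. r31 first and r16 last among the APX registers, then r11, r10, r9, ...
  for (auto it = save_order.rbegin(); it != save_order.rend(); ++it) {
    assert(stack[slot] == *it); // reverse order pairs each register with its own slot
    slot++;
  }
  assert(slot == num_saved_regs);
  std::printf("saved and restored %u registers symmetrically\n", num_saved_regs);
  return 0;
}
```

Restoring in reverse keeps each register paired with the slot it was stored in without any per-register offset table, so toggling the APX block only changes `num_saved_regs` and leaves the existing save/restore lines untouched.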