@@ -410,37 +410,14 @@ def run(self) -> None:
                 package_data[package_name].append(file_name)
 
 
-def _is_hpu() -> bool:
-    # if VLLM_TARGET_DEVICE env var was set explicitly, skip HPU autodetection
-    if os.getenv("VLLM_TARGET_DEVICE", None) == VLLM_TARGET_DEVICE:
-        return VLLM_TARGET_DEVICE == "hpu"
-
-    # if VLLM_TARGET_DEVICE was not set explicitly, check if hl-smi succeeds,
-    # and if it doesn't, check if habanalabs driver is loaded
-    is_hpu_available = False
-    try:
-        out = subprocess.run(["hl-smi"], capture_output=True, check=True)
-        is_hpu_available = out.returncode == 0
-    except (FileNotFoundError, PermissionError, subprocess.CalledProcessError):
-        if sys.platform.startswith("linux"):
-            try:
-                output = subprocess.check_output(
-                    'lsmod | grep habanalabs | wc -l', shell=True)
-                is_hpu_available = int(output) > 0
-            except (ValueError, FileNotFoundError, PermissionError,
-                    subprocess.CalledProcessError):
-                pass
-    return is_hpu_available
-
-
 def _no_device() -> bool:
     return VLLM_TARGET_DEVICE == "empty"
 
 
 def _is_cuda() -> bool:
     has_cuda = torch.version.cuda is not None
     return (VLLM_TARGET_DEVICE == "cuda" and has_cuda
-            and not (_is_neuron() or _is_tpu() or _is_hpu()))
+            and not (_is_neuron() or _is_tpu()))
 
 
 def _is_hip() -> bool:
@@ -573,12 +550,6 @@ def get_vllm_version() -> str:
         if neuron_version != MAIN_CUDA_VERSION:
             neuron_version_str = neuron_version.replace(".", "")[:3]
             version += f"{sep}neuron{neuron_version_str}"
-    elif _is_hpu():
-        # Get the Intel Gaudi Software Suite version
-        gaudi_sw_version = str(get_gaudi_sw_version())
-        if gaudi_sw_version != MAIN_CUDA_VERSION:
-            gaudi_sw_version = gaudi_sw_version.replace(".", "")[:3]
-            version += f"{sep}gaudi{gaudi_sw_version}"
     elif _is_tpu():
         version += f"{sep}tpu"
     elif _is_cpu():
@@ -625,8 +596,6 @@ def _read_requirements(filename: str) -> list[str]:
         requirements = _read_requirements("rocm.txt")
     elif _is_neuron():
         requirements = _read_requirements("neuron.txt")
-    elif _is_hpu():
-        requirements = _read_requirements("hpu.txt")
     elif _is_tpu():
         requirements = _read_requirements("tpu.txt")
     elif _is_cpu():
@@ -635,8 +604,7 @@ def _read_requirements(filename: str) -> list[str]:
         requirements = _read_requirements("xpu.txt")
     else:
         raise ValueError(
-            "Unsupported platform, please use CUDA, ROCm, Neuron, HPU, "
-            "or CPU.")
+            "Unsupported platform, please use CUDA, ROCm, Neuron, or CPU.")
     return requirements
 
 
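For reference, a minimal standalone sketch of the autodetection logic deleted above, in case anyone still needs to probe for Gaudi hardware outside of setup.py. The function name `hpu_available` is illustrative (not part of this change); the probing strategy simply mirrors the removed `_is_hpu()` body: try `hl-smi` first, then fall back to checking for the habanalabs kernel module on Linux.

```python
# Sketch only: reproduces the HPU-detection strategy removed from setup.py.
import subprocess
import sys


def hpu_available() -> bool:
    # hl-smi exits with code 0 when a Gaudi device is visible.
    try:
        out = subprocess.run(["hl-smi"], capture_output=True, check=True)
        return out.returncode == 0
    except (FileNotFoundError, PermissionError, subprocess.CalledProcessError):
        pass

    # Fall back to checking whether the habanalabs driver is loaded.
    if sys.platform.startswith("linux"):
        try:
            output = subprocess.check_output(
                "lsmod | grep habanalabs | wc -l", shell=True)
            return int(output) > 0
        except (ValueError, FileNotFoundError, PermissionError,
                subprocess.CalledProcessError):
            pass
    return False


if __name__ == "__main__":
    print("HPU detected" if hpu_available() else "no HPU detected")
```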