Skip to content

Commit e368332

Browse files
Wenbin Lin authored and facebook-github-bot committed
Revert D76866940: Decouple embedding_ssd_{}_pt2_autograd from CUDA files
Differential Revision: D76866940 Original commit changeset: 2c09ae92cc0d Original Phabricator Diff: D76866940 fbshipit-source-id: 4957c3958c927204c82221132831ace92c1fe2e7
1 parent 0b21002 commit e368332

File tree

1 file changed

+4
-14
lines changed

1 file changed

+4
-14
lines changed

fbgemm_gpu/fbgemm_gpu/utils/loader.py

Lines changed: 4 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,7 @@
1414

1515

1616
def load_torch_module(
17-
unified_path: str,
18-
cuda_path: Optional[str] = None,
19-
hip_path: Optional[str] = None,
20-
mtia_path: Optional[str] = None,
17+
unified_path: str, cuda_path: Optional[str] = None, hip_path: Optional[str] = None
2118
) -> None:
2219
try:
2320
torch.ops.load_library(unified_path)
@@ -27,16 +24,9 @@ def load_torch_module(
2724
hip_path = f"{unified_path}_hip"
2825
torch.ops.load_library(hip_path)
2926
else:
30-
try:
31-
# pyre-ignore-next-line[21]
32-
import mtia.host_runtime.torch_mtia.dynamic_library # noqa
33-
34-
if mtia_path is not None:
35-
torch.ops.load_library(mtia_path)
36-
except OSError:
37-
if not cuda_path:
38-
cuda_path = f"{unified_path}_cuda"
39-
torch.ops.load_library(cuda_path)
27+
if not cuda_path:
28+
cuda_path = f"{unified_path}_cuda"
29+
torch.ops.load_library(cuda_path)
4030

4131

4232
def load_torch_module_bc(new_path: str, old_path: str) -> None:

0 commit comments

Comments (0)