Skip to content

Commit fd3390a

Browse files
committed
Construct with the same dtype; constructing on fp32 was found to make no difference.
Signed-off-by: Kyle Sayers <kylesayrs@gmail.com>
1 parent b6a0dd4 commit fd3390a

File tree

2 files changed

+4
-6
lines changed

2 files changed

+4
-6
lines changed

src/compressed_tensors/transform/factory/hadamard.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -67,8 +67,8 @@ def _create_weight(
6767
construct_device: device,
6868
) -> Parameter:
6969
# construct on execution device, cache on offload device
70-
data = deterministic_hadamard_matrix(size, torch.float32, construct_device)
71-
data = data.to(dtype=dtype, device=device)
70+
data = deterministic_hadamard_matrix(size, dtype, construct_device)
71+
data = data.to(device=device)
7272
return Parameter(data, requires_grad=self.scheme.requires_grad)
7373

7474

src/compressed_tensors/transform/factory/random_hadamard.py

Lines changed: 2 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -37,8 +37,6 @@ def _create_weight(
3737
construct_device: device,
3838
) -> Parameter:
3939
# construct on execution device, cache on offload device
40-
data = random_hadamard_matrix(
41-
size, torch.float32, construct_device, self.generator
42-
)
43-
data = data.to(dtype=dtype, device=device)
40+
data = random_hadamard_matrix(size, dtype, construct_device, self.generator)
41+
data = data.to(device=device)
4442
return Parameter(data, requires_grad=self.scheme.requires_grad)

0 commit comments

Comments (0)