3 files changed: +23 −3 lines changed

File 1 of 3:

@@ -16,6 +16,7 @@
 )
 from torchao.float8.fsdp_utils import precompute_float8_dynamic_scale_for_fsdp
 from torchao.float8.inference import Float8MMConfig
+from torchao.float8.types import FP8Granularity
 from torchao.utils import TORCH_VERSION_AT_LEAST_2_5
 
 if TORCH_VERSION_AT_LEAST_2_5:
@@ -41,5 +42,7 @@
     # top level UX
     "convert_to_float8_training",
     "precompute_float8_dynamic_scale_for_fsdp",
+    # types
+    "FP8Granularity",
     # note: Float8Tensor and Float8Linear are not public APIs
 ]
File 2 of 3:

@@ -12,6 +12,7 @@
 import torch
 
 from torchao.float8.float8_utils import is_row_major, pad_tensor_for_matmul
+from torchao.float8.types import FP8Granularity
 from torchao.quantization.granularity import (
     PerRow,
     PerTensor,
@@ -116,9 +117,6 @@ def _is_rowwise_scaled(x) -> bool:
     return x.block_size == (1,) * (x.dim() - 1) + (x.shape[-1],)
 
 
-FP8Granularity = Union[PerTensor, PerRow]
-
-
 def _normalize_granularity(
     granularity: Optional[
         Union[
File 3 of 3 (new file):

@@ -0,0 +1,19 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD 3-Clause license found in the
+# LICENSE file in the root directory of this source tree.
+"""
+Common types for float8 quantization
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from torchao.quantization.granularity import PerRow, PerTensor
+
+
+# Define FP8Granularity type alias to break circular import dependencies
+FP8Granularity = Union["PerTensor", "PerRow"]
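
With the alias defined in its own leaf module and re-exported from the float8 package (first hunk above), downstream code can annotate granularity arguments without importing it from the inference module. Below is a minimal usage sketch, assuming only the exports shown in these diffs; choose_granularity is a hypothetical helper, not part of torchao.

# Minimal sketch: annotate with the FP8Granularity alias, but check concrete
# classes at runtime (the alias uses string forward references, so it is meant
# for type checkers, not for isinstance checks against the Union itself).
from typing import Optional

from torchao.float8 import FP8Granularity  # re-exported per the first diff hunk
from torchao.quantization.granularity import PerRow, PerTensor


def choose_granularity(
    granularity: Optional[FP8Granularity] = None,
) -> FP8Granularity:
    # Hypothetical helper: default to per-tensor scaling when unspecified.
    if granularity is None:
        return PerTensor()
    if not isinstance(granularity, (PerTensor, PerRow)):
        raise ValueError(f"Expected PerTensor or PerRow, got {granularity!r}")
    return granularity


print(choose_granularity())           # PerTensor()
print(choose_granularity(PerRow()))   # PerRow()

Because the concrete classes are only imported under TYPE_CHECKING in the new module, runtime checks have to go through PerTensor/PerRow directly, as in the sketch above.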