
Commit 593e3bc

Update on "Remove internal usage of all config functions like int4_weight_only"
**Summary:** These are now deprecated as of #2994. We should stop using them internally as well.

**Test Plan:** CI

[ghstack-poisoned]
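For context, here is a minimal sketch of the kind of internal migration the summary refers to: passing the class-based config to `quantize_` instead of the deprecated functional constructor. `quantize_` and `Int4WeightOnlyConfig` are public torchao APIs; the toy model, the bfloat16/CUDA setup, and `group_size=128` are illustrative assumptions, not taken from this commit.

```python
import torch

from torchao.quantization import Int4WeightOnlyConfig, quantize_

# Illustrative toy model; int4 weight-only kernels generally expect
# bfloat16 weights on CUDA (an assumption of this sketch).
model = torch.nn.Sequential(torch.nn.Linear(128, 128)).to(torch.bfloat16).cuda()

# Deprecated style (now emits a deprecation warning):
#   from torchao.quantization import int4_weight_only
#   quantize_(model, int4_weight_only(group_size=128))

# Preferred style, using the class-based config:
quantize_(model, Int4WeightOnlyConfig(group_size=128))
```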
Parent: cbf82c7

1 file changed: 24 additions, 23 deletions


test/quantization/test_quant_api.py

```diff
@@ -750,40 +750,41 @@ def test_int4wo_cuda_serialization(self):
         # load state_dict in cuda
         model.load_state_dict(sd, assign=True)
 
+
     def test_config_deprecation(self):
         """
-        Test that old config functions like `Int4WeightOnlyConfig` trigger deprecation warnings.
+        Test that old config functions like `int4_weight_only` trigger deprecation warnings.
         """
         from torchao.quantization import (
-            Float8DynamicActivationFloat8WeightConfig,
-            Float8StaticActivationFloat8WeightConfig,
-            Float8WeightOnlyConfig,
-            FPXWeightOnlyConfig,
-            GemliteUIntXWeightOnlyConfig,
-            Int4DynamicActivationInt4WeightConfig,
-            Int4WeightOnlyConfig,
-            Int8DynamicActivationInt4WeightConfig,
-            Int8DynamicActivationInt8WeightConfig,
-            Int8WeightOnlyConfig,
-            UIntXWeightOnlyConfig,
+            float8_dynamic_activation_float8_weight,
+            float8_static_activation_float8_weight,
+            float8_weight_only,
+            fpx_weight_only,
+            gemlite_uintx_weight_only,
+            int4_dynamic_activation_int4_weight,
+            int4_weight_only,
+            int8_dynamic_activation_int4_weight,
+            int8_dynamic_activation_int8_weight,
+            int8_weight_only,
+            uintx_weight_only,
         )
 
         # Reset deprecation warning state, otherwise we won't log warnings here
         warnings.resetwarnings()
 
         # Map from deprecated API to the args needed to instantiate it
         deprecated_apis_to_args = {
-            Float8DynamicActivationFloat8WeightConfig: (),
-            Float8StaticActivationFloat8WeightConfig: (torch.randn(3)),
-            Float8WeightOnlyConfig: (),
-            FPXWeightOnlyConfig: (3, 2),
-            GemliteUIntXWeightOnlyConfig: (),
-            Int4DynamicActivationInt4WeightConfig: (),
-            Int4WeightOnlyConfig: (),
-            Int8DynamicActivationInt4WeightConfig: (),
-            Int8DynamicActivationInt8WeightConfig: (),
-            Int8WeightOnlyConfig: (),
-            UIntXWeightOnlyConfig: (torch.uint4,),
+            float8_dynamic_activation_float8_weight: (),
+            float8_static_activation_float8_weight: (torch.randn(3)),
+            float8_weight_only: (),
+            fpx_weight_only: (3, 2),
+            gemlite_uintx_weight_only: (),
+            int4_dynamic_activation_int4_weight: (),
+            int4_weight_only: (),
+            int8_dynamic_activation_int4_weight: (),
+            int8_dynamic_activation_int8_weight: (),
+            int8_weight_only: (),
+            uintx_weight_only: (torch.uint4,),
         }
 
         with warnings.catch_warnings(record=True) as _warnings:
```
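The assertion body of the test sits below this hunk and is unchanged, so it is not shown here. For readers unfamiliar with the pattern, here is a minimal sketch of the kind of check `test_config_deprecation` performs, assuming each deprecated functional config warns when instantiated; the `simplefilter` call and the substring match are assumptions of this sketch, not the actual test code.

```python
import warnings

from torchao.quantization import int8_weight_only  # deprecated functional config

# Clear any previously-recorded warning state so the warning fires again.
warnings.resetwarnings()

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    int8_weight_only()  # instantiating the deprecated API should warn

assert any("deprecat" in str(w.message).lower() for w in caught), (
    "expected a deprecation warning from int8_weight_only()"
)
```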
