Skip to content

Commit 4d3f8c1

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 93abf7f commit 4d3f8c1

File tree

2 files changed

+8
-5
lines changed

2 files changed

+8
-5
lines changed

neural_compressor/torch/quantization/algorithm_entry.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -402,7 +402,7 @@ def autoround_quantize_entry(
402402
if getattr(model, "quantizer", False):
403403
del model.quantizer
404404
else:
405-
model.quantizer = quantizer
405+
model.quantizer = quantizer
406406
logger.info("AutoRound quantization done.")
407407
return model
408408

test/3x/torch/quantization/weight_only/test_autoround.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1+
import copy
2+
13
import pytest
24
import torch
35
import transformers
4-
import copy
56

67
from neural_compressor.torch.algorithms.weight_only.autoround import AutoRoundQuantizer, get_autoround_default_run_fn
78
from neural_compressor.torch.quantization import (
@@ -20,18 +21,20 @@
2021
except ImportError:
2122
auto_round_installed = False
2223

24+
2325
def get_gpt_j():
2426
tiny_gptj = transformers.AutoModelForCausalLM.from_pretrained(
2527
"hf-internal-testing/tiny-random-GPTJForCausalLM",
2628
torchscript=True,
2729
)
2830
return tiny_gptj
2931

32+
3033
@pytest.mark.skipif(not auto_round_installed, reason="auto_round module is not installed")
3134
class TestAutoRound:
3235
def setup_class(self):
33-
self.gptj= get_gpt_j()
34-
36+
self.gptj = get_gpt_j()
37+
3538
def setup_method(self, method):
3639
logger.info(f"Running TestAutoRound test: {method.__name__}")
3740

@@ -100,7 +103,7 @@ def test_quantizer(self):
100103
}
101104
}
102105
quantizer = AutoRoundQuantizer(weight_config=weight_config)
103-
fp32_model = gpt_j_model
106+
fp32_model = gpt_j_model
104107

105108
# quantizer execute
106109
model = quantizer.prepare(model=fp32_model)

0 commit comments

Comments (0)