Skip to content

Commit 62249b4

Browse files
akx and Titus-von-Koeller
authored and committed
Soft-require transformers in tests
1 parent 782ab96 commit 62249b4

File tree

1 file changed

+4
-7
lines changed

1 file changed

+4
-7
lines changed

tests/test_generation.py

Lines changed: 4 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -3,17 +3,14 @@
33

44
import pytest
55
import torch
6-
import transformers
7-
from transformers import (
8-
AutoModelForCausalLM,
9-
BitsAndBytesConfig,
10-
)
116

127
from tests.helpers import TRUE_FALSE, describe_dtype, id_formatter
138

9+
transformers = pytest.importorskip("transformers")
10+
1411

1512
def get_4bit_config():
16-
return BitsAndBytesConfig(
13+
return transformers.BitsAndBytesConfig(
1714
load_in_4bit=True,
1815
load_in_8bit=False,
1916
llm_int8_threshold=6.0,
@@ -31,7 +28,7 @@ def get_model_and_tokenizer(config):
3128
bnb_config.load_in_4bit = False
3229
else:
3330
bnb_config.bnb_4bit_quant_type= quant_type
34-
model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
31+
model = transformers.AutoModelForCausalLM.from_pretrained(model_name_or_path,
3532
quantization_config=bnb_config,
3633
max_memory={0:'48GB'},
3734
device_map='auto',

0 commit comments

Comments (0)