[Feature] AutoModel can load components using model_index.json #11401
base: main
Changes from 11 commits: e506314, 85024b0, 6a0d0be, d86b0f2, 314b6cc, 528e002, 6e92f40, 76ea98d, 0e53ad0, f697631, 5614a15, f6b6b42, 4e5cac1
`src/diffusers/models/auto_model.py`:

```diff
@@ -16,9 +16,11 @@
 import os
 from typing import Optional, Union

-from huggingface_hub.utils import validate_hf_hub_args
+from huggingface_hub import hf_hub_download
+from huggingface_hub.utils import EntryNotFoundError, validate_hf_hub_args

 from ..configuration_utils import ConfigMixin
+from ..pipelines.pipeline_loading_utils import ALL_IMPORTABLE_CLASSES, get_class_obj_and_candidates


 class AutoModel(ConfigMixin):
```
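For context: `hf_hub_download` fetches a single file from a Hub repo into the local cache and returns its local path, raising `EntryNotFoundError` when the repo has no such file. That pair is what the new loading logic below is built on. A quick illustration, using the tiny test checkpoint from this PR's tests:

```python
from huggingface_hub import hf_hub_download
from huggingface_hub.utils import EntryNotFoundError

try:
    # Returns the local cache path of the downloaded file.
    path = hf_hub_download(
        "hf-internal-testing/tiny-stable-diffusion-torch", filename="model_index.json"
    )
    print(path)
except EntryNotFoundError:
    # Raised for repos that have no model_index.json (e.g. flat model repos).
    print("repo has no model_index.json")
```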
```diff
@@ -156,12 +158,30 @@ def from_pretrained(cls, pretrained_model_or_path: Optional[Union[str, os.PathLi
             "subfolder": subfolder,
         }

-        config = cls.load_config(pretrained_model_or_path, **load_config_kwargs)
-        orig_class_name = config["_class_name"]
+        try:
+            # To avoid circular import problem.
+            from diffusers import pipelines
+
+            mindex_kwargs = {k: v for k, v in load_config_kwargs.items() if k != "subfolder"}
```
A review thread is attached to the hunk above:

Reviewer: Move this section under an `if subfolder is not None:` block:

```python
if subfolder is not None:
    try:
        ...
```

I think we are not supporting local paths for now.

Author: What would be the complexity of supporting local paths here? Shouldn't we also be able to load models from repos with no folder hierarchy using this code? The following is a working example:

```python
try:
    control_net = AutoModel.from_pretrained(
        "ishan24/Sana_600M_1024px_ControlNet_diffusers",
        torch_dtype=torch.float16
    )
    print(f"test passed!")
except Exception as e:
    print(f"test failed: {e}")
```

Reviewer: I think for local paths you can append the subfolder to the path as well (we don't have to consider that for now), like:

```python
unet = AutoModel.from_pretrained("ishan24/SDXL_diffusers/unet")
```

Author: OK, so we don't want to support flat repos like this for loading models? Because doing the following would mean we don't support the case above:

```python
if subfolder is not None:
    try:
        ...
```

Reviewer: Oh no, we need to support flat repos; we need to support all repos. We just don't need to support the edge case where a user includes the subfolder directly in a local file path rather than passing it as `subfolder`.

Author: Got it, made the change.
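For reference, a minimal sketch of the control flow the thread converges on. This is an illustration only, not the PR's final code, and the helper name `find_index_config` is hypothetical: the `model_index.json` lookup runs only when `subfolder` is passed, and a flat repo (or a missing index) still falls through to the `config.json` path.

```python
from typing import Optional

from huggingface_hub import hf_hub_download
from huggingface_hub.utils import EntryNotFoundError


def find_index_config(pretrained_model_or_path: str, subfolder: Optional[str] = None) -> Optional[str]:
    """Return a local path to model_index.json when it applies, else None."""
    if subfolder is None:
        # Flat repo: there is no pipeline index entry to look up.
        return None
    try:
        return hf_hub_download(pretrained_model_or_path, filename="model_index.json")
    except EntryNotFoundError:
        # Repo has no pipeline index: the caller should use the config.json fallback.
        return None
```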
```diff
+            mindex_kwargs["filename"] = "model_index.json"
+            config_path = hf_hub_download(pretrained_model_or_path, **mindex_kwargs)
+            config = cls.load_config(config_path, **load_config_kwargs)
+            library, orig_class_name = config[subfolder]
+            model_cls, _ = get_class_obj_and_candidates(
+                library_name=library,
+                class_name=orig_class_name,
+                importable_classes=ALL_IMPORTABLE_CLASSES,
+                pipelines=pipelines,
+                is_pipeline_module=hasattr(pipelines, library),
+            )
+        except EntryNotFoundError:
+            # If `model_index.json` is not found, we try to load the model from the
+            # `config.json` file and `diffusers` library.
+            config = cls.load_config(pretrained_model_or_path, **load_config_kwargs)
+            library = importlib.import_module("diffusers")
+            orig_class_name = config["_class_name"]
+            model_cls = getattr(library, orig_class_name, None)

-        library = importlib.import_module("diffusers")
-
-        model_cls = getattr(library, orig_class_name, None)
         if model_cls is None:
             raise ValueError(f"AutoModel can't find a model linked to {orig_class_name}.")
```
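With this change, `AutoModel.from_pretrained` can resolve a component's class from a pipeline repo's `model_index.json`, so components that are not `diffusers` models (such as a `transformers` text encoder) load as well, while repos without an index keep the old behavior. A short usage sketch; both repo ids appear elsewhere in this PR (the test checkpoint and the working example from the review thread):

```python
import torch
from diffusers.models import AutoModel

# Pipeline repo: the ("transformers", "CLIPTextModel") entry under
# "text_encoder" in model_index.json picks the class.
text_encoder = AutoModel.from_pretrained(
    "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="text_encoder"
)

# Flat model repo with no model_index.json: hf_hub_download raises
# EntryNotFoundError and loading falls back to config.json + diffusers.
controlnet = AutoModel.from_pretrained(
    "ishan24/Sana_600M_1024px_ControlNet_diffusers", torch_dtype=torch.float16
)
```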
New test file:

```diff
@@ -0,0 +1,20 @@
+import unittest
+from unittest.mock import patch
+
+from huggingface_hub.utils import EntryNotFoundError
+from transformers import CLIPTextModel
+
+from diffusers.models import AutoModel, UNet2DConditionModel
+
+
+class TestAutoModel(unittest.TestCase):
+    @patch("diffusers.models.auto_model.hf_hub_download", side_effect=EntryNotFoundError("File not found"))
+    def test_from_pretrained_falls_back_on_entry_error(self, mock_hf_hub_download):
+        model = AutoModel.from_pretrained("hf-internal-testing/tiny-stable-diffusion-torch", subfolder="unet")
+        assert isinstance(model, UNet2DConditionModel)
+
+    def test_from_pretrained_loads_successfully(self):
+        model = AutoModel.from_pretrained("hf-internal-testing/tiny-stable-diffusion-torch", subfolder="text_encoder")
+        assert isinstance(model, CLIPTextModel)
```
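Note the patch target: the test patches `hf_hub_download` where `auto_model.py` looks it up (`diffusers.models.auto_model`), not where it is defined (`huggingface_hub`); patching the definition site would leave the reference that `auto_model.py` bound at import time untouched. The decorator form above is equivalent to this context-manager form:

```python
from unittest.mock import patch

from huggingface_hub.utils import EntryNotFoundError

from diffusers.models import AutoModel

# Every call auto_model.py makes to hf_hub_download now raises, which forces
# from_pretrained onto its config.json fallback branch.
with patch("diffusers.models.auto_model.hf_hub_download", side_effect=EntryNotFoundError("File not found")):
    model = AutoModel.from_pretrained("hf-internal-testing/tiny-stable-diffusion-torch", subfolder="unet")
```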
Reviewer: Would avoid using exceptions for control flow and simplify this a bit.
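One way to do that, sketched below: probe for `model_index.json` explicitly with `huggingface_hub.file_exists` instead of branching on `EntryNotFoundError`. This is a hypothetical refactor, not code from this PR; the helper name `_resolve_model_class` and its factoring are made up, and `file_exists` only accepts Hub repo ids, which matches the thread above where local paths are out of scope. The tradeoff is one extra HTTP request per load for the existence check.

```python
import importlib

from huggingface_hub import file_exists, hf_hub_download


def _resolve_model_class(cls, pretrained_model_or_path, subfolder, load_config_kwargs):
    # Check for the pipeline index up front instead of using the exception
    # as a branch condition.
    if subfolder is not None and file_exists(pretrained_model_or_path, "model_index.json"):
        from diffusers import pipelines
        from diffusers.pipelines.pipeline_loading_utils import (
            ALL_IMPORTABLE_CLASSES,
            get_class_obj_and_candidates,
        )

        mindex_kwargs = {k: v for k, v in load_config_kwargs.items() if k != "subfolder"}
        config_path = hf_hub_download(
            pretrained_model_or_path, filename="model_index.json", **mindex_kwargs
        )
        config = cls.load_config(config_path, **load_config_kwargs)
        library, class_name = config[subfolder]
        model_cls, _ = get_class_obj_and_candidates(
            library_name=library,
            class_name=class_name,
            importable_classes=ALL_IMPORTABLE_CLASSES,
            pipelines=pipelines,
            is_pipeline_module=hasattr(pipelines, library),
        )
        return model_cls
    # Flat repo (or no subfolder): resolve the class from config.json + diffusers.
    config = cls.load_config(pretrained_model_or_path, **load_config_kwargs)
    return getattr(importlib.import_module("diffusers"), config["_class_name"], None)
```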