
transformers>=4.53.0 causes test_huggingface_client to break #3731


Description

@yifanmai

On transformers 4.53.0 and timm 0.6.12, I get the following test failure:

_____________________________________________________ TestHuggingFaceClient.test_logprob ______________________________________________________

self = <helm.clients.test_huggingface_client.TestHuggingFaceClient object at 0x705579018850>

    def test_logprob(self):
        tokenizer = HuggingFaceTokenizer(
            BlackHoleCacheConfig(), "huggingface/gpt2", pretrained_model_name_or_path="openai-community/gpt2"
        )
        client = HuggingFaceClient(
            cache_config=BlackHoleCacheConfig(),
            tokenizer=tokenizer,
            pretrained_model_name_or_path="openai-community/gpt2",
        )
        prompt: str = "I am a computer scientist."
>       result: RequestResult = client.make_request(
            Request(
                model="openai/gpt2",
                model_deployment="huggingface/gpt2",
                prompt=prompt,
                num_completions=1,
                max_tokens=0,
                echo_prompt=True,
            )
        )

src/helm/clients/test_huggingface_client.py:68:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
src/helm/clients/huggingface_client.py:332: in make_request
    huggingface_model: HuggingFaceServer = HuggingFaceServerFactory.get_server(
src/helm/clients/huggingface_client.py:222: in get_server
    HuggingFaceServerFactory._servers[helm_model_name] = HuggingFaceServer(
src/helm/clients/huggingface_client.py:100: in __init__
    self.model = AutoModelForCausalLM.from_pretrained(pretrained_model_name_or_path, **kwargs).to(
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:563: in from_pretrained
    has_local_code = type(config) in cls._model_mapping.keys()
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:820: in keys
    mapping_keys = [
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:821: in <listcomp>
    self._load_attr_from_module(key, name)
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:817: in _load_attr_from_module
    return getattribute_from_module(self._modules[module_name], attr)
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:729: in getattribute_from_module
    if hasattr(module, attr):
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/utils/import_utils.py:2154: in __getattr__
    module = self._get_module(self._class_to_module[name])
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/utils/import_utils.py:2184: in _get_module
    raise e
../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/utils/import_utils.py:2182: in _get_module
    return importlib.import_module("." + module_name, self.__name__)
../../.pyenv/versions/3.10.13/lib/python3.10/importlib/__init__.py:126: in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
<frozen importlib._bootstrap>:1050: in _gcd_import
    ???
<frozen importlib._bootstrap>:1027: in _find_and_load
    ???
<frozen importlib._bootstrap>:1006: in _find_and_load_unlocked
    ???
<frozen importlib._bootstrap>:688: in _load_unlocked
    ???
<frozen importlib._bootstrap_external>:883: in exec_module
    ???
<frozen importlib._bootstrap>:241: in _call_with_frames_removed
    ???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    #                🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
    #           This file was automatically generated from src/transformers/models/gemma3n/modular_gemma3n.py.
    #               Do NOT edit this file manually as any edits will be overwritten by the generation of
    #             the file from the modular. If any change should be done, please apply the change to the
    #                          modular_gemma3n.py file directly. One of our CI enforces this.
    #                🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
    # coding=utf-8
    # Copyright 2025 Google Inc. HuggingFace Inc. team. All rights reserved.
    #
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    #     http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    from collections.abc import Sequence
    from typing import Any, Optional, Union

    from ...configuration_utils import PretrainedConfig, layer_type_validation
    from ...modeling_rope_utils import rope_config_validation
    from ...utils import is_timm_available, logging, requires_backends


    if is_timm_available():
>       from timm.data import ImageNetInfo, infer_imagenet_subset
E       ImportError: cannot import name 'ImageNetInfo' from 'timm.data' (/home/yifanmai/.pyenv/versions/crfm-helm/lib/python3.10/site-packages/timm/data/__init__.py)

../../.pyenv/versions/crfm-helm/lib/python3.10/site-packages/transformers/models/gemma3n/configuration_gemma3n.py:31: ImportError
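
The failure does not look specific to HELM: AutoModelForCausalLM.from_pretrained checks the loaded config against transformers' full model mapping, which lazily imports the Gemma 3n configuration module, and that module imports ImageNetInfo from timm.data whenever timm is installed. On timm 0.6.12 that symbol does not exist, so the lazy import raises. A minimal reproduction sketch outside of HELM (assuming transformers>=4.53.0 and timm==0.6.12 in the environment; the model name is just an example):

    # Minimal reproduction sketch, independent of HELM. Assumes transformers>=4.53.0
    # and an old timm (0.6.12) are installed; any causal LM load should trigger it.
    from transformers import AutoModelForCausalLM

    # from_pretrained compares the config against the full model mapping, which
    # lazily imports the gemma3n configuration module; with an old timm this
    # raises: ImportError: cannot import name 'ImageNetInfo' from 'timm.data'
    model = AutoModelForCausalLM.from_pretrained("openai-community/gpt2")

Upgrading timm to a release that provides timm.data.ImageNetInfo, or pinning transformers below 4.53.0, appears to avoid the failure; I haven't verified exactly which timm release introduced that symbol.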
