Skip to content

Commit 95a0c78

Browse files
Isotr0py authored and huydhn committed
[Misc] Clean up InternVL family config registration (vllm-project#19992)
Signed-off-by: Isotr0py <2037008807@qq.com>
1 parent e739c5e commit 95a0c78

File tree

5 files changed

+40
-82
lines changed

5 files changed

+40
-82
lines changed

vllm/transformers_utils/config.py

Lines changed: 22 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -33,10 +33,8 @@
3333
from vllm.transformers_utils.configs import (ChatGLMConfig, Cohere2Config,
3434
DbrxConfig, DeepseekVLV2Config,
3535
EAGLEConfig, ExaoneConfig,
36-
H2OVLChatConfig,
37-
InternVLChatConfig, JAISConfig,
38-
KimiVLConfig, MedusaConfig,
39-
MiniMaxText01Config,
36+
JAISConfig, KimiVLConfig,
37+
MedusaConfig, MiniMaxText01Config,
4038
MiniMaxVL01Config, MllamaConfig,
4139
MLPSpeculatorConfig, MPTConfig,
4240
NemotronConfig, NVLM_D_Config,
@@ -90,8 +88,6 @@ def _get_hf_token() -> Optional[str]:
9088
"medusa": MedusaConfig,
9189
"eagle": EAGLEConfig,
9290
"exaone": ExaoneConfig,
93-
"h2ovl_chat": H2OVLChatConfig,
94-
"internvl_chat": InternVLChatConfig,
9591
"minimax_text_01": MiniMaxText01Config,
9692
"minimax_vl_01": MiniMaxVL01Config,
9793
"nemotron": NemotronConfig,
@@ -104,6 +100,10 @@ def _get_hf_token() -> Optional[str]:
104100
**_CONFIG_REGISTRY_OVERRIDE_HF
105101
}
106102

103+
_CONFIG_ATTRS_MAPPING: dict[str, str] = {
104+
"llm_config": "text_config",
105+
}
106+
107107

108108
class ConfigFormat(str, enum.Enum):
109109
AUTO = "auto"
@@ -286,6 +286,18 @@ def is_encoder_decoder(config: PretrainedConfig) -> bool:
286286
return getattr(config, "is_encoder_decoder", False)
287287

288288

289+
def _maybe_remap_hf_config_attrs(config: PretrainedConfig) -> PretrainedConfig:
290+
"""Remap config attributes to match the expected names."""
291+
for old_attr, new_attr in _CONFIG_ATTRS_MAPPING.items():
292+
if hasattr(config, old_attr):
293+
if not hasattr(config, new_attr):
294+
config.update({new_attr: getattr(config, old_attr)})
295+
delattr(config, old_attr)
296+
logger.debug("Remapped config attribute '%s' to '%s'", old_attr,
297+
new_attr)
298+
return config
299+
300+
289301
def get_config(
290302
model: Union[str, Path],
291303
trust_remote_code: bool,
@@ -361,6 +373,9 @@ def get_config(
361373
revision=revision,
362374
code_revision=code_revision,
363375
token=_get_hf_token(),
376+
# some old custom model's config needs
377+
# `has_no_defaults_at_init=True` to work.
378+
has_no_defaults_at_init=trust_remote_code,
364379
**kwargs,
365380
)
366381
except ValueError as e:
@@ -376,6 +391,7 @@ def get_config(
376391
raise RuntimeError(err_msg) from e
377392
else:
378393
raise e
394+
config = _maybe_remap_hf_config_attrs(config)
379395

380396
elif config_format == ConfigFormat.MISTRAL:
381397
config = load_params_config(model, revision, **kwargs)

vllm/transformers_utils/configs/__init__.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,6 @@
1111
# tiiuae/falcon-7b(-instruct) models. Newer Falcon models will use the
1212
# `FalconConfig` class from the official HuggingFace transformers library.
1313
from vllm.transformers_utils.configs.falcon import RWConfig
14-
from vllm.transformers_utils.configs.h2ovl import H2OVLChatConfig
15-
from vllm.transformers_utils.configs.internvl import InternVLChatConfig
1614
from vllm.transformers_utils.configs.jais import JAISConfig
1715
from vllm.transformers_utils.configs.kimi_vl import KimiVLConfig
1816
from vllm.transformers_utils.configs.medusa import MedusaConfig
@@ -38,8 +36,6 @@
3836
"DeepseekVLV2Config",
3937
"MPTConfig",
4038
"RWConfig",
41-
"H2OVLChatConfig",
42-
"InternVLChatConfig",
4339
"JAISConfig",
4440
"MedusaConfig",
4541
"EAGLEConfig",

vllm/transformers_utils/configs/h2ovl.py

Lines changed: 0 additions & 16 deletions
This file was deleted.

vllm/transformers_utils/configs/internvl.py

Lines changed: 0 additions & 54 deletions
This file was deleted.

vllm/transformers_utils/configs/nvlm_d.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,24 @@
88
# Copyright (c) 2024 NVIDIA
99
# Licensed under Apache 2.0 License [see LICENSE for details]
1010
# --------------------------------------------------------
11-
from .internvl import InternVLChatConfig
11+
from transformers import Qwen2Config
12+
from transformers.configuration_utils import PretrainedConfig
1213

1314

14-
class NVLM_D_Config(InternVLChatConfig):
15+
class NVLM_D_Config(PretrainedConfig):
1516
model_type = 'NVLM_D'
17+
is_composition = True
18+
19+
def __init__(self, vision_config=None, llm_config=None, **kwargs):
20+
super().__init__(**kwargs)
21+
22+
# Handle vision_config initialization
23+
if vision_config is None:
24+
vision_config = {}
25+
26+
# Handle llm_config initialization
27+
if llm_config is None:
28+
llm_config = {}
29+
30+
self.vision_config = PretrainedConfig(**vision_config)
31+
self.text_config = Qwen2Config(**llm_config)

0 commit comments

Comments (0)