@@ -89,6 +89,7 @@ def __getitem__(self, key):
     step3_text="Step3TextConfig",
     qwen3_next="Qwen3NextConfig",
     lfm2_moe="Lfm2MoeConfig",
+    tarsier2="Tarsier2Config",
 )
 
 _CONFIG_ATTRS_MAPPING: dict[str, str] = {
@@ -127,6 +128,9 @@ def parse(
             if config_dict.get("speculators_config") is not None
             else model_type
         )
+        # Allow hf_overrides to override model_type before checking _CONFIG_REGISTRY
+        if (hf_overrides := kwargs.pop("hf_overrides", None)) is not None:
+            model_type = hf_overrides.get("model_type", model_type)
 
         if model_type in _CONFIG_REGISTRY:
             config_class = _CONFIG_REGISTRY[model_type]
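
A minimal usage sketch of what this hook enables (not part of the diff; the model id and override value below are assumptions for illustration): passing hf_overrides with a model_type lets the parser resolve a class registered in _CONFIG_REGISTRY, such as the newly added Tarsier2Config, even when the checkpoint's config.json reports a different model_type.

from vllm import LLM

# Hypothetical example: force the "tarsier2" entry in _CONFIG_REGISTRY to be used
# even if config.json declares another model_type.
llm = LLM(
    model="omni-research/Tarsier2-Recap-7b",  # assumed model id, for illustration only
    hf_overrides={"model_type": "tarsier2"},
)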
@@ -310,7 +314,7 @@ def patch_rope_parameters(config: PretrainedConfig) -> None:
         config.rope_parameters["rope_theta"] = rope_theta
 
     # No RoPE parameters to patch
-    if not hasattr(config, "rope_parameters"):
+    if getattr(config, "rope_parameters", None) is None:
         return
 
     # Add original_max_position_embeddings if present
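
The switch from hasattr to getattr matters when a config defines rope_parameters but leaves it set to None: the old check would fall through and try to patch it anyway. A small sketch of the difference, using a stand-in config class (names here are illustrative, not from the diff):

class _FakeConfig:  # stand-in for a transformers PretrainedConfig
    rope_parameters = None

cfg = _FakeConfig()
hasattr(cfg, "rope_parameters")                  # True  -> old check kept going
getattr(cfg, "rope_parameters", None) is None    # True  -> new check returns early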
@@ -351,7 +355,10 @@ def patch_rope_parameters_dict(rope_parameters: dict[str, Any]) -> None:
         rope_parameters["rope_type"] = "longrope"
         logger.warning("Replacing legacy rope_type 'su' with 'longrope'")
     elif rope_parameters["rope_type"] == "mrope":
-        assert "mrope_section" in rope_parameters
+        if "mrope_section" not in rope_parameters:
+            raise ValueError(
+                "Legacy rope_type 'mrope' requires 'mrope_section' in rope_parameters"
+            )
         rope_parameters["rope_type"] = "default"
         logger.warning("Replacing legacy rope_type 'mrope' with 'default'")
 
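
For reference, a legacy rope dict of the kind this branch rewrites might look like the sketch below (values are illustrative, loosely modeled on Qwen2-VL-style configs). After patching, rope_type becomes "default"; a dict missing mrope_section now raises ValueError instead of tripping an assert.

rope_parameters = {"rope_type": "mrope", "mrope_section": [16, 24, 24]}
patch_rope_parameters_dict(rope_parameters)
# rope_parameters["rope_type"] is now "default"; omitting "mrope_section"
# would raise ValueError with the message added in this hunk.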
@@ -584,6 +591,7 @@ def get_config(
             trust_remote_code=trust_remote_code,
             revision=revision,
             code_revision=code_revision,
+            hf_overrides=hf_overrides_kw,
             **kwargs,
         )
     # Special architecture mapping check for GGUF models
@@ -915,11 +923,13 @@ def get_hf_text_config(config: PretrainedConfig):
     """
     text_config = config.get_text_config()
 
-    if text_config is not config:
-        # The code operates under the assumption that text_config should have
-        # `num_attention_heads` (among others). Assert here to fail early
-        # if transformers config doesn't align with this assumption.
-        assert hasattr(text_config, "num_attention_heads")
+    if text_config is not config and not hasattr(text_config, "num_attention_heads"):
+        raise ValueError(
+            "The text_config extracted from the model config does not have "
+            "`num_attention_heads` attribute. This indicates a mismatch "
+            "between the model config and vLLM's expectations. Please "
+            "ensure that the model config is compatible with vLLM."
+        )
 
     return text_config
 
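
A sketch of the contract this hunk enforces, using a multimodal checkpoint as an example (the model id is only illustrative): get_text_config() returns the nested text sub-config for composite configs, and that sub-config is expected to expose num_attention_heads; with this change a missing attribute surfaces as a ValueError rather than an AssertionError.

from transformers import AutoConfig

config = AutoConfig.from_pretrained("llava-hf/llava-1.5-7b-hf")
text_config = config.get_text_config()   # nested text sub-config for the LLM part
print(text_config is not config)          # True for composite configs
print(text_config.num_attention_heads)    # attribute vLLM relies on downstream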