debugging: log the TransformersForCausalLM fallback only once per architecture

This commit is contained in:
Chranos
2026-02-09 15:24:55 +08:00
parent 8ecba6115e
commit 1d70f93cfc

View File

@@ -28,6 +28,9 @@ from .interfaces_base import is_embedding_model, is_text_generation_model
logger = init_logger(__name__)
# Cache for architectures that have already been logged
_logged_transformers_architectures: set = set()
# yapf: disable
_TEXT_GENERATION_MODELS = {
    # [Decoder-only]
@@ -403,11 +406,14 @@ class _ModelRegistry:
        model_module = getattr(transformers, architecture, None)
        if model_module is not None:
            # Model exists in transformers, can use TransformersForCausalLM wrapper
-            logger.info(
-                "Architecture %s found in transformers library, "
-                "using TransformersForCausalLM wrapper",
-                architecture
-            )
+            # Only log once per architecture to avoid spam
+            if architecture not in _logged_transformers_architectures:
+                _logged_transformers_architectures.add(architecture)
+                logger.info(
+                    "Architecture %s found in transformers library, "
+                    "using TransformersForCausalLM wrapper",
+                    architecture
+                )
            return "TransformersForCausalLM"
        # Get auto_map from hf_config