Files

131 lines
3.4 KiB
Python
Raw Permalink Normal View History

2025-08-13 19:46:19 +08:00
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
import json
2025-11-15 20:14:45 +08:00
import os
import struct
2025-08-13 19:46:19 +08:00
from functools import cache
from os import PathLike
from pathlib import Path
2025-11-15 20:14:45 +08:00
from typing import Any
2025-08-13 19:46:19 +08:00
2025-11-15 20:14:45 +08:00
import vllm.envs as envs
2025-08-13 19:46:19 +08:00
from vllm.logger import init_logger
logger = init_logger(__name__)
def is_s3(model_or_path: str) -> bool:
    """Return True if *model_or_path* is an S3 URI (``s3://...``)."""
    normalized = model_or_path.lower()
    return normalized.startswith("s3://")
def is_gcs(model_or_path: str) -> bool:
    """Return True if *model_or_path* is a Google Cloud Storage URI (``gs://...``)."""
    normalized = model_or_path.lower()
    return normalized.startswith("gs://")
def is_cloud_storage(model_or_path: str) -> bool:
    """Return True if *model_or_path* lives in cloud object storage (S3 or GCS)."""
    # str.startswith accepts a tuple of prefixes; equivalent to
    # is_s3(...) or is_gcs(...), both of which lowercase first.
    return model_or_path.lower().startswith(("s3://", "gs://"))
def check_gguf_file(model: str | PathLike) -> bool:
2025-08-13 19:46:19 +08:00
"""Check if the file is a GGUF model."""
model = Path(model)
if not model.is_file():
return False
elif model.suffix == ".gguf":
return True
try:
with model.open("rb") as f:
header = f.read(4)
return header == b"GGUF"
except Exception as e:
logger.debug("Error reading file %s: %s", model, e)
return False
def modelscope_list_repo_files(
    repo_id: str,
    revision: str | None = None,
    token: str | bool | None = None,
) -> list[str]:
    """List files in a modelscope repo."""
    # Imported lazily so modelscope is only required when actually used.
    from modelscope.hub.api import HubApi

    api = HubApi()
    api.login(token)
    # Mirrors huggingface_hub.list_repo_files: keep the path of every
    # blob (regular file) entry, walking the repo recursively.
    entries = api.get_model_files(model_id=repo_id, revision=revision, recursive=True)
    return [entry["Path"] for entry in entries if entry["Type"] == "blob"]
def _maybe_json_dict(path: str | PathLike) -> dict[str, str]:
2025-08-13 19:46:19 +08:00
with open(path) as f:
try:
return json.loads(f.read())
except Exception:
return dict[str, str]()
2025-11-15 20:14:45 +08:00
def _maybe_space_split_dict(path: str | PathLike) -> dict[str, str]:
2025-08-13 19:46:19 +08:00
parsed_dict = dict[str, str]()
with open(path) as f:
for line in f.readlines():
try:
model_name, redirect_name = line.strip().split()
parsed_dict[model_name] = redirect_name
except Exception:
pass
return parsed_dict
@cache
def maybe_model_redirect(model: str) -> str:
    """
    Use model_redirect to redirect the model name to a local folder.

    :param model: hf model name
    :return: maybe redirect to a local folder
    """
    redirect_path = envs.VLLM_MODEL_REDIRECT_PATH

    # No redirect file configured, or it doesn't exist: pass through.
    if not redirect_path or not Path(redirect_path).exists():
        return model

    # Prefer the JSON format; fall back to space-separated pairs when the
    # file is not a JSON object (or parses to an empty mapping).
    mapping = _maybe_json_dict(redirect_path) or _maybe_space_split_dict(redirect_path)

    target = mapping.get(model)
    if target:
        logger.info("model redirect: [ %s ] -> [ %s ]", model, target)
        return target

    return model
def parse_safetensors_file_metadata(path: str | PathLike) -> dict[str, Any]:
with open(path, "rb") as f:
length_of_metadata = struct.unpack("<Q", f.read(8))[0]
metadata = json.loads(f.read(length_of_metadata).decode("utf-8"))
return metadata
def convert_model_repo_to_path(model_repo: str) -> str:
    """When VLLM_USE_MODELSCOPE is True convert a model
    repository string to a Path str."""
    if not envs.VLLM_USE_MODELSCOPE:
        return model_repo
    # Already a local path on disk: nothing to convert.
    if Path(model_repo).exists():
        return model_repo

    # Imported lazily so modelscope is only required when actually used.
    from modelscope.utils.file_utils import get_model_cache_root

    return os.path.join(get_model_cache_root(), model_repo)