sglang/python/sglang/srt/mem_cache/hicache_storage.py
Vishwanath Venkatesan 2cd2e27f80 SGLang HiCache NIXL Connector (#8488)
Signed-off-by: Vishwanath Venkatesan <vvenkatesan@nvidia.com>
Co-authored-by: Moein Khazraee <moein@nvidia.com>
Co-authored-by: Zhiqiang Xie <xiezhq@stanford.edu>
2025-07-31 13:09:42 -07:00

import hashlib
import logging
import os
from abc import ABC, abstractmethod
from typing import Any, List, Optional

import torch

from sglang.srt.distributed import (
    get_tensor_model_parallel_rank,
    get_tensor_model_parallel_world_size,
)

logger = logging.getLogger(__name__)


def get_hash_str(token_ids: List[int], prior_hash: Optional[str] = None) -> str:
    hasher = hashlib.sha256()
    if prior_hash:
        hasher.update(bytes.fromhex(prior_hash))
    for t in token_ids:
        hasher.update(t.to_bytes(4, byteorder="little", signed=False))
    return hasher.hexdigest()
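

# Example (illustrative): hashes chain across pages, so a page's hash commits
# to every token that came before it. Assuming a page size of 2 tokens:
#
#     h0 = get_hash_str([1, 2])                 # hash of the first page
#     h1 = get_hash_str([3, 4], prior_hash=h0)  # also depends on tokens [1, 2]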


class HiCacheStorage(ABC):
    """
    HiCacheStorage provides a generic key-value interface for storing and
    retrieving KV cache. It abstracts the underlying storage mechanism,
    allowing different implementations to be used.
    """

    # TODO: translate tensor object access for different TP ranks
    # potentially pass model and TP configs into the storage backend
    # TODO: the page size of the storage backend does not have to match the
    # page size of the host memory pool
    @abstractmethod
    def get(
        self,
        key: str,
        target_location: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> torch.Tensor | None:
        """
        Retrieve the value associated with the given key.
        Returns None if the key does not exist.
        """
        pass

    @abstractmethod
    def batch_get(
        self,
        keys: List[str],
        target_locations: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> List[torch.Tensor | None]:
        """
        Retrieve values for multiple keys.
        Returns a list with one tensor (or None) per key.
        """
        pass

    @abstractmethod
    def set(
        self,
        key: str,
        value: Optional[Any] = None,
        target_location: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> bool:
        """
        Store the value associated with the given key.
        Returns True if the operation was successful, False otherwise.
        """
        pass

    @abstractmethod
    def batch_set(
        self,
        keys: List[str],
        values: Optional[Any] = None,
        target_locations: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> bool:
        """
        Store multiple key-value pairs.
        Returns True if all operations were successful, False otherwise.
        """
        pass

    @abstractmethod
    def exists(self, key: str) -> bool | dict:
        """
        Check if the key exists in the storage.
        Returns True if the key exists, False otherwise.
        """
        pass
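

# A minimal sketch (illustrative only): an in-memory, dict-backed
# implementation of the interface above, useful for reasoning about the
# contract. A production backend would also handle capacity and eviction.
class DictStorageExample(HiCacheStorage):
    def __init__(self):
        self._store: dict = {}

    def get(self, key, target_location=None, target_sizes=None):
        return self._store.get(key)

    def batch_get(self, keys, target_locations=None, target_sizes=None):
        return [self._store.get(key) for key in keys]

    def set(self, key, value=None, target_location=None, target_sizes=None):
        self._store[key] = value
        return True

    def batch_set(self, keys, values=None, target_locations=None, target_sizes=None):
        if values is None or len(values) != len(keys):
            return False
        for key, value in zip(keys, values):
            self._store[key] = value
        return True

    def exists(self, key):
        return key in self._store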


class HiCacheFile(HiCacheStorage):

    def __init__(self, file_path: str = "/tmp/hicache"):
        self.file_path = os.getenv("SGLANG_HICACHE_FILE_BACKEND_STORAGE_DIR", file_path)
        tp_rank = get_tensor_model_parallel_rank()
        tp_size = get_tensor_model_parallel_world_size()
        self.tp_suffix = f"_{tp_rank}_{tp_size}" if tp_size > 1 else ""
        if tp_rank == 0 and not os.path.exists(self.file_path):
            # exist_ok guards against a race if another process creates the
            # directory between the check and the mkdir
            os.makedirs(self.file_path, exist_ok=True)
            logger.info(f"Created HiCacheFile storage directory at {self.file_path}")

    def _get_suffixed_key(self, key: str) -> str:
        return key + self.tp_suffix

    def get(
        self,
        key: str,
        target_location: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> torch.Tensor | None:
        key = self._get_suffixed_key(key)
        tensor_path = os.path.join(self.file_path, f"{key}.bin")
        try:
            if target_location is not None:
                # Load directly into target_location's memory buffer
                with open(tensor_path, "rb") as f:
                    target_location.set_(
                        torch.frombuffer(f.read(), dtype=target_location.dtype)
                        .reshape(target_location.shape)
                        .storage()
                    )
                return target_location
            else:
                loaded_tensor = torch.load(tensor_path)
                if isinstance(loaded_tensor, torch.Tensor):
                    return loaded_tensor
                else:
                    logger.error(f"Loaded data for key {key} is not a tensor.")
                    return None
        except FileNotFoundError:
            return None
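
    # Illustrative usage (hypothetical key and shape): pass a pre-allocated
    # host tensor as target_location so the bytes are read straight into its
    # buffer instead of allocating a new tensor:
    #
    #     buf = torch.empty(2, 128, dtype=torch.float16)
    #     storage.get("somekey", target_location=buf)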

    def batch_get(
        self,
        keys: List[str],
        target_locations: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> List[torch.Tensor | None]:
        return [
            self.get(key, target_location)
            for key, target_location in zip(
                keys, target_locations or [None] * len(keys)
            )
        ]

    def set(
        self,
        key: str,
        value: Optional[Any] = None,
        target_location: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> bool:
        key = self._get_suffixed_key(key)
        tensor_path = os.path.join(self.file_path, f"{key}.bin")
        # Check the file directly: exists() would suffix the key a second time
        if os.path.exists(tensor_path):
            logger.debug(f"Key {key} already exists. Skipped.")
            return True
        try:
            torch.save(value, tensor_path)
            return True
        except Exception as e:
            logger.error(f"Failed to save tensor {key}: {e}")
            return False

    def batch_set(
        self,
        keys: List[str],
        values: Optional[Any] = None,
        target_locations: Optional[Any] = None,
        target_sizes: Optional[Any] = None,
    ) -> bool:
        if values is None:
            return False
        for key, value in zip(keys, values):
            if not self.set(key, value):
                return False
        return True

    def exists(self, key: str) -> bool:
        key = self._get_suffixed_key(key)
        tensor_path = os.path.join(self.file_path, f"{key}.bin")
        return os.path.exists(tensor_path)

    def delete(self, key: str) -> None:
        key = self._get_suffixed_key(key)
        tensor_path = os.path.join(self.file_path, f"{key}.bin")
        try:
            os.remove(tensor_path)
        except FileNotFoundError:
            logger.warning(f"Key {key} does not exist. Cannot delete.")

    def clear(self) -> None:
        try:
            for filename in os.listdir(self.file_path):
                file_path = os.path.join(self.file_path, filename)
                if os.path.isfile(file_path):
                    os.remove(file_path)
            logger.info("Cleared all entries in HiCacheFile storage.")
        except Exception as e:
            logger.error(f"Failed to clear HiCacheFile storage: {e}")