set default attention backend for deterministic inference (#11801)
This commit is contained in:
@@ -174,6 +174,15 @@ def is_blackwell():
|
||||
return torch.cuda.get_device_capability()[0] == 10
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def is_sm120_supported(device=None) -> bool:
    """Return True if ``device`` is an SM 12.0 (compute capability 12.x) GPU
    and the installed CUDA toolkit is new enough (>= 12.8) to target it.

    Args:
        device: Optional device index/handle forwarded to
            ``torch.cuda.get_device_capability``; ``None`` means the
            current device.

    Returns:
        bool: False on non-CUDA-alike platforms, on non-SM12 devices, or
        when the CUDA runtime version is unknown or older than 12.8.
    """
    # NOTE(review): maxsize=1 means calls alternating between two different
    # ``device`` values thrash the cache — results stay correct, just uncached.
    if not is_cuda_alike():
        return False
    # Check the device capability first (preserves the original short-circuit
    # order, so the version string is never inspected for non-SM12 devices).
    if torch.cuda.get_device_capability(device)[0] != 12:
        return False
    cuda_version = torch.version.cuda
    if cuda_version is None:
        # e.g. ROCm or CPU-only builds: is_cuda_alike() can be true while
        # torch.version.cuda is None; the old ``None >= "12.8"`` comparison
        # would raise TypeError here.
        return False
    # Compare numerically, not lexicographically: as strings, "12.10" sorts
    # below "12.8", which would wrongly reject newer toolkits.
    major, minor = (int(part) for part in cuda_version.split(".")[:2])
    return (major, minor) >= (12, 8)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def is_sm100_supported(device=None) -> bool:
|
||||
if not is_cuda_alike():
|
||||
|
||||
Reference in New Issue
Block a user