From baed0b5003394573a534c95303df2f6bb3efcf42 Mon Sep 17 00:00:00 2001
From: aiyueqi
Date: Thu, 7 May 2026 17:22:34 +0800
Subject: [PATCH] =?UTF-8?q?=E6=9B=B4=E6=96=B0=20transformers=5Fserver.py?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
NOTE(review): this change hard-codes torch.device("cuda") and drops the
previous CPU fallback, so load_model() will raise on any host without a
CUDA device. Consider keeping the torch.cuda.is_available() guard, or
making the device configurable, instead of removing the fallback.
(This note sits between the "---" separator and the diffstat, a region
git am ignores, so it does not change what the patch applies.)

 transformers_server.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/transformers_server.py b/transformers_server.py
index ebdb7af..905f9f5 100644
--- a/transformers_server.py
+++ b/transformers_server.py
@@ -79,7 +79,7 @@ def load_model():
     processor_class = resolve_transformers_class(config["processer_class"])
     model_class = resolve_transformers_class(config["model_class"])
     torch_dtype = resolve_torch_dtype(config["torch_dtype"])
-    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+    device = torch.device("cuda")
     logger.info(
         f"model config: model_class={config['model_class']}, "