Updates transformers and timm dependencies (#7577)
Signed-off-by: Xinyuan Tong <justinning0323@outlook.com>
This commit is contained in:
@@ -43,7 +43,8 @@ runtime_common = [
     "soundfile==0.13.1",
     "scipy",
     "torchao==0.9.0",
-    "transformers==4.52.3",
+    "transformers==4.53.0",
+    "timm==1.0.16",
     "uvicorn",
     "uvloop",
     "xgrammar==0.1.19",
@@ -105,7 +106,6 @@ test = [
     "matplotlib",
     "pandas",
     "peft",
-    "timm",
     "sentence_transformers",
 ]

 all = ["sglang[srt]", "sglang[openai]", "sglang[anthropic]", "sglang[litellm]", "sglang[torch_memory_saver]", "sglang[decord]"]
@@ -166,8 +166,7 @@ class Gemma3Attention(nn.Module):
             prefix=add_prefix("o_proj", prefix),
         )

-        # Determine if layer uses sliding window based on pattern
-        self.is_sliding = bool((layer_id + 1) % config.sliding_window_pattern)
+        self.is_sliding = config.layer_types[layer_id] == "sliding_attention"

         # Initialize the rotary embedding.
        if self.is_sliding:
||||
Reference in New Issue
Block a user