Add retry for flaky tests in CI (#4755)

This commit is contained in:
fzyzcjy
2025-03-26 07:53:12 +08:00
committed by GitHub
parent 52029bd1e3
commit 15ddd84322
112 changed files with 273 additions and 152 deletions

View File

@@ -18,6 +18,7 @@ import unittest
 import torch
 from sglang.test.runners import HFRunner, SRTRunner
+from sglang.test.test_utils import CustomTestCase
 LORA_SETS = [
     # {
@@ -70,7 +71,7 @@ What do you know about llamas?
 #     PROMPTS.append(sample[0]["content"][:2000])
-class TestLoRA(unittest.TestCase):
+class TestLoRA(CustomTestCase):
     def inference(self, prompts, lora_set, tp_size, torch_dtype, max_new_tokens):
         print("=================== testing inference =======================")

View File

@@ -21,7 +21,7 @@ import torch
 from utils import BACKENDS, TORCH_DTYPES, LoRAAdaptor, LoRAModelCase
 from sglang.test.runners import HFRunner, SRTRunner
-from sglang.test.test_utils import calculate_rouge_l, is_in_ci
+from sglang.test.test_utils import CustomTestCase, calculate_rouge_l, is_in_ci
 CI_LORA_MODELS = [
     LoRAModelCase(
@@ -67,7 +67,7 @@ PROMPTS = [
 ]
-class TestLoRABackend(unittest.TestCase):
+class TestLoRABackend(CustomTestCase):
     def run_backend(
         self,
         prompt: str,

View File

@@ -21,7 +21,7 @@ import torch
 from utils import TORCH_DTYPES, LoRAAdaptor, LoRAModelCase
 from sglang.test.runners import HFRunner, SRTRunner
-from sglang.test.test_utils import calculate_rouge_l, is_in_ci
+from sglang.test.test_utils import CustomTestCase, calculate_rouge_l, is_in_ci
 CI_LORA_MODELS = [
     LoRAModelCase(
@@ -69,7 +69,7 @@ PROMPTS = [
 BACKEND = "triton"
-class TestLoRATP(unittest.TestCase):
+class TestLoRATP(CustomTestCase):
     def run_tp(
         self,
         prompt: str,

View File

@@ -19,7 +19,7 @@ from typing import List
 import torch
 from utils import BACKENDS, TORCH_DTYPES, LoRAAdaptor, LoRAModelCase
-from sglang.test.test_utils import is_in_ci
+from sglang.test.test_utils import CustomTestCase, is_in_ci
 MULTI_LORA_MODELS = [
     LoRAModelCase(
@@ -51,7 +51,7 @@ PROMPTS = [
 ]
-class TestMultiLoRABackend(unittest.TestCase):
+class TestMultiLoRABackend(CustomTestCase):
     def run_backend_batch(
         self,
         prompts: List[str],