Test the case when max_new_tokens is very large (#1038)
This commit is contained in:
@@ -5,13 +5,15 @@ from sglang.test.test_utils import run_unittest_files
suites = {
    "minimal": [
        "test_eval_accuracy.py",
        "test_openai_server.py",
        "test_vision_openai_server.py",
        "test_embedding_openai_server.py",
        "test_chunked_prefill.py",
        "test_embedding_openai_server.py",
        "test_eval_accuracy.py",
        "test_large_max_new_tokens.py",
        "test_openai_server.py",
        "test_skip_tokenizer_init.py",
        "test_torch_compile.py",
        "test_models_from_modelscope.py",
        "test_vision_openai_server.py",
        "test_large_max_new_tokens.py",
        "models/test_generation_models.py",
        "models/test_embedding_models.py",
        "sampling/penaltylib",
Reference in New Issue
Block a user