Update the transformers version in CI (#1690)
.github/workflows/pr-test.yml
@@ -29,7 +29,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[dev]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
       - name: Run test
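The same one-line bump from transformers==4.44 to transformers==4.45.2 repeats in every job below. As an illustrative sketch (not part of the commit), a job could fail fast if pip resolution ever drifts from the pin; the `EXPECTED` constant here is hypothetical and simply mirrors the pinned version:

```python
# Hypothetical post-install guard: abort the job early if the resolver
# installed a transformers version other than the one pinned in pr-test.yml.
import transformers

EXPECTED = "4.45.2"  # assumption: mirrors the pin above
assert transformers.__version__ == EXPECTED, (
    f"expected transformers=={EXPECTED}, got {transformers.__version__}"
)
print(f"transformers {transformers.__version__} OK")
```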
@@ -49,7 +49,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[dev]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
       - name: Run test
@@ -69,7 +69,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[dev]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
       - name: Run test
@@ -89,7 +89,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[dev]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
       - name: Run test
@@ -109,7 +109,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[all]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
       - name: Benchmark Single Latency
@@ -147,7 +147,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[all]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
       - name: Benchmark Offline Throughput (w/o RadixAttention)
@@ -179,7 +179,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[all]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
       - name: Benchmark Offline Throughput (TP=2)
@@ -211,7 +211,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[all]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
           git clone https://github.com/merrymercy/human-eval.git
@@ -235,7 +235,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -e "python[all]"
-          pip install transformers==4.44
+          pip install transformers==4.45.2
           pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
 
           git clone https://github.com/merrymercy/human-eval.git
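Every job also force-reinstalls flashinfer from a wheel index whose URL encodes the toolchain (cu121, torch2.4). A minimal sanity-check sketch, assuming a CUDA-enabled torch build on the runner, for confirming the environment actually matches that index; it is not part of the commit:

```python
# Illustrative check: the flashinfer index path cu121/torch2.4 implies
# torch 2.4.x built against CUDA 12.1, so verify before force-reinstalling.
import torch

assert torch.__version__.startswith("2.4"), f"unexpected torch {torch.__version__}"
assert torch.version.cuda == "12.1", f"unexpected CUDA {torch.version.cuda}"
print("torch", torch.__version__, "/ CUDA", torch.version.cuda, "matches the wheel index")
```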
@@ -432,9 +432,11 @@ def launch_server(
     LOGGING_CONFIG["formatters"]["default"][
         "fmt"
     ] = "[%(asctime)s] %(levelprefix)s %(message)s"
+    LOGGING_CONFIG["formatters"]["default"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
     LOGGING_CONFIG["formatters"]["access"][
         "fmt"
     ] = '[%(asctime)s] %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
+    LOGGING_CONFIG["formatters"]["access"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
     uvicorn.run(
         app,
         host=server_args.host,
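The two added lines give uvicorn's default and access log formatters an explicit datefmt, so timestamps render as, e.g., 2024-10-18 14:30:00. A self-contained sketch of the same pattern, assuming uvicorn's stock LOGGING_CONFIG layout; the FastAPI app, host, and port are placeholders:

```python
# Standalone sketch: apply a fixed timestamp format to both of uvicorn's
# built-in formatters, then hand the modified config to uvicorn.run().
import copy

import uvicorn
from fastapi import FastAPI
from uvicorn.config import LOGGING_CONFIG

app = FastAPI()  # placeholder for the server's actual app

log_config = copy.deepcopy(LOGGING_CONFIG)  # don't mutate the shared default
log_config["formatters"]["default"]["fmt"] = "[%(asctime)s] %(levelprefix)s %(message)s"
log_config["formatters"]["access"]["fmt"] = (
    '[%(asctime)s] %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
)
for name in ("default", "access"):
    log_config["formatters"][name]["datefmt"] = "%Y-%m-%d %H:%M:%S"

if __name__ == "__main__":
    uvicorn.run(app, host="127.0.0.1", port=8000, log_config=log_config)
```

Deep-copying LOGGING_CONFIG and passing it via log_config keeps the change local to one server, whereas the diff above mutates the module-level dict in place before calling uvicorn.run.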