[Nightly] Nightly pre-build image (#7388)
### What this PR does / why we need it?
This pull request refactors the nightly image build and simplifies the logic of
the multiple workflows involved.
1. The nightly image build becomes a prerequisite when the tests are
triggered by `schedule` or `workflow_dispatch`.
2. Simplifies the pull-request selection logic.
3. Next step: implement replaceable nightly tests. Specifically, when
nightly tests are manually triggered, they can accept any optional
Docker image to meet the needs of different commits (which means the
image is customizable).
### Does this PR introduce _any_ user-facing change?
### How was this patch tested?
- vLLM version: v0.17.0
- vLLM main:
4034c3d32e
---------
Signed-off-by: wangli <wangli858794774@gmail.com>
This commit is contained in:
21
.github/workflows/_e2e_nightly_single_node.yaml
vendored
21
.github/workflows/_e2e_nightly_single_node.yaml
vendored
@@ -40,10 +40,7 @@ on:
|
||||
required: false
|
||||
type: string
|
||||
default: "v0.18.0"
|
||||
is_pr_test:
|
||||
required: true
|
||||
type: boolean
|
||||
is_run:
|
||||
should_run:
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
@@ -64,7 +61,7 @@ jobs:
|
||||
e2e-nightly:
|
||||
name: ${{ inputs.name || inputs.config_file_path || inputs.tests }}
|
||||
runs-on: ${{ inputs.runner }}
|
||||
if: ${{ inputs.is_run }}
|
||||
if: ${{ inputs.should_run }}
|
||||
timeout-minutes: 600
|
||||
container:
|
||||
image: ${{ inputs.image }}
|
||||
@@ -85,14 +82,14 @@ jobs:
|
||||
pip install uv
|
||||
|
||||
- name: uninstall vlm vllm-ascend and remove code (if pr test)
|
||||
if: ${{ inputs.is_pr_test }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
run: |
|
||||
pip uninstall -y vllm vllm-ascend || true
|
||||
cp -r /vllm-workspace/vllm-ascend/benchmark /tmp/aisbench-backup || true
|
||||
rm -rf /vllm-workspace/vllm /vllm-workspace/vllm-ascend
|
||||
|
||||
- name: Checkout vllm-project/vllm repo
|
||||
if: ${{ inputs.is_pr_test }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
repository: vllm-project/vllm
|
||||
@@ -101,27 +98,27 @@ jobs:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Checkout vllm-project/vllm-ascend repo
|
||||
if: ${{ inputs.is_pr_test }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
path: ./temp-vllm-ascend
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Move code to /vllm-workspace
|
||||
if: ${{ inputs.is_pr_test }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
run: |
|
||||
mv ./temp-vllm /vllm-workspace/vllm
|
||||
mv ./temp-vllm-ascend /vllm-workspace/vllm-ascend
|
||||
ls -R /vllm-workspace
|
||||
|
||||
- name: Install vllm-project/vllm from source
|
||||
if: ${{ inputs.is_pr_test }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
working-directory: /vllm-workspace/vllm
|
||||
run: |
|
||||
VLLM_TARGET_DEVICE=empty uv pip install -e .
|
||||
|
||||
- name: Install vllm-project/vllm-ascend
|
||||
if: ${{ inputs.is_pr_test }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
working-directory: /vllm-workspace/vllm-ascend
|
||||
env:
|
||||
PIP_EXTRA_INDEX_URL: https://mirrors.huaweicloud.com/ascend/repos/pypi
|
||||
@@ -132,7 +129,7 @@ jobs:
|
||||
uv pip install -v -e .
|
||||
|
||||
- name: Install aisbench
|
||||
if: ${{ inputs.is_pr_test }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
shell: bash -l {0}
|
||||
run: |
|
||||
cp -r /tmp/aisbench-backup /vllm-workspace/vllm-ascend/benchmark
|
||||
|
||||
Reference in New Issue
Block a user