2024-12-01 18:55:26 +08:00
|
|
|
# Install the dependencies in CI.
|
2024-10-30 02:49:08 -07:00
|
|
|
|
2024-12-01 18:55:26 +08:00
|
|
|
# Use repo from environment variable, passed from GitHub Actions
|
2024-12-16 23:02:49 +08:00
|
|
|
FLASHINFER_REPO="${FLASHINFER_REPO:-https://flashinfer.ai/whl/cu121/torch2.4/flashinfer}"
|
2024-12-01 18:55:26 +08:00
|
|
|
|
|
|
|
|
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
|
|
|
|
bash "${SCRIPT_DIR}/killall_sglang.sh"
|
2024-11-30 00:24:30 -08:00
|
|
|
|
2024-10-26 04:32:36 -07:00
|
|
|
pip install --upgrade pip
|
2024-11-30 01:41:16 +08:00
|
|
|
pip install -e "python[all]" --find-links https://flashinfer.ai/whl/cu121/torch2.4/flashinfer/
|
2024-12-01 01:47:30 -08:00
|
|
|
|
|
|
|
|
# Force reinstall flashinfer
|
2024-12-19 23:24:30 +08:00
|
|
|
pip install flashinfer==0.1.6 --find-links ${FLASHINFER_REPO} --force-reinstall --no-deps
|
2024-12-01 01:47:30 -08:00
|
|
|
|
2024-11-09 15:43:20 -08:00
|
|
|
pip install transformers==4.45.2 sentence_transformers accelerate peft
|
2024-12-01 01:47:30 -08:00
|
|
|
|
|
|
|
|
# For compiling eagle kernels
|
2024-11-25 04:58:16 -08:00
|
|
|
pip install cutex
|
2024-12-01 01:47:30 -08:00
|
|
|
|
|
|
|
|
# For compiling xgrammar kernels
|
2024-11-25 04:58:16 -08:00
|
|
|
pip install cuda-python nvidia-cuda-nvrtc-cu12
|