# Pinned vLLM commits, one per accelerator backend. Builds check out these
# exact SHAs so the resulting install is reproducible; bump deliberately.
commit_cuda := d243e9dc7e2c9c2e36a4150ec8e64809cb55c01b
commit_rocm := 4e0929e6e4fa0a3d09d358715c288020ea9dc247
# Clone the CUDA fork of vLLM (only if ./vllm does not exist yet) and build
# it at the pinned commit. Command target, not a file — declare it phony so a
# stray file named `build-vllm-cuda` can never mask the rule.
.PHONY: build-vllm-cuda
build-vllm-cuda:
	if [ ! -d 'vllm' ]; then \
		pip install -U ninja packaging --no-cache-dir && \
		git clone https://github.com/Narsil/vllm.git vllm; \
	fi
	cd vllm && git fetch origin && git checkout $(commit_cuda) && python setup.py build
# Editable (pip -e) install of the CUDA vLLM checkout built above. Re-fetches
# and re-checks-out the pinned commit so the install matches even if the
# working tree drifted between build and install.
.PHONY: install-vllm-cuda
install-vllm-cuda: build-vllm-cuda
	cd vllm && git fetch origin && git checkout $(commit_cuda) && pip install -e .
# Clone the ROCm fork of vLLM (only if ./vllm does not exist yet) and build
# it at the pinned commit for the MI200/MI300 GPU architectures.
# `git fetch origin` (explicit remote) matches the CUDA targets; for a fresh
# clone the default remote is origin, so behavior is unchanged.
.PHONY: build-vllm-rocm
build-vllm-rocm:
	if [ ! -d 'vllm' ]; then \
		pip install -U ninja packaging --no-cache-dir && \
		git clone https://github.com/mht-sharma/vllm.git vllm; \
	fi
	cd vllm && git fetch origin && git checkout $(commit_rocm) && \
		PYTORCH_ROCM_ARCH="gfx90a;gfx942" python setup.py build
# Editable (pip -e) install of the ROCm vLLM checkout built above, with the
# same PYTORCH_ROCM_ARCH restriction as the build step.
.PHONY: install-vllm-rocm
install-vllm-rocm: build-vllm-rocm
	cd vllm && git fetch origin && git checkout $(commit_rocm) && \
		PYTORCH_ROCM_ARCH="gfx90a;gfx942" pip install -e .