[Core] Make Ray an optional "extras" requirement
Still included in built docker images
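In practice, a plain pip install of the wheel no longer pulls in ray; users who want Ray-backed distributed serving opt in through the new extra. A minimal sketch of the two install paths, assuming the published vllm package name:

    # default install: Ray is no longer a hard dependency
    pip install vllm

    # opt in to the Ray extra introduced here
    pip install "vllm[ray]"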
njhill authored and joerunde committed Jun 17, 2024
1 parent be5e8a1 commit 1f2da25
Showing 4 changed files with 17 additions and 7 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
@@ -118,7 +118,7 @@ RUN ldconfig /usr/local/cuda-12.4/compat/
 # install vllm wheel first, so that torch etc will be installed
 RUN --mount=type=bind,from=build,src=/workspace/dist,target=/vllm-workspace/dist \
     --mount=type=cache,target=/root/.cache/pip \
-    pip install dist/*.whl --verbose
+    pip install "$(echo dist/*.whl)[ray]" --verbose
 #################### vLLM installation IMAGE ####################


1 change: 0 additions & 1 deletion requirements-cuda.txt
@@ -2,7 +2,6 @@
 -r requirements-common.txt
 
 # Dependencies for NVIDIA GPUs
-ray >= 2.9
 nvidia-ml-py # for pynvml package
 torch == 2.3.0
 xformers == 0.0.26.post1 # Requires PyTorch 2.3.0
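For source builds that install straight from requirements-cuda.txt, ray is no longer brought in automatically; if Ray-based multi-GPU or multi-node serving is still needed, it can be added back by hand at the same floor the new extra uses (a hedged sketch mirroring the removed pin):

    pip install -r requirements-cuda.txt
    pip install "ray>=2.9"   # only if Ray-backed distributed execution is required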
1 change: 0 additions & 1 deletion requirements-rocm.txt
@@ -2,5 +2,4 @@
 -r requirements-common.txt
 
 # Dependencies for AMD GPUs
-ray >= 2.10.0
 pytest-asyncio
20 changes: 16 additions & 4 deletions setup.py
@@ -6,7 +6,7 @@
 import subprocess
 import sys
 from shutil import which
-from typing import Dict, List
+from typing import Dict, List, Optional
 
 import torch
 from packaging.version import Version, parse
@@ -392,6 +392,20 @@ def _read_requirements(filename: str) -> List[str]:
     return requirements
 
 
+def get_extra_requirements() -> Optional[Dict[str, List[str]]]:
+    extras = {"tensorizer": ["tensorizer>=2.9.0"]}
+    if _is_cuda():
+        extras["ray"] = ["ray>=2.9"]
+    elif _is_hip():
+        extras["ray"] = ["ray==2.9.3"]
+    elif _is_neuron() or _is_cpu():
+        pass
+    else:
+        raise ValueError(
+            "Unsupported platform, please use CUDA, ROCm, Neuron, or CPU.")
+    return extras
+
+
 ext_modules = []
 
 if _is_cuda() or _is_hip():
@@ -437,9 +451,7 @@ def _read_requirements(filename: str) -> List[str]:
     python_requires=">=3.8",
     install_requires=get_requirements(),
     ext_modules=ext_modules,
-    extras_require={
-        "tensorizer": ["tensorizer>=2.9.0"],
-    },
+    extras_require=get_extra_requirements(),
     cmdclass={"build_ext": cmake_build_ext} if _build_custom_ops() else {},
     package_data=package_data,
 )
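Because the extras table is computed while setup.py runs, the wheel's metadata reflects the platform it was built on: a CUDA build advertises ray>=2.9, a ROCm build pins ray==2.9.3, and Neuron/CPU builds expose only the tensorizer extra. A quick, hedged way to check what an installed build advertises, and to combine extras, using the standard importlib.metadata API:

    # list the extras recorded in the installed package's metadata
    python -c "from importlib.metadata import metadata; print(metadata('vllm').get_all('Provides-Extra'))"

    # extras can be combined in a single install
    pip install "vllm[ray,tensorizer]"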
