Commit

shuffle some dependency versions around
winglian committed Apr 22, 2024
1 parent f807516 commit 8401b6f
Showing 2 changed files with 5 additions and 4 deletions.
docker/Dockerfile-beta (7 changes: 4 additions & 3 deletions)

@@ -4,7 +4,9 @@ RUN apt update && apt install -y python3.10-venv git-lfs
 
 RUN python3 -m pip install --upgrade pip && \
     pip install packaging && \
-    pip install -U torch==2.2.2
+    pip uninstall torch-tensorrt && \
+    pip install -U torch==2.2.2 && \
+    pip install git+https://github.com/NVIDIA/TransformerEngine.git@stable
 
 RUN groupadd axolotl && \
     useradd -m -g axolotl -s /bin/bash axolotl && \
@@ -20,8 +22,7 @@ ENV PATH="/home/axolotl/venv/axolotl/bin:$PATH"
 
 RUN echo "source /home/axolotl/venv/axolotl/bin/activate" >> /home/axolotl/.bashrc
 
-RUN git lfs install --skip-repo && \
-    pip3 install awscli
+RUN git lfs install --skip-repo
 
 WORKDIR /workspace
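For anyone trying the updated image locally, here is a minimal sketch of building and entering it; the image tag and run flags are illustrative assumptions, not part of this commit.

# Hypothetical local build of the beta image (tag name is an assumption)
docker build -f docker/Dockerfile-beta -t axolotl:beta .
# Start a shell with GPU access to exercise the new torch/TransformerEngine install
docker run --gpus all --rm -it axolotl:beta /bin/bash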
setup.py (2 changes: 1 addition & 1 deletion)

@@ -68,7 +68,7 @@ def parse_requirements():
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.5.5",
+            "flash-attn>=2.4.2",
         ],
         "fused-dense-lib": [
             "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",
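As a usage note, the "flash-attn" key above is an optional dependency group under extras_require; a minimal sketch of installing it from a source checkout follows (the editable-install form is an assumption, not something this commit changes).

# Hypothetical install of the optional flash-attn extra from a source checkout
pip install -e ".[flash-attn]"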
