Fix sdist and wheels
ghstack-source-id: 84ce73854d2348f5f4446c1fa141efaadffc614c
Pull Request resolved: https://github.com/fairinternal/xformers/pull/420

__original_commit__ = fairinternal/xformers@191ed045f7cc308d7e263eff4f40bc51714afcd4
danthe3rd authored and xFormers Bot committed Jan 12, 2023
1 parent f7f2fcb commit bf5a358
Showing 4 changed files with 35 additions and 22 deletions.
5 changes: 3 additions & 2 deletions .github/workflows/wheels.yml
@@ -1,6 +1,7 @@
name: Build wheels

on:
+ workflow_dispatch: {}
push:
branches:
- main
@@ -116,13 +117,13 @@ jobs:
path: dist/*.whl

- name: Upload wheel to PyPi
- if: matrix.config.publish
+ if: ${{ github.ref_name == 'main' && matrix.config.publish }}
run: $PY -m twine upload dist/*.whl
env:
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}

- name: Upload source distribution to PyPi
- if: matrix.config.publish && matrix.sdist
+ if: ${{ github.ref_name == 'main' && matrix.config.publish && matrix.sdist }}
run: |
rm -rf dist/
# unpin pytorch version
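The two `if:` conditions above gate publishing on both the matrix flags and the branch name, so wheels and the sdist are only pushed to PyPI from `main`. As a plain-Python illustration of that logic (the helpers below are hypothetical and not part of the workflow):

```python
# Hypothetical helpers mirroring the workflow's publish conditions.
def should_publish_wheel(ref_name: str, publish: bool) -> bool:
    # matches: github.ref_name == 'main' && matrix.config.publish
    return ref_name == "main" and publish


def should_publish_sdist(ref_name: str, publish: bool, sdist: bool) -> bool:
    # matches: github.ref_name == 'main' && matrix.config.publish && matrix.sdist
    return ref_name == "main" and publish and sdist


assert should_publish_wheel("main", True)
assert not should_publish_sdist("some-feature-branch", True, True)
```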
10 changes: 8 additions & 2 deletions .github/workflows/win-build.yml
@@ -47,5 +47,11 @@ jobs:
$PY -c "import torch; print('torch', torch.__version__)"
$PY -c "import torch; print('torch.cuda', torch.version.cuda)"
- - name: Build
- run: $PY setup.py develop
+ - name: Create sdist
+ run: $PY setup.py sdist
+
+ - name: Build from sdist
+ run: $PY -m pip install -v dist/*
+
+ - name: Info
+ run: $PY -m xformers.info
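The Windows job now builds an sdist and installs from it instead of running `setup.py develop`, then imports the package via `xformers.info`. A rough local equivalent of those steps, as a sketch that assumes it is run from a repository checkout with a suitable Python environment:

```python
# Sketch: reproduce the CI steps locally - build an sdist, install it, print xformers info.
# Assumes the repository root is the current working directory and dist/ starts empty.
import glob
import subprocess
import sys

subprocess.run([sys.executable, "setup.py", "sdist"], check=True)
sdist = sorted(glob.glob("dist/*"))[-1]  # pick an archive from dist/
subprocess.run([sys.executable, "-m", "pip", "install", "-v", sdist], check=True)
subprocess.run([sys.executable, "-m", "xformers.info"], check=True)
```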
16 changes: 9 additions & 7 deletions MANIFEST.in
@@ -1,10 +1,12 @@
include LICENSE
include requirements.txt
include version.txt
- recursive-include xformers/components/attention/csrc/ *
- recursive-include third_party/sputnik/ *
- recursive-include third_party/cutlass/include/ *
- recursive-include third_party/cutlass/examples/ *
- recursive-include third_party/flash-attention/csrc/ *
- recursive-exclude third_party/flash-attention/csrc/flash_attn/cutlass/ *
- recursive-include third_party/flash-attention/flash_attn/ *
+
+ recursive-include xformers/csrc *
+ recursive-include third_party/sputnik *
+ recursive-include third_party/cutlass/include *
+ recursive-include third_party/cutlass/examples *
+ recursive-include third_party/flash-attention/csrc *
+ recursive-include third_party/flash-attention/flash_attn *
+
+ prune third_party/flash-attention/csrc/flash_attn/cutlass/docs
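One way to sanity-check these MANIFEST.in changes is to list the generated sdist and confirm that the third_party sources are packaged while the pruned cutlass docs are not. A minimal sketch, assuming an sdist tarball already exists under dist/ (the prefixes below are taken from the rules above):

```python
# Sketch: inspect an existing sdist (built e.g. with `python setup.py sdist`)
# and check which third_party paths made it in.
import glob
import tarfile

sdist_path = sorted(glob.glob("dist/*.tar.gz"))[-1]  # newest-named sdist in dist/
with tarfile.open(sdist_path) as tar:
    names = tar.getnames()

wanted = [
    "xformers/csrc",
    "third_party/sputnik",
    "third_party/cutlass/include",
    "third_party/flash-attention/csrc",
]
for prefix in wanted:
    present = any(prefix in name for name in names)
    print(f"{prefix}: {'found' if present else 'MISSING'}")

# The pruned docs directory should not appear in the archive.
pruned = "third_party/flash-attention/csrc/flash_attn/cutlass/docs"
print("pruned docs excluded:", not any(pruned in name for name in names))
```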
26 changes: 15 additions & 11 deletions setup.py
@@ -8,6 +8,7 @@
import datetime
import distutils.command.clean
import glob
+ import importlib.util
import json
import os
import platform
@@ -27,7 +28,7 @@
CUDAExtension,
)

- this_dir = os.path.dirname(os.path.abspath(__file__))
+ this_dir = os.path.dirname(__file__)


def get_extra_nvcc_flags_for_build_type() -> List[str]:
@@ -130,7 +131,6 @@ def get_flash_attention_extensions(cuda_version: int, extra_compile_args):
if not nvcc_archs_flags:
return []

- this_dir = os.path.dirname(os.path.abspath(__file__))
flash_root = os.path.join(this_dir, "third_party", "flash-attention")
if not os.path.exists(flash_root):
raise RuntimeError(
@@ -142,7 +142,7 @@ def get_flash_attention_extensions(cuda_version: int, extra_compile_args):
CUDAExtension(
name="xformers._C_flashattention",
sources=[
- os.path.join(this_dir, "third_party", "flash-attention", path)
+ os.path.join("third_party", "flash-attention", path)
for path in [
"csrc/flash_attn/fmha_api.cpp",
"csrc/flash_attn/src/fmha_fwd_hdim32.cu",
@@ -180,8 +180,7 @@ def get_flash_attention_extensions(cuda_version: int, extra_compile_args):


def get_extensions():
- this_dir = os.path.dirname(os.path.abspath(__file__))
- extensions_dir = os.path.join(this_dir, "xformers", "csrc")
+ extensions_dir = os.path.join("xformers", "csrc")

sources = glob.glob(os.path.join(extensions_dir, "**", "*.cpp"), recursive=True)
source_cuda = glob.glob(os.path.join(extensions_dir, "**", "*.cu"), recursive=True)
@@ -251,8 +250,6 @@ def get_extensions():
cuda_version=cuda_version, extra_compile_args=extra_compile_args
)

- sources = [os.path.join(extensions_dir, s) for s in sources]
-
ext_modules.append(
extension(
"xformers._C",
@@ -314,10 +311,17 @@ def build_extensions(self) -> None:

try:
# when installing as a source distribution, the version module should exist
- from xformers.version import __version__
-
- version = __version__
- except ModuleNotFoundError:
+ # Let's import it manually to not trigger the load of the C++
+ # library - which does not exist yet, and creates a WARNING
+ spec = importlib.util.spec_from_file_location(
+ "xformers_version", os.path.join(this_dir, "xformers", "version.py")
+ )
+ if spec is None or spec.loader is None:
+ raise FileNotFoundError()
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+ version = module.__version__
+ except FileNotFoundError:
if os.getenv("BUILD_VERSION"): # In CI
version = os.getenv("BUILD_VERSION", "0.0.0")
else:
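For context, the version-loading pattern introduced above can be exercised on its own. The sketch below is a standalone script, not part of the commit, that reads `__version__` from `xformers/version.py` via `importlib.util` without importing the xformers package itself, so no compiled extension is loaded as a side effect (the relative path to `version.py` is an assumption about the source layout):

```python
# Minimal sketch, assuming it runs from a source tree that contains xformers/version.py.
# Mirrors the pattern added to setup.py above: execute only version.py, never
# xformers/__init__.py, so the not-yet-built C++ library is never touched.
import importlib.util
import os

this_dir = os.path.dirname(__file__)

spec = importlib.util.spec_from_file_location(
    "xformers_version", os.path.join(this_dir, "xformers", "version.py")
)
if spec is None or spec.loader is None:
    raise FileNotFoundError("xformers/version.py not found")
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)  # executes only version.py
print(module.__version__)
```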
