Skip to content

Commit

Permalink
fix requirements + aggregate setup, requirements, and config in pyproject (#453)
Browse files Browse the repository at this point in the history
  • Loading branch information
Borda authored Aug 22, 2024
1 parent a55c9b1 commit 2b47e10
Show file tree
Hide file tree
Showing 9 changed files with 80 additions and 51 deletions.
2 changes: 1 addition & 1 deletion .github/azure-gpu-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ jobs:
python -c "import torch ; mgpu = torch.cuda.device_count() ; assert mgpu == 2, f'GPU: {mgpu}'"
displayName: 'Image info & NVIDIA'
- script: pip install pytest -r requirements.txt
- script: pip install ".[all]" "pytest"
displayName: 'Install dependencies'

- bash: pytest -v --durations=10 --disable-pytest-warnings --strict-markers --color=yes
Expand Down
37 changes: 25 additions & 12 deletions .github/workflows/cpu-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,14 @@ defaults:
shell: bash

jobs:
cpu-tests:
pytester:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include:
- {os: "macOS-11", python-version: "3.10"}
- {os: "ubuntu-20.04", python-version: "3.10"}
- {os: "windows-2022", python-version: "3.10"}
os: ["ubuntu-22.04", "macos-13", "windows-2022"]
python-version: ["3.10"]
pkg-install: ["no", "yes"]
timeout-minutes: 15

steps:
Expand All @@ -35,18 +34,32 @@ jobs:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: |
requirements.txt
pyproject.toml
setup.py
- name: Run tests without the package installed
- name: Install package & dependencies
run: |
pip install pytest -r requirements.txt
pip install ".[all]" pytest
pip list
pytest --disable-pytest-warnings --strict-markers --color=yes
- name: Drop package itself
if: matrix.pkg-install == 'no'
run: pip uninstall -y lit-llama

- name: Run tests
run: |
pip install . --no-deps
run: pytest -v --durations=10


pytest -v --durations=10 --disable-pytest-warnings --strict-markers --color=yes
testing-guardian:
runs-on: ubuntu-latest
needs: pytester
if: always()
steps:
- run: echo "${{ needs.pytester.result }}"
- name: failing...
if: needs.pytester.result == 'failure'
run: exit 1
- name: cancelled or skipped...
if: contains(fromJSON('["cancelled", "skipped"]'), needs.pytester.result)
timeout-minutes: 1
run: sleep 90
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,6 @@ wandb

# downloaded by our tests
original_model.py
original_adapter.py
original_adapter.py

.ruff_cache/
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ cd lit-llama
install dependencies

```bash
pip install -r requirements.txt
pip install -e ".[all]"
```

You are all set! 🎉
Expand Down
2 changes: 1 addition & 1 deletion howto/tpus.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ Now that you are in the machine, let's clone the repository and install the depe
```shell
git clone https://github.com/Lightning-AI/lit-llama
cd lit-llama
pip install -r requirements.txt
pip install -e ".[all]"
```

By default, computations will run using the new (and experimental) PjRT runtime. Still, it's recommended that you set the following environment variables
Expand Down
46 changes: 46 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"

[project]
name = "lit-llama"
version = "0.1.0"
description = "Implementation of the LLaMA language model"
license = {text = "Apache-2.0"}
authors = [
{ name = "Lightning AI", email = "community@lightning.ai" }
]
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"torch>=2.0.0",
"lightning @ git+https://github.com/Lightning-AI/lightning@master",
"sentencepiece",
"bitsandbytes",
]
classifiers = [
"Topic :: Text Processing"
]

[project.optional-dependencies]
all = [
"tqdm", # convert_checkpoint.py
"numpy <2.0", # train.py dataset memmap
"jsonargparse[signatures]", # generate.py, convert_checkpoint.py CLI
"datasets", # evaluate.py
    "zstandard",  # prepare_redpajama.py
]

[tool.setuptools.packages.find]
where = ["."] # list of folders that contain the packages (["."] by default)
include = ["lit_llama"] # package names should match these glob patterns (["*"] by default)
exclude = [] # exclude packages matching these glob patterns (empty by default)
namespaces = false # to disable scanning PEP 420 namespaces (true by default)


[tool.pytest.ini_options]
addopts = [
"--strict-markers",
"--color=yes",
"--disable-pytest-warnings",
]
9 changes: 0 additions & 9 deletions requirements.txt

This file was deleted.

27 changes: 2 additions & 25 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,5 @@
# Copyright Lightning AI. Licensed under the Apache License 2.0, see LICENSE file.

import os
from setuptools import setup

from setuptools import setup, find_packages


_PATH_ROOT = os.path.dirname(__file__)

with open(os.path.join(_PATH_ROOT, "README.md"), encoding="utf-8") as fo:
readme = fo.read()

setup(
name='lit-llama',
version='0.1.0',
description='Implementation of the LLaMA language model',
author='Lightning AI',
url='https://github.com/lightning-AI/lit-llama',
install_requires=[
"torch>=2.0.0",
"lightning @ git+https://github.com/Lightning-AI/lightning@master",
"sentencepiece",
"bitsandbytes",
],
packages=find_packages(),
long_description=readme,
long_description_content_type="text/markdown",
)
setup()
2 changes: 1 addition & 1 deletion tests/test_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ def test_adapter_parity(orig_llama_adapter):

expected = orig_llama_model(token_sample, 0)
out = llama_model(token_sample)
assert torch.allclose(out, expected)
assert torch.allclose(out, expected, atol=1e-5, rtol=1e-5)


@pytest.mark.skipif(sys.platform in ("win32", "darwin"), reason="torch.compile not supported on this platform")
Expand Down

0 comments on commit 2b47e10

Please sign in to comment.