-
Notifications
You must be signed in to change notification settings - Fork 449
/
Copy path: pyproject.toml
104 lines (95 loc) · 2.57 KB
/
pyproject.toml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
# PEP 517 build backend: maturin builds the Rust extension for this package.
[build-system]
requires = ["maturin>=1,<2"]
build-backend = "maturin"

[project]
name = "deltalake"
version = "0.22.4"
description = "Native Delta Lake Python binding based on delta-rs with Pandas integration"
readme = "README.md"
license = { file = "licenses/deltalake_license.txt" }
requires-python = ">=3.9"
keywords = ["deltalake", "delta", "datalake", "pandas", "arrow"]
classifiers = [
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
dependencies = [
    "polars>=1.20.0",
    "pyarrow>=16",
]

[project.optional-dependencies]
pandas = ["pandas"]
# Sorted alphabetically per PyPA convention for PEP 508 dependency lists.
devel = [
    "azure-storage-blob==12.20.0",
    # keep mypy and ruff versions in sync with .github/workflows/python_build.yml
    "mypy==1.10.1",
    "packaging>=20",
    "pip>=24.0",
    "pytest",
    "pytest-benchmark",
    "pytest-cov",
    "pytest-mock",
    "pytest-timeout",
    # keep mypy and ruff versions in sync with .github/workflows/python_build.yml
    "ruff==0.5.2",
    "sphinx<=4.5",
    "sphinx-rtd-theme",
    "toml",
    "wheel",
]
pyspark = [
    "delta-spark",
    "numpy==1.26.4",  # pyspark is not compatible with latest numpy
    "pyspark",
]

[project.urls]
documentation = "https://delta-io.github.io/delta-rs/"
repository = "https://github.com/delta-io/delta-rs/tree/main/python/"

[tool.maturin]
# The compiled Rust extension is importable as deltalake._internal.
module-name = "deltalake._internal"

# Strict-leaning mypy configuration for the Python package (tests excluded).
[tool.mypy]
files = "deltalake/*.py"
exclude = "^tests"
mypy_path = "./stubs"
disallow_any_generics = true
disallow_subclassing_any = true
disallow_untyped_calls = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
disallow_untyped_decorators = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_return_any = false
implicit_reexport = true
strict_equality = true

[tool.ruff.lint]
select = [
    "E",    # pycodestyle errors
    "F",    # pyflakes
    "I",    # isort
    "RUF",  # ruff-specific rules
]
ignore = ["E501"]  # do not flag long lines

[tool.ruff.lint.isort]
known-first-party = ["deltalake"]

[tool.pytest.ini_options]
# Integration and benchmark tests are opt-in: deselected by default,
# run them by overriding the -m expression on the command line.
addopts = "-v -m 'not integration and not benchmark'"
testpaths = ["tests", "deltalake"]
markers = [
    "integration: marks tests as integration tests (deselect with '-m \"not integration\"')",
    "s3: marks tests as integration tests with S3 (deselect with '-m \"not s3\"')",
    "azure: marks tests as integration tests with Azure Blob Store",
    "pandas: marks tests that require pandas",
    "pyspark: marks tests that require pyspark",
]

[tool.coverage.run]
# Measure branch coverage, restricted to the deltalake package.
branch = true
source = ["deltalake"]