Upgrade tooling (#126)
## Changes

- Align our tooling with our other products
- Drop support for Python 3.8 and 3.9. Our default runtime is Jammy, which ships with Python 3.10
    - Upgraded type hints to a more modern style (see the sketch after this list)
- Fix compatibility with Python 3.11+
- The previous flake8 configuration was not being enforced properly for some reason. Migrating to ruff meant fixing a number of violations (mostly by adding low-value docstrings, to be honest)
Batalex authored Jan 21, 2025
1 parent 8bd96c0 commit 450e031
Showing 21 changed files with 528 additions and 494 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci-checks.yaml
@@ -12,8 +12,8 @@ jobs:
fail-fast: true
matrix:
python-version:
- "3.9"
- "3.10"
- "3.12"
steps:
- id: checkout
name: Checkout repo
2 changes: 1 addition & 1 deletion .github/workflows/ci-tests.yaml
@@ -17,8 +17,8 @@ jobs:
fail-fast: true
matrix:
python-version:
- "3.9"
- "3.10"
- "3.12"
needs:
- checks
steps:
356 changes: 150 additions & 206 deletions poetry.lock

Large diffs are not rendered by default.

123 changes: 63 additions & 60 deletions pyproject.toml
@@ -1,61 +1,19 @@
[tool.pytest.ini_options]
addopts = "--doctest-modules --cov=./spark8t"

[tool.flake8]
per-file-ignores = [
'__init__.py:F401',
'tests/*: D',
'tests/test_utils.py: D, F601'
]
ignore = [
# Ignored by black
'E203', 'E266', 'E501', 'W503',
# Ignored to conform to PEP257
'D203', 'D212', 'D213', 'D214', 'D215', 'D404', 'D405', 'D406', 'D407', 'D408', 'D409', 'D410', 'D411',
'D413', 'D415', 'D416', 'D417',
# Ignored to work with Sphinx
'RST303', 'RST304', 'RST307'
]
# line length is intentionally set to 80 here because black uses Bugbear
# See https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length for more details
max-line-length = "120"
max-complexity = "18"
select = ['B','C','D','E','F','W','T4','B9','RST','DAR']
docstring_style = "sphinx"

[tool.isort]
py_version = 3
profile = 'black'
known_first_party = ['spark8t','tests']
skip_gitignore = true

[tool.mypy]
follow_imports = "silent"

[[tool.mypy.overrides]]
module = [
"parameterized",
"envyaml",
"pytest"
]
ignore_missing_imports = true

[tool.poetry]
name = "spark8t"
version = "0.0.11"
description = "This project provides some utilities function and CLI commands to run Spark on K8s."
authors = [
"Canonical Data Platform <data-platform@lists.launchpad.net>"
"Canonical Data Platform <data-platform@lists.launchpad.net>",
]
license = "Apache-2.0"
readme = "README.md"
packages = []
requires-poetry = ">=2.0.0"

[tool.poetry.requires-plugins]
poetry-plugin-export = ">=1.0"

[tool.poetry.dependencies]
python = ">3.8,<4.0"
python = ">=3.10,<4.0"
lightkube = ">=0.11"
jinja2 = ">=3.1.2"
envyaml = ">=1.10.211231"
@@ -64,46 +22,91 @@ envyaml = ">=1.10.211231"
optional = true

[tool.poetry.group.fmt.dependencies]
black = ">=21.12b0"
isort = ">=5.10"
lightkube = ">=0.11"
ruff = "^0.8.6"

[tool.poetry.group.lint]
optional = true

[tool.poetry.group.lint.dependencies]
ruff = "^0.8.6"
codespell = "^2.1.0"
flake8 = ">=4.0.1"
Flake8-pyproject = ">=1.1.0"
mypy = ">=0.910"
mypy = "^1.0.0"
pytest-mypy = ">=0.10.3"
lightkube = ">=0.11"
types-pyyaml = "^6.0.12.20241230"
types-pygments = "^2.19.0.20250107"
types-colorama = "^0.4.15.20240311"
types-pyopenssl = "^24.1.0.20240722"

[tool.poetry.group.unit]
optional = true

[tool.poetry.group.unit.dependencies]
tox = ">3.21.4"
mypy = ">=0.910"
pytest-cov = ">=3.0"
pytest = ">=6.2"
pytest-mock = ">=3.10"
lightkube = ">=0.11"
pyOpenSSL = ">=23.1.1"
tox = "^4.23.2"

[tool.poetry.group.integration]
optional = true

[tool.poetry.group.integration.dependencies]
tox = ">3.21.4"
mypy = ">=0.910"
pytest-cov = ">=3.0"
pytest = ">=6.2"
pytest-mock = ">=3.10"
lightkube = ">=0.11"
parameterized = ">=0.9.0"
tox = "^4.23.2"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.pytest.ini_options]
addopts = "--doctest-modules --cov=./spark8t"

[tool.ruff]
extend-exclude = ["__pycache__", "*.egg_info"]
target-version = "py38"
src = ["spark8t", "tests"]

[tool.ruff.lint]
select = ['B', 'C', 'D', 'E', 'F', 'W', 'B9']
ignore = ["E501", "D107"]
extend-ignore = [
# Ignored by black
'E203',
'E266',
'E501',
# Ignored to conform to PEP257
'D203',
'D212',
'D213',
'D214',
'D215',
'D404',
'D405',
'D406',
'D407',
'D408',
'D409',
'D410',
'D411',
'D413',
'D415',
'D416',
'D417',
]
per-file-ignores = { "__init__.py" = ["F401"], "tests/*" = ["D"], "tests/test_utils.py" = ["F601"] }
mccabe.max-complexity = 18

[tool.ruff.lint.isort]
known-first-party = ["spark8t", "tests"]

[tool.mypy]
follow_imports = "silent"

[[tool.mypy.overrides]]
module = [
"parameterized",
"envyaml",
"pytest",
]
ignore_missing_imports = true
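
One detail worth noting in the new ruff table above: `per-file-ignores` disables F401 (unused import) for `__init__.py` files, which is what allows re-export-style imports without per-line `noqa` markers. A hypothetical package `__init__.py` showing the pattern this permits:

```python
# Hypothetical spark8t-style package __init__.py (illustrative only): these
# names are imported purely so callers can write `from <package> import Defaults`.
# Without the "__init__.py" = ["F401"] entry in per-file-ignores above, ruff's
# F401 (unused import) rule would flag both imports and each would need its
# own "# noqa: F401" comment.
from spark8t.domain import Defaults
from spark8t.utils import environ
```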
30 changes: 15 additions & 15 deletions requirements.txt
@@ -1,37 +1,37 @@
anyio==4.5.2 ; python_full_version > "3.8.0" and python_version < "4.0" \
anyio==4.5.2 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b \
--hash=sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f
certifi==2024.12.14 ; python_full_version > "3.8.0" and python_version < "4.0" \
certifi==2024.12.14 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \
--hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db
envyaml==1.10.211231 ; python_full_version > "3.8.0" and python_version < "4.0" \
envyaml==1.10.211231 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:88f8a076159e3c317d3450a5f404132b6ac91aecee4934ea72eac65f911f1244 \
--hash=sha256:8d7a7a6be12587cc5da32a587067506b47b849f4643981099ad148015a72de52
exceptiongroup==1.2.2 ; python_full_version > "3.8.0" and python_version < "3.11" \
exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" \
--hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \
--hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc
h11==0.14.0 ; python_full_version > "3.8.0" and python_version < "4.0" \
h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \
--hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761
httpcore==1.0.7 ; python_full_version > "3.8.0" and python_version < "4.0" \
httpcore==1.0.7 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c \
--hash=sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd
httpx==0.28.1 ; python_full_version > "3.8.0" and python_version < "4.0" \
httpx==0.28.1 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc \
--hash=sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad
idna==3.10 ; python_full_version > "3.8.0" and python_version < "4.0" \
idna==3.10 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
jinja2==3.1.5 ; python_full_version > "3.8.0" and python_version < "4.0" \
jinja2==3.1.5 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \
--hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb
lightkube-models==1.32.0.8 ; python_full_version > "3.8.0" and python_version < "4.0" \
lightkube-models==1.32.0.8 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:73786dac63085521f4c88aa69d86bfdc76a67da997c1770e5bdcef8482e4b2a0 \
--hash=sha256:97f6c2ab554a23a69554dd56ffbd94173fb416af6490c3a21b1e0b8e13a2bafe
lightkube==0.17.1 ; python_full_version > "3.8.0" and python_version < "4.0" \
lightkube==0.17.1 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:3d046c2c46191b3745471763710ef4ed2df4259a7405f798b577df2ae390358a \
--hash=sha256:e0d6b71476a4fa7cbda7080da1f0943e43c7e747212db9f2ec7d87415bf8d23e
markupsafe==2.1.5 ; python_full_version > "3.8.0" and python_version < "4.0" \
markupsafe==2.1.5 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
--hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
--hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
@@ -92,7 +92,7 @@ markupsafe==2.1.5 ; python_full_version > "3.8.0" and python_version < "4.0" \
--hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
--hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
--hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
pyyaml==6.0.2 ; python_full_version > "3.8.0" and python_version < "4.0" \
pyyaml==6.0.2 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \
--hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \
--hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \
@@ -146,9 +146,9 @@ pyyaml==6.0.2 ; python_full_version > "3.8.0" and python_version < "4.0" \
--hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \
--hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \
--hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4
sniffio==1.3.1 ; python_full_version > "3.8.0" and python_version < "4.0" \
sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
typing-extensions==4.12.2 ; python_full_version > "3.8.0" and python_version < "3.11" \
typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "3.11" \
--hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
--hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
1 change: 1 addition & 0 deletions spark8t/__init__.py
@@ -0,0 +1 @@
"""This project provides some utilities function and CLI commands to run Spark on K8s."""
2 changes: 2 additions & 0 deletions spark8t/cli/__init__.py
@@ -1,3 +1,5 @@
"""CLI interface for spark8t."""

import os

from spark8t.domain import Defaults
26 changes: 15 additions & 11 deletions spark8t/cli/params.py
@@ -1,18 +1,20 @@
"""Parameters module."""

import logging
from argparse import ArgumentParser, Namespace
from typing import Callable, List, Optional
from typing import Callable

from spark8t.cli import defaults
from spark8t.services import AbstractKubeInterface, KubeInterface, LightKube
from spark8t.utils import DEFAULT_LOGGING_FILE, config_from_file, environ


def parse_arguments_with(
parsers: List[Callable[[ArgumentParser], ArgumentParser]],
base_parser: Optional[ArgumentParser] = None,
parsers: list[Callable[[ArgumentParser], ArgumentParser]],
base_parser: ArgumentParser | None = None,
):
"""
Specify a chain of parsers to help parse the list of arguments to main
Specify a chain of parsers to help parse the list of arguments to main.
:param parsers: List of parsers to be applied.
:param namespace: Namespace to be used for parsing.
@@ -26,7 +28,7 @@ def parse_arguments_with(

def add_logging_arguments(parser: ArgumentParser) -> ArgumentParser:
"""
Add logging argument parsing to the existing parser context
Add logging argument parsing to the existing parser context.
:param parser: Input parser to decorate with parsing support for logging args.
"""
@@ -46,7 +48,7 @@ def add_logging_arguments(parser: ArgumentParser) -> ArgumentParser:

def add_ignore_integration_hub(parser: ArgumentParser) -> ArgumentParser:
"""
Add option to exclude the configuration provided by the Spark Integration Hub
Add option to exclude the configuration provided by the Spark Integration Hub.
:param parser: Input parser to decorate with parsing support for logging args.
"""
@@ -61,7 +63,7 @@ def add_ignore_integration_hub(parser: ArgumentParser) -> ArgumentParser:

def spark_user_parser(parser: ArgumentParser) -> ArgumentParser:
"""
Add Spark user related argument parsing to the existing parser context
Add Spark user related argument parsing to the existing parser context.
:param parser: Input parser to decorate with parsing support for Spark params.
"""
@@ -82,7 +84,7 @@ def spark_user_parser(parser: ArgumentParser) -> ArgumentParser:

def k8s_parser(parser: ArgumentParser) -> ArgumentParser:
"""
Add K8s related argument parsing to the existing parser context
Add K8s related argument parsing to the existing parser context.
:param parser: Input parser to decorate with parsing support for Spark params.
"""
@@ -107,7 +109,7 @@ def k8s_parser(parser: ArgumentParser) -> ArgumentParser:

def add_config_arguments(parser: ArgumentParser) -> ArgumentParser:
"""
Add arguments to provide extra configurations for the spark properties
Add arguments to provide extra configurations for the spark properties.
:param parser: Input parser to decorate with parsing support for deploy arguments.
"""
@@ -128,7 +130,7 @@ def add_config_arguments(parser: ArgumentParser) -> ArgumentParser:

def add_deploy_arguments(parser: ArgumentParser) -> ArgumentParser:
"""
Add deployment related argument parsing to the existing parser context
Add deployment related argument parsing to the existing parser context.
:param parser: Input parser to decorate with parsing support for deploy arguments.
"""
@@ -143,6 +145,7 @@ def add_deploy_arguments(parser: ArgumentParser) -> ArgumentParser:


def get_kube_interface(args: Namespace) -> AbstractKubeInterface:
"""Get configured kube interface."""
_class = LightKube if args.backend == "lightkube" else KubeInterface

return _class(
@@ -151,8 +154,9 @@ def get_kube_interface(args: Namespace) -> AbstractKubeInterface:


def setup_logging(
log_level: str, config_file: Optional[str], logger_name: Optional[str] = None
log_level: str, config_file: str | None, logger_name: str | None = None
) -> logging.Logger:
"""Set up logging from configuration file."""
with environ(LOG_LEVEL=log_level) as _:
config_from_file(config_file or DEFAULT_LOGGING_FILE)
return logging.getLogger(logger_name) if logger_name else logging.root
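
For context, a minimal sketch of how the updated `setup_logging` signature is called; the level and logger name are made-up values, and passing `None` for the config file falls back to `DEFAULT_LOGGING_FILE`, as the body above shows:

```python
from spark8t.cli.params import setup_logging

# Illustrative values only; None for config_file means the default logging
# configuration file (DEFAULT_LOGGING_FILE) is loaded, per the body above.
logger = setup_logging("INFO", None, "spark8t.example")
logger.info("Logging configured from the default logging file")
```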
5 changes: 3 additions & 2 deletions spark8t/cli/pyspark.py
@@ -1,9 +1,9 @@
#!/usr/bin/env python3
"""Pyspark module."""

import re
from argparse import Namespace
from logging import Logger
from typing import Optional

from spark8t.cli.params import (
add_config_arguments,
@@ -22,6 +22,7 @@


def main(args: Namespace, logger: Logger):
"""Pyspark main entrypoint."""
kube_interface = get_kube_interface(args)

registry = K8sServiceAccountRegistry(
@@ -30,7 +31,7 @@ def main(args: Namespace, logger: Logger):
else kube_interface
)

service_account: Optional[ServiceAccount] = (
service_account: ServiceAccount | None = (
registry.get_primary()
if args.username is None and args.namespace is None
else registry.get(f"{args.namespace or 'default'}:{args.username or 'spark'}")
(Diffs for the remaining changed files are not shown.)