Skip to content

Commit

Permalink
feat(ruff): enable flake8-annotations, flake8-2020, pycodestyle (#22404)
Browse files Browse the repository at this point in the history
  • Loading branch information
hongbo-miao authored Jan 8, 2025
1 parent ce0a4f9 commit 198f0ed
Show file tree
Hide file tree
Showing 154 changed files with 357 additions and 270 deletions.
8 changes: 6 additions & 2 deletions .ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,7 @@ exclude = [
[lint]
select = [
"AIR", # Airflow
"ANN", # flake8-annotations
"ASYNC", # flake8-async
"C90", # mccabe
"COM", # flake8-commas
Expand All @@ -197,9 +198,11 @@ select = [
"S", # flake8-bandit
"TRY", # tryceratops
"UP", # pyupgrade
"W", # pycodestyle
"YTT", # flake8-2020
]
ignore = [
"S101",
"COM812",
"D100",
"D101",
"D102",
Expand All @@ -210,9 +213,10 @@ ignore = [
"D203",
"D212",
"E501",
"ISC001",
"PLR0913",
"PLR0915",
"PLR2004",
"S101",
"TRY003",
"COM812",
]
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -737,6 +737,8 @@ The diagram illustrates the repository's architecture, which is considered overl
- **End-to-end testing**
- **Contract testing**
- **Mutation testing**
- **Penetration testing**
- **Ethical testing**
- **Performance testing**
- **Permutation testing**
- **Holdout testing**
Expand Down
2 changes: 1 addition & 1 deletion aerospace/hm-aerosandbox/src/dummy_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
2 changes: 1 addition & 1 deletion aerospace/hm-openaerostruct/src/dummy_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
2 changes: 1 addition & 1 deletion api-python/routers/health_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
client = TestClient(app)


def test_get_health():
def test_get_health() -> None:
response = client.get("/")
assert response.status_code == 200
assert response.json() == {"api": "ok"}
2 changes: 1 addition & 1 deletion api-python/routers/motor.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
config = Config()


def get_producer():
def get_producer() -> Producer:
producer = Producer({"bootstrap.servers": config.KAFKA_BOOTSTRAP_SERVERS})
try:
yield producer
Expand Down
2 changes: 1 addition & 1 deletion api-python/routers/motor_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@


@patch("routers.motor.Producer")
def test_generate_motor_data(mock_producer):
def test_generate_motor_data(mock_producer: MagicMock) -> None:
mock_producer_instance = MagicMock()
mock_producer.return_value = mock_producer_instance

Expand Down
2 changes: 1 addition & 1 deletion api-python/utils/kafka_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from utils.logger import logger


def delivery_report(err: cimpl.KafkaError, msg: cimpl.Message):
def delivery_report(err: cimpl.KafkaError, msg: cimpl.Message) -> None:
if err is not None:
logger.error(f"Message delivery failed: {err}")
else:
Expand Down
4 changes: 2 additions & 2 deletions api-rust/scripts/download_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
logger = logging.getLogger(__name__)


def download_labels():
def download_labels() -> None:
labels_url = (
"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt"
)
Expand All @@ -24,7 +24,7 @@ def download_labels():
logger.info("Labels file already exists")


def download_resnet18():
def download_resnet18() -> None:
model_path = Path("models/resnet18.ot")

if not model_path.exists():
Expand Down
26 changes: 19 additions & 7 deletions authorization/hm-opal-client/opal_fetcher_postgres/provider.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# https://github.com/permitio/opal-fetcher-postgres

import json
from types import TracebackType
from typing import Any, ClassVar

import asyncpg
Expand Down Expand Up @@ -59,7 +60,7 @@ def __init__(self, event: PostgresFetchEvent) -> None:
def parse_event(self, event: FetchEvent) -> PostgresFetchEvent:
return PostgresFetchEvent(**event.dict(exclude={"config"}), config=event.config)

async def __aenter__(self):
async def __aenter__(self) -> "PostgresFetchProvider":
# self._event: PostgresFetchEvent # type casting

dsn: str = self._event.url
Expand All @@ -69,7 +70,7 @@ async def __aenter__(self):
else self._event.config.connection_params.dict(exclude_none=True)
)

self._connection: asyncpg.Connection = await asyncpg.connect(
self._connection = await asyncpg.connect(
dsn,
**connection_params,
)
Expand All @@ -81,31 +82,42 @@ async def __aenter__(self):
schema="pg_catalog",
)

self._transaction: Transaction = self._connection.transaction(readonly=True)
self._transaction = self._connection.transaction(readonly=True)
await self._transaction.__aenter__()

return self

async def __aexit__(self, exc_type=None, exc_val=None, tb=None):
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
tb: TracebackType | None,
) -> None:
if self._transaction is not None:
await self._transaction.__aexit__(exc_type, exc_val, tb)
if self._connection is not None:
await self._connection.close()

async def _fetch_(self):
async def _fetch_(self) -> list[asyncpg.Record]:
if self._event.config is None:
logger.warning("Incomplete fetcher config!")
return
return []

logger.debug(f"{self.__class__.__name__} fetching from {self._url}")

if self._event.config.fetch_one:
if self._connection is None:
return []
row = await self._connection.fetchrow(self._event.config.query)
return [row]
else:
if self._connection is None:
return []
return await self._connection.fetch(self._event.config.query)

async def _process_(self, records: list[asyncpg.Record]):
async def _process_(
self, records: list[asyncpg.Record]
) -> dict[str, asyncpg.Record] | list[asyncpg.Record]:
if self._event.config is not None and self._event.config.fetch_one:
if records and len(records) > 0:
return dict(records[0])
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
2 changes: 1 addition & 1 deletion cloud-computing/hm-skypilot/dummy_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
2 changes: 1 addition & 1 deletion cloud-infrastructure/hm-pulumi/dummy_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from utils.train import train


def model_fn(model_dir):
def model_fn(model_dir: str) -> Net:
if torch.cuda.is_available():
device = "cuda"
elif torch.backends.mps.is_available():
Expand Down
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
import torch.nn.functional as F  # noqa: N812
from torch import nn
from torch import Tensor, nn


class Net(nn.Module):
def __init__(self):
def __init__(self) -> None:
super().__init__()
self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
self.conv2_drop = nn.Dropout2d()
self.fc1 = nn.Linear(320, 50)
self.fc2 = nn.Linear(50, 10)

def forward(self, x):
def forward(self, x: Tensor) -> Tensor:
x = F.relu(F.max_pool2d(self.conv1(x), 2))
x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
x = x.view(-1, 320)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import torch
import torch.distributed as dist


def average_gradients(model):
def average_gradients(model: torch.nn.Module) -> None:
# Gradient averaging.
size = float(dist.get_world_size())
for param in model.parameters():
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import os


def get_args():
def get_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"--batch-size",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,18 @@
import logging
from typing import Any

import torch
import torch.utils.data
import torch.utils.data.distributed
from torch.utils.data import DataLoader
from torchvision import datasets, transforms

logger = logging.getLogger(__name__)


def get_test_data_loader(test_batch_size, training_dir, **kwargs):
def get_test_data_loader(
test_batch_size: int,
training_dir: str,
**kwargs: Any, # noqa: ANN401
) -> DataLoader:
logger.info("Get test data loader")
return torch.utils.data.DataLoader(
datasets.MNIST(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,26 +1,31 @@
import logging
from typing import Any

import torch
import torch.utils.data
import torch.utils.data.distributed
from torch.utils.data import DataLoader, Dataset
from torch.utils.data.distributed import DistributedSampler
from torchvision import datasets, transforms

logger = logging.getLogger(__name__)


def get_train_data_loader(batch_size, training_dir, is_distributed, **kwargs):
def get_train_data_loader(
batch_size: int,
training_dir: str,
is_distributed: bool,
**kwargs: Any, # noqa: ANN401
) -> DataLoader:
logger.info("Get train data loader")
dataset = datasets.MNIST(
dataset: Dataset = datasets.MNIST(
training_dir,
train=True,
transform=transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))],
),
)
train_sampler = (
torch.utils.data.distributed.DistributedSampler(dataset)
if is_distributed
else None

train_sampler: DistributedSampler | None = (
DistributedSampler(dataset) if is_distributed else None
)
return torch.utils.data.DataLoader(
dataset,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
import logging
import os
from pathlib import Path

import torch
import torch.utils.data
import torch.utils.data.distributed
from torch.nn import Module

logger = logging.getLogger(__name__)


def save_model(model, model_dir):
def save_model(model: Module, model_dir_path: Path) -> None:
logger.info("Save the model.")
path = os.path.join(model_dir, "model.pth")
path = os.path.join(model_dir_path, "model.pth")
torch.save(model.cpu().state_dict(), path)
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,15 @@

import torch
import torch.nn.functional as F # noqa: N812
import torch.utils.data
import torch.utils.data.distributed
from torch.nn import Module
from torch.utils.data import DataLoader

logger = logging.getLogger(__name__)


def test(model, test_loader, device):
def test(model: Module, test_loader: DataLoader, device: str | torch.device) -> None:
model.eval()
test_loss = 0
test_loss = 0.0
correct = 0
with torch.no_grad():
for data, target in test_loader:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import argparse
import logging
import os
from pathlib import Path

import torch
import torch.distributed as dist
Expand All @@ -17,7 +19,7 @@
logger = logging.getLogger(__name__)


def train(args):
def train(args: argparse.Namespace) -> None:
is_distributed = len(args.hosts) > 1 and args.backend is not None
logger.info(f"Distributed training: {is_distributed}")

Expand Down Expand Up @@ -90,4 +92,4 @@ def train(args):
f"Train Epoch: {epoch} [{batch_idx * len(device_data)}/{len(train_loader.sampler)} ({100.0 * batch_idx / len(train_loader):.0f}%)] Loss: {loss.item():.6f}",
)
test(model, test_loader, device)
save_model(model, args.model_dir)
save_model(model, Path(args.model_dir))
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
2 changes: 1 addition & 1 deletion cloud-security/hm-prowler/dummy_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
2 changes: 1 addition & 1 deletion computer-vision/hm-imagebind/src/dummy_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
class TestDummy:
def test_dummy(self):
def test_dummy(self) -> None:
assert 1 + 1 == 2
Loading

0 comments on commit 198f0ed

Please sign in to comment.