Skip to content

Commit

Permalink
feat(ruff): enable flake8-comprehensions, flake8-copyright, flake8-datetimez (#22610)
Browse files Browse the repository at this point in the history
  • Loading branch information
hongbo-miao authored Jan 10, 2025
1 parent 5db3275 commit 972bbb6
Show file tree
Hide file tree
Showing 15 changed files with 28 additions and 25 deletions.
3 changes: 3 additions & 0 deletions .ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -179,9 +179,12 @@ select = [
"ASYNC", # flake8-async
"B", # flake8-bugbear
"BLE", # flake8-blind-except
"C4", # flake8-comprehensions
"C90", # mccabe
"COM", # flake8-commas
"CPY", # flake8-copyright
"D", # pydocstyle
"DTZ", # flake8-datetimez
"E", # pycodestyle
"EM", # flake8-errmsg
"EXE", # flake8-executable
Expand Down
4 changes: 2 additions & 2 deletions aerospace/hm-aerosandbox/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def main() -> None:
aero = vlm.run()
for k, v in aero.items():
logger.info(f"{k.rjust(4)} : {v}")
vlm.draw(show_kwargs=dict(jupyter_backend="static"))
vlm.draw(show_kwargs={"jupyter_backend": "static"})

# Operating Point Optimization
opti = asb.Opti()
Expand Down Expand Up @@ -161,7 +161,7 @@ def main() -> None:
opti.minimize(aero["CD"])
sol = opti.solve()
vlm = sol(vlm)
vlm.draw(show_kwargs=dict(jupyter_backend="static"))
vlm.draw(show_kwargs={"jupyter_backend": "static"})

# Compare our optimized solution with known analytic solution (an elliptical lift distribution)
plt.plot(
Expand Down
6 changes: 3 additions & 3 deletions data-orchestration/hm-airflow/dags/greet.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from datetime import datetime
from datetime import UTC, datetime

from airflow.decorators import task, task_group
from airflow.models import DAG
Expand All @@ -7,7 +7,7 @@

with DAG(
"greet",
start_date=datetime(2022, 1, 1),
start_date=datetime(2022, 1, 1, tzinfo=UTC),
schedule_interval="@once",
catchup=False,
params={
Expand Down Expand Up @@ -52,7 +52,7 @@ def greet(initials: str, ti: TaskInstance | None = None) -> str:
if time is None:
msg = "Failed to get time from xcom"
raise ValueError(msg)
dt = datetime.strptime(time, "%Y-%m-%d %H:%M:%S")
dt = datetime.strptime(time, "%Y-%m-%d %H:%M:%S").astimezone(UTC)
return f"Hello {initials}, {dt.strftime('at %H:%M on %B %d, %Y')}!"

@task_group
Expand Down
4 changes: 2 additions & 2 deletions data-orchestration/hm-airflow/dags/s3_download.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import os
from datetime import datetime
from datetime import UTC, datetime
from pathlib import Path

from airflow import DAG
Expand All @@ -8,7 +8,7 @@

with DAG(
"s3_download",
start_date=datetime(2022, 1, 1),
start_date=datetime(2022, 1, 1, tzinfo=UTC),
schedule_interval="@once",
catchup=False,
) as dag:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@ def load_trips(spark: SparkSession, data_paths: list[str]) -> DataFrame:


def preprocess_trips(df: DataFrame) -> DataFrame:
    """Normalize the trips DataFrame by lowercasing every column name.

    Args:
        df: Input DataFrame (presumably Spark — `toDF` is used; confirm upstream import).

    Returns:
        The same rows with all column names lowercased.
    """
    # The old `list(map(lambda ...))` line was left as a dead store next to the
    # comprehension (diff artifact); keep only the idiomatic comprehension.
    column_names = [x.lower() for x in df.columns]
    return df.toDF(*column_names)
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,6 @@ def load_zones(spark: SparkSession, zone_data_path: str) -> DataFrame:


def preprocess_zones(df: DataFrame) -> DataFrame:
    """Normalize the zones DataFrame: lowercase column names, drop `objectid`.

    Args:
        df: Input DataFrame (presumably Spark — `toDF`/`drop` are used; confirm upstream import).

    Returns:
        The DataFrame with lowercased column names and without the `objectid` column.
    """
    # Drop the dead duplicate `list(map(lambda ...))` line (diff artifact);
    # the comprehension is the surviving implementation.
    column_names = [x.lower() for x in df.columns]
    df = df.toDF(*column_names)
    return df.drop("objectid")
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@ def load_trips(spark: SparkSession, data_paths: list[str]) -> DataFrame:


def preprocess_trips(df: DataFrame) -> DataFrame:
    """Normalize the trips DataFrame by lowercasing every column name.

    Args:
        df: Input DataFrame (presumably Spark — `toDF` is used; confirm upstream import).

    Returns:
        The same rows with all column names lowercased.
    """
    # Remove the dead `list(map(lambda ...))` duplicate (diff artifact);
    # keep only the comprehension form.
    column_names = [x.lower() for x in df.columns]
    return df.toDF(*column_names)
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,6 @@ def load_zones(spark: SparkSession, zone_data_path: str) -> DataFrame:


def preprocess_zones(df: DataFrame) -> DataFrame:
    """Normalize the zones DataFrame: lowercase column names, drop `objectid`.

    Args:
        df: Input DataFrame (presumably Spark — `toDF`/`drop` are used; confirm upstream import).

    Returns:
        The DataFrame with lowercased column names and without the `objectid` column.
    """
    # Remove the dead `list(map(lambda ...))` duplicate (diff artifact);
    # keep only the comprehension form.
    column_names = [x.lower() for x in df.columns]
    df = df.toDF(*column_names)
    return df.drop("objectid")
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import shutil
import subprocess
import tempfile
from datetime import datetime
from datetime import UTC, datetime
from decimal import Decimal
from pathlib import Path
from typing import Any
Expand Down Expand Up @@ -61,7 +61,7 @@ def get_irig_times(iads_metadata_path: Path) -> tuple[str, str, int]:
elif line.startswith("FlightDate"):
# Parse date using datetime (format: MM/DD/YY)
date_str = line.split("=")[1].strip()
date = datetime.strptime(date_str, "%m/%d/%y")
date = datetime.strptime(date_str, "%m/%d/%y").astimezone(UTC)
year = date.year

if not irig_start_time or not irig_end_time or year is None:
Expand Down
2 changes: 1 addition & 1 deletion embedded-systems/decode-can-data/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def initialize_schema_dict(
unit_dict: dict[str, dict[str, str]],
) -> dict[str, pa.Schema]:
schema_dict = {}
unique_types = set(unit["type"] for unit in unit_dict.values())
unique_types = {unit["type"] for unit in unit_dict.values()}
for unit_type in unique_types:
if unit_type not in schema_dict:
schema_dict[unit_type] = CanUtils.get_dbc_schema(dbc_dict, unit_type)
Expand Down
8 changes: 4 additions & 4 deletions machine-learning/feature-store/generate_training_data.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from datetime import datetime, timedelta
from datetime import UTC, datetime, timedelta

import pandas as pd
from feast import FeatureStore
Expand All @@ -10,9 +10,9 @@
"driver_id": [1001, 1002, 1003],
"label_driver_reported_satisfaction": [1, 5, 3],
"event_timestamp": [
datetime.now() - timedelta(minutes=11),
datetime.now() - timedelta(minutes=36),
datetime.now() - timedelta(minutes=73),
datetime.now(tz=UTC) - timedelta(minutes=11),
datetime.now(tz=UTC) - timedelta(minutes=36),
datetime.now(tz=UTC) - timedelta(minutes=73),
],
},
)
Expand Down
2 changes: 1 addition & 1 deletion machine-learning/hm-faster-whisper/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def main(audio_path: Path, transcription_file: Path) -> None:
audio_path,
beam_size=5,
vad_filter=True,
vad_parameters=dict(min_silence_duration_ms=500, speech_pad_ms=400),
vad_parameters={"min_silence_duration_ms": 500, "speech_pad_ms": 400},
language="en",
task="transcribe",
initial_prompt=None,
Expand Down
4 changes: 2 additions & 2 deletions machine-learning/hm-rasa/actions/actions.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from datetime import datetime
from datetime import UTC, datetime
from typing import Any

from rasa_sdk import Action, FormValidationAction, Tracker
Expand All @@ -16,7 +16,7 @@ def run(
tracker: Tracker,
domain: dict[str, Any],
) -> list[dict[str, Any]]:
now = datetime.now()
now = datetime.now(tz=UTC)
dispatcher.utter_message(text=f"{now}")
return []

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import time
from datetime import datetime
from datetime import UTC, datetime

import numpy as np
import polars as pl
Expand All @@ -23,7 +23,7 @@ def main() -> None:
placeholder = st.empty()

while True:
current_time = datetime.now()
current_time = datetime.now(tz=UTC)
new_data_point = (
generator.standard_normal(2) / 10.0 + prev_values
if prev_values is not None
Expand Down
4 changes: 2 additions & 2 deletions machine-learning/stable-diffusion/src/main.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import secrets
from datetime import datetime
from datetime import UTC, datetime
from pathlib import Path

import torch
Expand Down Expand Up @@ -87,7 +87,7 @@ def generate_images(

# Save images
for idx, image in enumerate(result.images):
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
timestamp = datetime.now(tz=UTC).strftime("%Y%m%d_%H%M%S")
path = output_dir / f"{timestamp}_{idx}.png"
image.save(path)

Expand Down

0 comments on commit 972bbb6

Please sign in to comment.