Ruff changes #200

Merged: 4 commits, Nov 13, 2023
10 changes: 10 additions & 0 deletions .github/workflows/python-formatting.yml
@@ -0,0 +1,10 @@
name: check format using ruff
on: [push]
jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: chartboost/ruff-action@v1
        with:
          args: format --check
9 changes: 3 additions & 6 deletions .github/workflows/python-linting.yml
@@ -1,11 +1,8 @@
-name: Check Python formatting using Black and Ruff
-
+name: lint code using ruff
 on: [push]
-
 jobs:
-  lint:
+  ruff:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
-      - uses: psf/black@stable
+      - uses: actions/checkout@v4
       - uses: chartboost/ruff-action@v1
20 changes: 8 additions & 12 deletions .pre-commit-config.yaml
@@ -1,13 +1,9 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.7.0
-    hooks:
-      - id: black
-        language_version: "python3.10"
-
-  - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: v0.0.280
-    hooks:
-      - id: ruff
-        language_version: "python3.10"
-        args: [--exit-non-zero-on-fix]
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.1.5
+    hooks:
+      # Run the linter.
+      - id: ruff
+      # Run the formatter.
+      - id: ruff-format
136 changes: 0 additions & 136 deletions pyproject.toml

This file was deleted.

63 changes: 63 additions & 0 deletions ruff.toml
@@ -0,0 +1,63 @@
# Copied originally from pandas
target-version = "py310"

# fix = true
unfixable = []

select = [
    "I",     # isort
    "F",     # pyflakes
    "E", "W",  # pycodestyle
    "YTT",   # flake8-2020
    "B",     # flake8-bugbear
    "Q",     # flake8-quotes
    "T10",   # flake8-debugger
    "INT",   # flake8-gettext
    "PLC", "PLE", "PLR", "PLW",  # pylint
    "PIE",   # misc lints
    "PYI",   # flake8-pyi
    "TID",   # tidy imports
    "ISC",   # implicit string concatenation
    "TCH",   # type-checking imports
    "C4",    # comprehensions
    "PGH"    # pygrep-hooks
]

# Some additional rules that are useful
extend-select = [
    "UP009",    # UTF-8 encoding declaration is unnecessary
    "SIM118",   # Use `key in dict` instead of `key in dict.keys()`
    "D205",     # One blank line required between summary line and description
    "ARG001",   # Unused function argument
    "RSE102",   # Unnecessary parentheses on raised exception
    "PERF401",  # Use a list comprehension to create a transformed list
]

ignore = [
    "ISC001",   # Disable this for compatibility with ruff format
    "B028",     # No explicit `stacklevel` keyword argument found
    "B905",     # `zip()` without an explicit `strict=` parameter
    "E402",     # module level import not at top of file
    "E731",     # do not assign a lambda expression, use a def
    "PLC1901",  # compare-to-empty-string
    "PLR0911",  # Too many returns
    "PLR0912",  # Too many branches
    "PLR0913",  # Too many arguments to function call
    "PLR0915",  # Too many statements
    "PLR2004",  # Magic number
]

extend-exclude = [
    "docs",
]

[pycodestyle]
max-line-length = 100  # E501 reports lines that exceed the length of 100.

[lint.extend-per-file-ignores]
"__init__.py" = ["E402", "F401", "F403"]
# For tests:
# - D205: Don't worry about test docstrings
# - ARG001: Unused function argument false positives for some fixtures
"**/tests/test_*.py" = ["D205", "ARG001"]

28 changes: 15 additions & 13 deletions sparkles/core.py
@@ -1,4 +1,3 @@
-# coding: utf-8
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
 
 """
@@ -1175,8 +1174,8 @@ def check_catalog(self):
         self.check_include_exclude()
 
     def check_guide_overlap(self):
-        """
-        Check for overlapping tracked items.
+        """Check for overlapping tracked items.
+
         Overlap is defined as within 12 pixels.
         """
         ok = np.in1d(self["type"], ("GUI", "BOT", "FID", "MON"))
@@ -1266,8 +1265,9 @@ def check_guide_fid_position_on_ccd(self, entry):
         col_lim = ACA.max_ccd_col - ACA.CCD["window_pad"]
 
         def sign(axis):
-            """Return sign of the corresponding entry value. Note that np.sign returns 0
-            if the value is 0.0, not the right thing here.
+            """Return sign of the corresponding entry value.
+
+            Note that np.sign returns 0 if the value is 0.0, not the right thing here.
             """
             return -1 if (entry[axis] < 0) else 1
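A quick illustration of the behavior that docstring calls out, i.e. why np.sign is not used for this helper:

import numpy as np

# np.sign maps 0.0 to 0, but the nested sign() helper must always pick a side,
# so it returns +1 for 0.0 instead.
print(np.sign(0.0))             # 0.0
print(-1 if (0.0 < 0) else 1)   # 1, matching the helper's behavior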

@@ -1445,9 +1445,10 @@ def check_guide_count(self):
     # Add a check that for ORs with guide count between 3.5 and 4.0, the
     # dither is 4 arcsec if dynamic background not enabled.
     def check_dither(self):
-        """
-        Check dither. This presently checks that dither is 4x4 arcsec if
-        dynamic background is not in use and the field has a low guide_count.
+        """Check dither.
+
+        This presently checks that dither is 4x4 arcsec if dynamic background is not in
+        use and the field has a low guide_count.
         """
 
         # Skip check if guide_count is 4.0 or greater
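A minimal sketch of the condition this check describes; guide_count, dyn_bgd_enabled, dither_y and dither_z are placeholder names here, not the actual sparkles attributes:

def check_dither_sketch(guide_count, dyn_bgd_enabled, dither_y, dither_z):
    # Illustrative only: skip the check for strong fields or when dynamic
    # background is enabled.
    if guide_count >= 4.0 or dyn_bgd_enabled:
        return None
    # Low guide_count without dynamic background requires 4x4 arcsec dither.
    if (dither_y, dither_z) != (4.0, 4.0):
        return "dither must be 4x4 arcsec for low guide_count without dynamic background"
    return None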
@@ -1488,11 +1489,12 @@ def check_imposters_guide(self, star):
 
         # Borrow the imposter offset method from starcheck
         def imposter_offset(cand_mag, imposter_mag):
-            """
-            For a given candidate star and the pseudomagnitude of the brightest 2x2 imposter
-            calculate the max offset of the imposter counts are at the edge of the 6x6
-            (as if they were in one pixel). This is somewhat the inverse of
-            proseco.get_pixmag_for_offset .
+            """Get imposter offset.
+
+            For a given candidate star and the pseudomagnitude of the brightest 2x2
+            imposter calculate the max offset of the imposter counts are at the edge of
+            the 6x6 (as if they were in one pixel). This is somewhat the inverse of
+            proseco.get_pixmag_for_offset.
             """
             cand_counts = mag_to_count_rate(cand_mag)
             spoil_counts = mag_to_count_rate(imposter_mag)
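A hedged sketch of the kind of calculation the docstring describes: convert both magnitudes to count rates and take the count-weighted centroid shift if all imposter counts sat at the edge of the 6x6 box. The 3-pixel lever arm and 5 arcsec/pixel plate scale below are illustrative assumptions, not necessarily the constants starcheck uses:

from chandra_aca.transform import mag_to_count_rate

def imposter_offset_sketch(cand_mag, imposter_mag):
    # Count-weighted centroid shift if the imposter counts were concentrated
    # in a single pixel at the edge of the 6x6 readout window.
    cand_counts = mag_to_count_rate(cand_mag)
    spoil_counts = mag_to_count_rate(imposter_mag)
    lever_arm_pix = 3.0   # assumed distance from box center to edge (pixels)
    arcsec_per_pix = 5.0  # approximate ACA plate scale (assumption)
    return lever_arm_pix * arcsec_per_pix * spoil_counts / (cand_counts + spoil_counts)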
7 changes: 1 addition & 6 deletions sparkles/find_er_catalog.py
@@ -235,12 +235,7 @@ def convert_atts_to_list_of_quats(atts):
     if isinstance(atts, Quat):
         out = [Quat(q) for q in atts.q.reshape(-1, 4)]
     else:
-        out = []
-        # Assume atts is a flat list of Quats or Quat-compatible objects
-        for att in atts:
-            if not isinstance(att, Quat):
-                att = Quat(att)
-            out.append(att)
+        out = [(att if isinstance(att, Quat) else Quat(att)) for att in atts]
Reviewer comment (Contributor): A code change -- and totally fine.
     return out
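A short usage sketch for the refactored helper, assuming the Quaternion package's Quat class and a mix of input types:

import numpy as np
from Quaternion import Quat

from sparkles.find_er_catalog import convert_atts_to_list_of_quats

# Mixed inputs: an existing Quat plus a raw normalized quaternion (x, y, z, w).
atts = [Quat(q=[0.0, 0.0, 0.0, 1.0]), np.array([0.0, 0.0, 0.0, 1.0])]
quats = convert_atts_to_list_of_quats(atts)
assert all(isinstance(att, Quat) for att in quats)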


20 changes: 12 additions & 8 deletions sparkles/roll_optimize.py
@@ -1,4 +1,3 @@
-# coding: utf-8
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
 
 """
@@ -23,8 +22,8 @@
 
 
 def logical_intervals(vals, x=None):
-    """
-    Determine contiguous intervals during which ``vals`` is True.
+    """Determine contiguous intervals during which ``vals`` is True.
+
     Returns an Astropy Table with a row for each interval. Columns are:
     * idx_start: index of interval start
     * idx_stop: index of interval stop
@@ -59,15 +58,16 @@ def logical_intervals(vals, x=None):
 class RollOptimizeMixin:
     def get_candidate_better_stars(self):
         """Find stars that *might* substantially improve guide or acq catalogs.
+
         Get stars that might be candidates at a different roll. This takes
         stars outside the original square CCD FOV (but made smaller by 40
         pixels) and inside a circle corresponding to the box corners (but made
         bigger by 40 pixels). The inward padding ensures any stars that were
         originally excluded because of dither size etc are considered.
         :returns: list of indexes into self.stars
         """
-        # First define a spatial mask ``sp_ok`` on ``stars`` that is the
-        # region (mentioned above) between an inner square and outer circle.
+        # First define a spatial mask ``sp_ok`` on ``stars`` that is the region
+        # (mentioned above) between an inner square and outer circle.
         rc_pad = 40
         stars = self.stars
         in_fov = (np.abs(stars["row"]) < CCD["row_max"] - rc_pad) & (
@@ -123,6 +123,7 @@ def get_roll_intervals(
         max_roll_dev=None,
     ):
         """Find a list of rolls that might substantially improve guide or acq catalogs.
+
         If ``roll_nom`` is not specified then an approximate value is computed
         via ska_sun for the catalog ``date``. if ``roll_dev`` (max allowed
         off-nominal roll) is not specified it is computed using the OFLS table.
@@ -276,9 +277,10 @@ def _get_roll_intervals_uniform(
     def _get_roll_intervals_uniq_ids(
         ids0, ids_list, roll, roll_min, roll_max, roll_offsets, d_roll
     ):
-        """Private method to get roll intervals that span a range where there is a unique
-        set of available candidate stars within the entire interval.
+        """Get roll intervals.
+
+        Private method to get roll intervals that span a range where there is a unique
+        set of available candidate stars within the entire interval.
         """
         # Get all unique sets of stars that are in the FOV over the sampled
         # roll offsets. Ignore ids sets that do not add new candidate stars.
@@ -426,7 +428,9 @@ def get_roll_options(
         self.roll_options = roll_options
 
     def sort_and_limit_roll_options(self, roll_level, max_roll_options):
-        """Sort the roll options based on two keys:
+        """Sort the roll options based on two keys.
+
+        Keys are:
         - Number of warnings at roll_level or worse (e.g. number of criticals,
           so smaller is better)
         - Negative of improvement (larger improvement is better)
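A minimal sketch of that two-key sort, assuming each roll option is a dict with hypothetical n_warnings and improvement entries:

# Hypothetical option structures for illustration; real roll_options entries differ.
roll_options = [
    {"n_warnings": 2, "improvement": 0.5},
    {"n_warnings": 0, "improvement": 1.2},
    {"n_warnings": 0, "improvement": 0.3},
]
# Fewer warnings sort first; ties break toward larger improvement (hence the negation).
roll_options.sort(key=lambda opt: (opt["n_warnings"], -opt["improvement"]))
max_roll_options = 2
roll_options = roll_options[:max_roll_options]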