Skip to content

Commit

Permalink
JP-2796: Made style changes due to new flake8 5.0.3 (#114)
Browse files Browse the repository at this point in the history
* Making style changes.

* Removed set trace

* Updating change log.

* Updating change log.
  • Loading branch information
kmacdonald-stsci authored Aug 9, 2022
1 parent b77ac29 commit 5013a69
Show file tree
Hide file tree
Showing 6 changed files with 25 additions and 19 deletions.
6 changes: 6 additions & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
1.0.1 (unreleased)
==================

General
-------

- Made style changes due to the new 5.0.3 version of flake8, which
noted many missing white spaces after keywords. [#114]

Bug Fixes
---------

Expand Down
2 changes: 1 addition & 1 deletion src/stcal/dark_current/dark_sub.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,7 +283,7 @@ def average_dark_frames_4d(dark_data, nints, ngroups, nframes, groupgap):
# nints. (if science data only has 1 integration then there is no
# need to average the second integration of the dark reference file)
num_ints = dint
if(dint > nints):
if dint > nints:
num_ints = nints

# Loop over valid integrations in dark reference file
Expand Down
8 changes: 4 additions & 4 deletions src/stcal/jump/twopoint_difference.py
Original file line number Diff line number Diff line change
Expand Up @@ -293,11 +293,11 @@ def calc_med_first_diffs(first_diffs):
mask[np.nanargmax(np.abs(first_diffs))] = False # clip the diff with the largest abs value
return np.nanmedian(first_diffs[mask])
elif num_usable_groups == 3: # if 3, no clipping just return median
return(np.nanmedian(first_diffs))
return np.nanmedian(first_diffs)
elif num_usable_groups == 2: # if 2, return diff with minimum abs
return(first_diffs[np.nanargmin(np.abs(first_diffs))])
return first_diffs[np.nanargmin(np.abs(first_diffs))]
else:
return(np.nan)
return np.nan

# if input is multi-dimensional

Expand Down Expand Up @@ -332,4 +332,4 @@ def calc_med_first_diffs(first_diffs):
row_none, col_none = np.where(num_usable_groups < 2)
median_diffs[row_none, col_none] = np.nan

return(median_diffs)
return median_diffs
2 changes: 1 addition & 1 deletion src/stcal/linearity/linearity.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,7 @@ def correct_for_flag(lin_coeffs, lin_dq, dqflags):

# If there are pixels flagged as 'NO_LIN_CORR', update the corresponding
# coefficients so that those SCI values will be unchanged.
if (num_flag > 0):
if num_flag > 0:
ben_cor = ben_coeffs(lin_coeffs) # get benign coefficients

for ii in range(num_flag):
Expand Down
6 changes: 3 additions & 3 deletions src/stcal/ramp_fitting/gls_fit.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ def create_output_image(ramp_data):
pdq = np.zeros(image_shape, dtype=np.uint32)
err = np.zeros(image_shape, dtype=np.float32)

return (slope, pdq, err)
return slope, pdq, err


def create_output_integ(ramp_data):
Expand All @@ -252,7 +252,7 @@ def create_output_integ(ramp_data):
pdq = np.zeros(image_shape, dtype=np.uint32)
err = np.zeros(image_shape, dtype=np.float32)

return (slope, pdq, err)
return slope, pdq, err


def create_output_opt_res(ramp_data):
Expand Down Expand Up @@ -1589,4 +1589,4 @@ def gls_fit(ramp_data, prev_fit_data, prev_slope_data, readnoise, gain, frame_ti
# shape of both result2d and variances is (nz, 2 + num_cr)
fitparam_uncs = fitparam_cov.diagonal(axis1=1, axis2=2).copy()

return (fitparam2d, fitparam_uncs)
return fitparam2d, fitparam_uncs
20 changes: 10 additions & 10 deletions src/stcal/ramp_fitting/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ def shrink_crmag(self, n_int, dq_cube, imshape, nreads, jump_det):
for nn in range(len(cr_int_has_cr[0])):
y, x = cr_int_has_cr[0][nn], cr_int_has_cr[1][nn]

if (cr_mag_int[k_rd, y, x] > 0.):
if cr_mag_int[k_rd, y, x] > 0.:
cr_com[ii_int, end_cr[y, x], y, x] = cr_mag_int[k_rd, y, x]
end_cr[y, x] += 1

Expand Down Expand Up @@ -364,7 +364,7 @@ def alloc_arrays_1(n_int, imshape):
# for estimated median slopes
sat_0th_group_int = np.zeros((n_int,) + imshape, dtype=np.uint8)

return (dq_int, num_seg_per_int, sat_0th_group_int)
return dq_int, num_seg_per_int, sat_0th_group_int


def alloc_arrays_2(n_int, imshape, max_seg):
Expand Down Expand Up @@ -523,7 +523,7 @@ def calc_slope_vars(ramp_data, rn_sect, gain_sect, gdq_sect, group_time, max_seg

i_read = 0
# Loop over reads for all pixels to get segments (segments per pixel)
while (i_read < nreads and np.any(pix_not_done)):
while i_read < nreads and np.any(pix_not_done):
gdq_1d = gdq_2d_nan[i_read, :]
wh_good = np.where(gdq_1d == 0) # good groups

Expand All @@ -537,7 +537,7 @@ def calc_slope_vars(ramp_data, rn_sect, gain_sect, gdq_sect, group_time, max_seg
gdq_2d_nan[i_read, :].astype(np.int32) & ramp_data.flags_jump_det > 0)

# ... but not on final read:
if (len(wh_cr[0]) > 0 and (i_read < nreads - 1)):
if len(wh_cr[0]) > 0 and (i_read < nreads - 1):
sr_index[wh_cr[0]] += 1
segs[sr_index[wh_cr], wh_cr] += 1

Expand Down Expand Up @@ -614,7 +614,7 @@ def calc_slope_vars(ramp_data, rn_sect, gain_sect, gdq_sect, group_time, max_seg
den_r3[wh_seg_pos] = 1. / (segs_beg_3[wh_seg_pos] ** 3. - segs_beg_3[wh_seg_pos])
warnings.resetwarnings()

return (den_r3, den_p3, num_r3, segs_beg_3)
return den_r3, den_p3, num_r3, segs_beg_3


def calc_pedestal(ramp_data, num_int, slope_int, firstf_int, dq_first, nframes,
Expand Down Expand Up @@ -843,7 +843,7 @@ def get_efftim_ped(ramp_data):
frame_time = ramp_data.frame_time
dropframes1 = ramp_data.drop_frames1

if (dropframes1 is None): # set to default if missing
if dropframes1 is None: # set to default if missing
dropframes1 = 0
log.debug('Missing keyword DRPFRMS1, so setting to default value of 0')

Expand Down Expand Up @@ -950,7 +950,7 @@ def get_more_info(ramp_data, saturated_flag, jump_flag): # pragma: no cover
saturated_flag = ramp_data.flags_saturated
jump_flag = ramp_data.flags_jump_det

return (group_time, nframes_used, saturated_flag, jump_flag)
return group_time, nframes_used, saturated_flag, jump_flag


def get_max_num_cr(gdq_cube, jump_flag): # pragma: no cover
Expand Down Expand Up @@ -1045,13 +1045,13 @@ def remove_bad_singles(segs_beg_3):
tot_num_single_grp_ramps = len(np.where((segs_beg_3 == 1) &
(segs_beg_3.sum(axis=0) > 1))[0])

while(tot_num_single_grp_ramps > 0):
while tot_num_single_grp_ramps > 0:
# until there are no more single-group segments
for ii_0 in range(max_seg):
slice_0 = segs_beg_3[ii_0, :, :]

for ii_1 in range(max_seg): # correctly includes EARLIER segments
if (ii_0 == ii_1): # don't compare with itself
if ii_0 == ii_1: # don't compare with itself
continue

slice_1 = segs_beg_3[ii_1, :, :]
Expand All @@ -1060,7 +1060,7 @@ def remove_bad_singles(segs_beg_3):
# either earlier or later
wh_y, wh_x = np.where((slice_0 == 1) & (slice_1 > 0))

if (len(wh_y) == 0):
if len(wh_y) == 0:
# Are none, so go to next pair of segments to check
continue

Expand Down

0 comments on commit 5013a69

Please sign in to comment.