Skip to content

Commit

Permalink
Merge pull request #4 from spacetelescope/main
Browse files Browse the repository at this point in the history
catch up
  • Loading branch information
mwregan2 authored Nov 1, 2022
2 parents 48c9ceb + 33aa4f4 commit dbe7f18
Show file tree
Hide file tree
Showing 9 changed files with 314 additions and 24 deletions.
2 changes: 1 addition & 1 deletion .github/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ documentation:
- 'docs/**/*'
- any: [ '*.rst', '!CHANGES.rst' ]
- '*.md'
- '.readthedocs.yml'
- '.readthedocs.yaml'
- 'LICENSE'

installation:
Expand Down
2 changes: 1 addition & 1 deletion .readthedocs.yml → .readthedocs.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# .readthedocs.yml
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

Expand Down
44 changes: 35 additions & 9 deletions CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,7 +1,40 @@
1.1.1 (unreleased)
1.2.2 (unreleased)
==================

-


1.2.1 (2022-10-14)
==================

Bug Fixes
---------

jump
~~~~
- Changes to limit the expansion of MIRI shower ellipses to be the same
number of pixels for both the major and minor axis. JP-2944 [#123]

1.2.0 (2022-10-07)
==================

Bug Fixes
---------

dark_current
~~~~~~~~~~~~

- Bug fix for computation of the total number of frames when science data
use on-board frame averaging and/or group gaps. [#121]

jump
~~~~

- Changes to flag both NIR snowballs and MIRI showers
for JP-#2645. [#118]

- Early in the step, the object arrays are converted from DN to electrons
by multiplying by the gain. The values need to be reverted back to DN
at the end of the step. [#116]

1.1.0 (2022-08-17)
==================
Expand All @@ -15,13 +48,6 @@ General
Bug Fixes
---------

jump
~~~~

- Early in the step, the object arrays are converted from DN to electrons
by multiplying by the gain. The values need to be reverted back to DN
at the end of the step. [#116]

ramp_fitting
~~~~~~~~~~~~

Expand Down
3 changes: 3 additions & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,3 +53,6 @@ def setup(app):
html_domain_indices = True
html_sidebars = {"**": ["globaltoc.html", "relations.html", "searchbox.html"]}
html_use_index = True

# Enable nitpicky mode - which ensures that all references in the docs resolve.
nitpicky = True
1 change: 1 addition & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ install_requires =
numpy>=1.17
astropy>=5.0.4
scipy>=1.6.0
opencv-python>=4.6.0.66

package_dir =
=src
Expand Down
7 changes: 3 additions & 4 deletions src/stcal/dark_current/dark_sub.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,12 +96,11 @@ def do_correction_data(science_data, dark_data, dark_output=None):

# Check that the number of groups in the science data does not exceed
# the number of groups in the dark current array.
sci_total_frames = sci_ngroups * (sci_nframes + sci_groupgap)
drk_total_frames = drk_ngroups * (drk_nframes + drk_groupgap)
sci_total_frames = sci_ngroups * sci_nframes + (sci_ngroups - 1) * sci_groupgap
drk_total_frames = drk_ngroups * drk_nframes + (drk_ngroups - 1) * drk_groupgap
if sci_total_frames > drk_total_frames:
log.warning(
"Not enough data in dark reference file to match to "
"science data."
"Not enough data in dark reference file to match to science data."
)
log.warning("Input will be returned without subtracting dark current.")
science_data.cal_step = 'SKIPPED'
Expand Down
211 changes: 205 additions & 6 deletions src/stcal/jump/jump.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import time
import logging
import numpy as np

import cv2 as cv
from . import twopoint_difference as twopt
from . import constants

import multiprocessing # 05/18/21: TODO- commented out now; reinstate later
import multiprocessing

log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
Expand All @@ -18,7 +18,13 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,
after_jump_flag_dn1=0.0,
after_jump_flag_n1=0,
after_jump_flag_dn2=0.0,
after_jump_flag_n2=0):
after_jump_flag_n2=0,
min_sat_area=1,
min_jump_area=5,
expand_factor=2.0,
use_ellipses=False,
sat_required_snowball=True,
expand_large_events=True):
"""
This is the high-level controlling routine for the jump detection process.
It loads and sets the various input data and parameters needed by each of
Expand Down Expand Up @@ -108,6 +114,30 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,
Gives the number of groups to flag after jumps with DN values above that
given by after_jump_flag_dn2
min_sat_area : float
The minimum area of saturated pixels at the center of a snowball. Only contours
with area above the minimum will create snowballs.
min_jump_area : float
The minimum contour area to trigger the creation of enclosing ellipses or circles.
expand_factor : float
The factor that is used to increase the size of the enclosing circle/ellipse jump
flagged pixels.
use_ellipses : bool
If True the minimum enclosing ellipse will be created for jump regions that meet the area
requirement. This is best for MIRI showers, which are only rarely circular. For the NIR detectors
this should be set to False to force circles to be used.
sat_required_snowball : bool
If true there must be a saturation circle within the radius of the jump circle to trigger
the creation of a snowball. All true snowballs appear to have at least one saturated pixel.
expand_large_events : bool
When True this triggers the flagging of snowballs and showers for NIR and MIRI detectors. If
set to False neither type of extended flagging occurs.
Returns
-------
gdq : int, 4D array
Expand All @@ -117,7 +147,8 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,
updated pixel dq array
"""
constants.update_dqflags(dqflags) # populate dq flags

sat_flag = dqflags["SATURATED"]
jump_flag = dqflags["JUMP_DET"]
# Flag the pixeldq where the gain is <=0 or NaN so they will be ignored
wh_g = np.where(gain_2d <= 0.)
if len(wh_g[0] > 0):
Expand Down Expand Up @@ -176,7 +207,13 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,
after_jump_flag_e2=after_jump_flag_e2,
after_jump_flag_n2=after_jump_flag_n2)

elapsed = time.time() - start
# This is the flag that controls the flagging of either snowballs or showers.
if expand_large_events:
flag_large_events(gdq, jump_flag, sat_flag, min_sat_area=min_sat_area,
min_jump_area=min_jump_area,
expand_factor=expand_factor, use_ellipses=use_ellipses,
sat_required_snowball=sat_required_snowball)

else:
yinc = int(n_rows / n_slices)
slices = []
Expand Down Expand Up @@ -249,7 +286,13 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,
# save the neighbors to be flagged that will be in the next slice
previous_row_above_gdq = row_above_gdq.copy()
k += 1
elapsed = time.time() - start

# This is the flag that controls the flagging of either snowballs or showers.
if expand_large_events:
flag_large_events(gdq, jump_flag, sat_flag, min_sat_area=min_sat_area,
min_jump_area=min_jump_area,
expand_factor=expand_factor, use_ellipses=use_ellipses,
sat_required_snowball=sat_required_snowball)

elapsed = time.time() - start
log.info('Total elapsed time = %g sec' % elapsed)
Expand All @@ -262,3 +305,159 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,

# Return the updated data quality arrays
return gdq, pdq


def flag_large_events(gdq, jump_flag, sat_flag, min_sat_area=1,
                      min_jump_area=6,
                      expand_factor=1.9, use_ellipses=False,
                      sat_required_snowball=True):
    """
    This routine controls the creation of expanded regions that are flagged as jumps. These are called
    snowballs for the NIR and are almost always circular with a saturated core. For MIRI they are better
    fit with ellipses.

    :param gdq: The group DQ cube for all integrations (modified in place)
    :param jump_flag: The bit value that represents jump
    :param sat_flag: The bit value that represents saturation
    :param min_sat_area: The minimum area of saturated pixels within the jump circle to trigger the
           creation of a snowball.
    :param min_jump_area: The minimum area of a contour to cause the creation of the minimum
           enclosing ellipse or circle
    :param expand_factor: The factor that increases the size of the snowball or enclosing ellipse.
    :param use_ellipses: Use ellipses rather than circles (better for MIRI)
    :param sat_required_snowball: Require that there is a saturated pixel within the radius of the jump
           circle to trigger the formation of a snowball.
    :return: none
    """

    log.info('Flagging large events (snowballs, showers).')

    for integration in range(gdq.shape[0]):
        # Bug fix: reset the per-group counters at the start of every
        # integration. Previously the lists were created once outside this
        # loop, so the per-integration log message below also reported the
        # counts accumulated from all earlier integrations.
        n_showers_grp = []
        n_showers_grp_ellipse = []
        for group in range(1, gdq.shape[1]):
            if use_ellipses:
                jump_ellipses = find_ellipses(gdq[integration, group, :, :], jump_flag, min_jump_area)
                n_showers_grp_ellipse.append(len(jump_ellipses))
                gdq[integration, group, :, :], num_events = \
                    extend_ellipses(gdq[integration, group, :, :],
                                    jump_ellipses, sat_flag, jump_flag,
                                    expansion=expand_factor)
            else:
                # this line needs to be changed later to just look at saturation flags.
                new_flagged_pixels = gdq[integration, group, :, :] - gdq[integration, group - 1, :, :]

                sat_circles = find_circles(new_flagged_pixels, sat_flag, min_sat_area)
                sat_pixels = np.bitwise_and(new_flagged_pixels, sat_flag)
                saty, satx = np.where(sat_pixels == sat_flag)
                only_jump = gdq[integration, group, :, :].copy()
                only_jump[saty, satx] = jump_flag
                jump_circles = find_circles(only_jump, jump_flag, min_jump_area)

                if sat_required_snowball:
                    snowballs = make_snowballs(jump_circles, sat_circles)
                else:
                    snowballs = jump_circles
                n_showers_grp.append(len(snowballs))
                gdq[integration, group, :, :], num_events = extend_snowballs(gdq[integration, group, :, :],
                                                                             snowballs, sat_flag,
                                                                             jump_flag,
                                                                             expansion=expand_factor)
        if use_ellipses:
            if np.all(np.array(n_showers_grp_ellipse) == 0):
                log.info(f'No showers found in integration {integration}.')
            else:
                log.info(f' In integration {integration}, number of' +
                         f'showers in each group = {n_showers_grp_ellipse}')
        else:
            if np.all(np.array(n_showers_grp) == 0):
                log.info(f'No snowballs found in integration {integration}.')
            else:
                log.info(f' In integration {integration}, number of snowballs ' +
                         f'in each group = {n_showers_grp}')


def extend_snowballs(plane, snowballs, sat_flag, jump_flag, expansion=1.5):
    """
    For a given DQ plane, use the list of snowballs to flag expanded circles of
    pixels with the jump flag set. Pixels that are already saturated are left
    untouched.

    :param plane: 2D group DQ plane (modified copy is returned)
    :param snowballs: list of (center, radius) circles from cv.minEnclosingCircle
    :param sat_flag: The bit value that represents saturation
    :param jump_flag: The bit value that represents jump
    :param expansion: factor by which each circle's radius is enlarged
    :return: (updated plane, number of snowballs)
    """
    image = np.zeros(shape=(plane.shape[0], plane.shape[1], 3), dtype=np.uint8)
    num_circles = len(snowballs)
    # Bug fix: honor the sat_flag and jump_flag parameters instead of the
    # previously hard-coded values 2 and 4. Behavior is unchanged for the
    # standard JWST flag values (SATURATED=2, JUMP_DET=4).
    # NOTE(review): the jump flag is drawn into a uint8 image, so this assumes
    # jump_flag <= 255 — true for the standard bit values.
    sat_pix = np.bitwise_and(plane, sat_flag)
    for snowball in snowballs:
        jump_radius = snowball[1]
        jump_center = snowball[0]
        cenx = jump_center[1]
        ceny = jump_center[0]
        extend_radius = round(jump_radius * expansion)
        image = cv.circle(image, (round(ceny), round(cenx)), extend_radius, (0, 0, jump_flag), -1)
    jump_circle = image[:, :, 2]
    # Do not overwrite saturated pixels with the jump flag.
    saty, satx = np.where(sat_pix == sat_flag)
    jump_circle[saty, satx] = 0
    plane = np.bitwise_or(plane, jump_circle)

    return plane, num_circles


def extend_ellipses(plane, ellipses, sat_flag, jump_flag, expansion=1.1):
    """
    For a given DQ plane, use the list of ellipses to flag expanded ellipses of
    pixels with the jump flag set. Pixels that are already saturated are left
    untouched.

    :param plane: 2D group DQ plane (modified copy is returned)
    :param ellipses: list of (center, axes, angle) tuples from cv.minAreaRect
    :param sat_flag: The bit value that represents saturation
    :param jump_flag: The bit value that represents jump
    :param expansion: factor by which the ellipse axes are enlarged
    :return: (updated plane, number of ellipses)
    """
    image = np.zeros(shape=(plane.shape[0], plane.shape[1], 3), dtype=np.uint8)
    num_ellipses = len(ellipses)
    # Bug fix: honor the sat_flag and jump_flag parameters instead of the
    # previously hard-coded values 2 and 4. Behavior is unchanged for the
    # standard JWST flag values (SATURATED=2, JUMP_DET=4).
    # NOTE(review): the jump flag is drawn into a uint8 image, so this assumes
    # jump_flag <= 255 — true for the standard bit values.
    sat_pix = np.bitwise_and(plane, sat_flag)
    for ellipse in ellipses:
        ceny = ellipse[0][0]
        cenx = ellipse[0][1]
        # Expand the ellipse by the expansion factor. The number of pixels added to both axes is
        # the number of pixels added to the minor axis. This prevents very large flagged ellipses
        # with high axis ratio ellipses. The major and minor axis are not always the same index.
        # Therefore, we have to test to find which is actually the minor axis.
        if ellipse[1][1] < ellipse[1][0]:
            axis1 = ellipse[1][0] + (expansion - 1.0) * ellipse[1][1]
            axis2 = ellipse[1][1] * expansion
        else:
            axis1 = ellipse[1][0] * expansion
            axis2 = ellipse[1][1] + (expansion - 1.0) * ellipse[1][0]
        alpha = ellipse[2]
        image = cv.ellipse(image, (round(ceny), round(cenx)), (round(axis1 / 2),
                           round(axis2 / 2)), alpha, 0, 360, (0, 0, jump_flag), -1)
    jump_ellipse = image[:, :, 2]
    # Do not overwrite saturated pixels with the jump flag.
    saty, satx = np.where(sat_pix == sat_flag)
    jump_ellipse[saty, satx] = 0
    plane = np.bitwise_or(plane, jump_ellipse)
    return plane, num_ellipses


def find_circles(dqplane, bitmask, min_area):
    """
    Find the groups of pixels in a DQ plane that have the given bit set and an
    area of at least min_area, and return the parameters of the minimum
    enclosing circle for each group.

    :param dqplane: 2D group DQ plane
    :param bitmask: DQ bit value selecting the pixels of interest
    :param min_area: minimum contour area (in pixels) to keep a group
    :return: list of ((x, y), radius) circle parameters
    """
    flagged = np.bitwise_and(dqplane, bitmask)
    contours, _ = cv.findContours(flagged, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
    return [cv.minEnclosingCircle(contour)
            for contour in contours
            if cv.contourArea(contour) >= min_area]


def find_ellipses(dqplane, bitmask, min_area):
    """
    Find the groups of pixels in a DQ plane that have the given bit set and an
    area of at least min_area, and return the parameters of the minimum
    enclosing (rotated) ellipse for each group.

    :param dqplane: 2D group DQ plane
    :param bitmask: DQ bit value selecting the pixels of interest
    :param min_area: minimum contour area (in pixels) to keep a group
    :return: list of (center, axes, angle) tuples from cv.minAreaRect
    """
    pixels = np.bitwise_and(dqplane, bitmask)
    contours, hierarchy = cv.findContours(pixels, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
    # Consistency fix: use >= so that contours exactly at min_area are kept,
    # matching both the "at least the minimum area" contract and the
    # behavior of find_circles.
    bigcontours = [con for con in contours if cv.contourArea(con) >= min_area]
    # minAreaRect is used because fitEllipse requires 5 points and it is possible to have a contour
    # with just 4 points.
    ellipses = [cv.minAreaRect(con) for con in bigcontours]
    return ellipses


def make_snowballs(jump_circles, sat_circles):
    """
    Build the list of snowballs: the jump circles that contain the center of
    at least one saturation circle within their enclosing radius.

    :param jump_circles: list of ((y, x), radius) jump circles
    :param sat_circles: list of ((y, x), radius) saturation circles
    :return: list of jump circles qualifying as snowballs (no duplicates)
    """
    snowballs = []
    for jump_center, jump_radius in jump_circles:
        for sat_center, _ in sat_circles:
            delta0 = jump_center[0] - sat_center[0]
            delta1 = jump_center[1] - sat_center[1]
            # Center of saturation lies within the enclosing jump circle.
            if np.sqrt(delta0 ** 2 + delta1 ** 2) < jump_radius:
                candidate = (jump_center, jump_radius)
                if candidate not in snowballs:
                    snowballs.append(candidate)
    return snowballs
3 changes: 0 additions & 3 deletions src/stcal/jump/twopoint_difference.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,8 +165,6 @@ def find_crs(dataa, group_dq, read_noise, rejection_thresh,

log_str = 'From highest outlier, two-point found {} pixels with at least one CR from {} groups.'
log.info(log_str.format(len(row4cr), 'five or more'))
log.info(log_str.format(len(row3cr), 'four'))
log.info(log_str.format(len(row2cr), 'three'))

# get the rows, col pairs for all pixels with at least one CR
all_crs_row = np.concatenate((row4cr, row3cr, row2cr))
Expand Down Expand Up @@ -288,7 +286,6 @@ def find_crs(dataa, group_dq, read_noise, rejection_thresh,
flag_groups = [after_jump_flag_n1, after_jump_flag_n2]

cr_group, cr_row, cr_col = np.where(np.bitwise_and(gdq[integ], jump_flag))

for cthres, cgroup in zip(flag_e_threshold, flag_groups):
if cgroup > 0:
log.info(f"Flagging {cgroup} groups after detected jumps with e >= {np.mean(cthres)}.")
Expand Down
Loading

0 comments on commit dbe7f18

Please sign in to comment.