
Commit
various minor bugfixes
GarethCabournDavies committed Nov 27, 2023
1 parent b3b6073 commit 9dbed91
Showing 4 changed files with 14 additions and 12 deletions.
bin/all_sky_search/pycbc_fit_sngls_split_binned: 2 changes (1 addition, 1 deletion)
@@ -217,7 +217,7 @@ trigs = SingleDetTriggers(
     args.trigger_file,
     args.ifo,
     filter_rank=args.sngl_ranking,
-    filter_threshold=args.args.plot_lower_stat_limit,
+    filter_threshold=args.plot_lower_stat_limit,
 )
 # This is the direct pointer to the HDF file, used later on
 trigf = trigs.trigs_f
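For context, this fix removes a doubled attribute access. A minimal standalone argparse sketch (hypothetical default value; the option mirrors the script's plot_lower_stat_limit argument) of why the original line would fail at runtime:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--plot-lower-stat-limit', type=float, default=6.0)
    args = parser.parse_args([])

    print(args.plot_lower_stat_limit)            # 6.0: the corrected access
    try:
        print(args.args.plot_lower_stat_limit)   # the buggy access
    except AttributeError as err:
        print('AttributeError:', err)            # Namespace has no attribute 'args'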
bin/minifollowups/pycbc_injection_minifollowup: 11 changes (6 additions, 5 deletions)
@@ -103,7 +103,7 @@ if args.ifar_threshold is not None:
         ifars = f['found_after_vetoes']['ifar'][:]
     except KeyError:
         ifars = f['found_after_vetoes']['ifar_exc'][:]
-        logging.warn('Inclusive IFAR not found, using exclusive')
+        logging.warning('Inclusive IFAR not found, using exclusive')
     lgc_arr = ifars < args.ifar_threshold
     missed = numpy.append(missed,
                           f['found_after_vetoes']['injection_index'][lgc_arr])
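The one-word change above swaps the deprecated logging.warn alias for the supported spelling. A minimal check:

    import logging

    logging.basicConfig(level=logging.WARNING)
    # logging.warn() still works but emits a DeprecationWarning on current
    # Python versions; logging.warning() is the supported name.
    logging.warning('Inclusive IFAR not found, using exclusive')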
@@ -178,6 +178,7 @@ def nearby_missedinj(endtime, snr):
 trigger_idx = {}
 trigger_snrs = {}
 trigger_times = {}
+# This finds the triggers near to _any_ missed injection
 for trig in single_triggers:
     ifo = trig.ifo
     with HFile(trig.lfn, 'r') as trig_f:
@@ -230,7 +231,7 @@ for num_event in range(num_events):
                              tags=args.tags + [str(num_event)])[0],)]
 
     for sngl in single_triggers:
-        # Find the triggers close to this injection at this IFO
+        # Find the triggers close to _this_ injection at this IFO
         ifo = sngl.ifo
         trig_tdiff = abs(inj_params[ifo + '_end_time'] - trigger_times[ifo])
         nearby = trig_tdiff < args.nearby_triggers_window
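A short standalone numpy sketch (made-up times and a hypothetical 1 s window) of the windowing step in this hunk: a trigger is kept only if its end time lies within nearby_triggers_window seconds of the injection.

    import numpy

    trigger_times = numpy.array([100.0, 105.2, 230.4])   # made-up trigger end times
    inj_end_time = 105.0                                  # made-up injection end time
    window = 1.0                                          # hypothetical window (seconds)

    trig_tdiff = abs(inj_end_time - trigger_times)
    nearby = trig_tdiff < window
    print(trigger_times[nearby])                          # [105.2]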
@@ -240,7 +241,7 @@ for num_event in range(num_events):
             continue
         # Find the loudest SNR in this window
         loudest = numpy.argmax(trigger_snrs[ifo][nearby])
-        # Convert to the indexin the trigger file
+        # Convert to the index in the trigger file
         nearby_trigger_idx = trigger_idx[ifo][nearby][loudest]
         # Make the info snippet
         sngl_info = mini.make_sngl_ifo(workflow, sngl, tmpltbank_file,
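The comment fix above sits next to some easy-to-miss index bookkeeping: argmax runs on the windowed subset, so the stored per-trigger indices are needed to map the result back to a row in the full trigger file. A small numpy sketch with made-up values:

    import numpy

    trigger_snrs = numpy.array([5.1, 9.3, 6.7, 12.0])    # made-up SNRs
    trigger_idx = numpy.arange(trigger_snrs.size)        # indices into the trigger file
    nearby = numpy.array([True, True, False, True])      # triggers inside the window

    loudest = numpy.argmax(trigger_snrs[nearby])         # position within the subset
    nearby_trigger_idx = trigger_idx[nearby][loudest]    # index in the trigger file
    print(loudest, nearby_trigger_idx)                   # 2 3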
@@ -310,7 +311,7 @@ for num_event in range(num_events):
         single_fname = args.single_detector_triggers[curr_ifo]
         idx = HFile(single_fname).select(
             lambda t: abs(t - inj_params['tc']) < args.inj_window,
-            f'{ifo}/end_time',
+            f'{curr_ifo}/end_time',
             indices_only=True,
         )
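The change above fixes a variable mix-up: the dataset path must follow curr_ifo, the detector currently being looped over, rather than an ifo value left over from an earlier loop, which would point select() at the wrong detector's end_time dataset. A trivial sketch with hypothetical detector names:

    ifos = ['H1', 'L1', 'V1']
    for ifo in ifos:
        pass                                   # earlier loop; ifo is now 'V1'
    for curr_ifo in ifos:
        buggy_path = f'{ifo}/end_time'         # always 'V1/end_time'
        fixed_path = f'{curr_ifo}/end_time'    # follows the current detector
        print(buggy_path, fixed_path)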

@@ -327,7 +328,7 @@ for num_event in range(num_events):
         # Next, find the loudest within this set of triggers
         # Use SNR here or NewSNR, or other??
         loudest_idx = hd_sngl.snr.argmax()
-        hd_sngl.apply_mask(loudest_idx)
+        hd_sngl.apply_mask([loudest_idx])
 
         # What are the parameters of this trigger?
         curr_params = copy.deepcopy(inj_params)
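Wrapping the loudest index in a list matters because indexing a numpy array with a bare integer returns a scalar and drops the array dimension, whereas a one-element list keeps a length-1 array, presumably what apply_mask expects downstream (the matching call in pycbc_page_snglinfo also passes an array). A quick numpy illustration:

    import numpy

    snr = numpy.array([5.1, 9.3, 6.7])     # made-up SNRs
    loudest_idx = snr.argmax()
    print(snr[loudest_idx])                # 9.3 (scalar)
    print(snr[[loudest_idx]])              # [9.3] (length-1 array)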
bin/minifollowups/pycbc_page_snglinfo: 4 changes (2 additions, 2 deletions)
@@ -76,12 +76,12 @@ sngl_file = hdf.SingleDetTriggers(
     args.instrument,
     bank_file=args.bank_file,
     veto_file=args.veto_file,
-    veto_segment_name=args.veto_segment_name,
+    segment_name=args.veto_segment_name,
 )
 
 if args.trigger_id is not None:
     # Apply a mask which is just the trigger of interest
-    sngl_file.apply_mask(np.array([args.trigger_id]))
+    sngl_file.apply_mask(numpy.array([args.trigger_id]))
 elif args.n_loudest is not None:
     # Cluster by a ranking statistic and retain only the loudest n clustered
     # triggers
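Two independent fixes here: the veto option is now passed to hdf.SingleDetTriggers under the segment_name keyword, and the mask array is built with numpy.array to match the name this script evidently imports (numpy rather than the np alias). A minimal sketch of the second point:

    import numpy

    trigger_id = 7                        # hypothetical trigger id
    mask = numpy.array([trigger_id])      # what the corrected call constructs
    print(mask)                           # [7]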
pycbc/io/hdf.py: 9 changes (5 additions, 4 deletions)
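The first hunk below changes how HFile.select builds each dataset reference: the group and dataset names are joined into a single path string, which presumably also handles the default empty group, since '' + '/' + name becomes an absolute '/name' lookup from the file root. A standalone h5py sketch of that path behaviour, using a hypothetical scratch file:

    import h5py
    import numpy

    with h5py.File('example.hdf', 'w') as f:       # hypothetical scratch file
        f['snr'] = numpy.arange(5.0)               # create a small dataset

    with h5py.File('example.hdf', 'r') as f:
        group = ''                                 # the default, no group given
        print(f[group + '/' + 'snr'][:])           # '/snr' resolves from the root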
@@ -92,7 +92,7 @@ def select(self, fcn, *args, **kwds):
         size = None
         group = kwds.get('group', '')
         for ds in dsets:
-            refs[ds] = self[group][ds]
+            refs[ds] = self[group + '/' + ds]
             if (size is not None) and (refs[ds].size != size):
                 raise RuntimeError(f"Dataset {ds} is {self[ds].size} "
                                    "entries long, which does not match "
@@ -648,17 +648,18 @@ def and_masks(self, logic_mask):
 
         # Use intersection of the indices of True values in the masks
         if hasattr(logic_mask, 'dtype') and (logic_mask.dtype == 'bool'):
-            new_indices = self.mask.nonzero()[0][logic_mask]
+            new_indices = logic_mask.nonzero()[0]
         else:
             new_indices = np.array(logic_mask)
 
         if hasattr(self.mask, 'dtype') and (self.mask.dtype == 'bool'):
-            orig_indices = self.mask.nonzero()[0][logic_mask]
+            orig_indices = self.mask.nonzero()[0]
         else:
             orig_indices = np.array(self.mask)
 
         self.mask[:] = False
-        self.mask[np.intersect1d(new_indices, orig_indices)] = True
+        and_indices = np.intersect1d(new_indices, orig_indices)
+        self.mask[and_indices.astype(np.uint64)] = True
 
     def mask_to_n_loudest_clustered_events(self, rank_method,
                                            ranking_threshold=6,
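The and_masks rewrite above converts both masks to index arrays, intersects them, and writes the result back as a boolean mask, with an explicit unsigned cast for the final fancy-indexing step. A standalone numpy sketch of the corrected logic (plain arrays, not the SingleDetTriggers class):

    import numpy as np

    mask = np.array([True, False, True, True, False])         # current mask
    logic_mask = np.array([False, False, True, True, True])   # new condition to AND in

    new_indices = logic_mask.nonzero()[0]     # indices selected by the new condition
    orig_indices = mask.nonzero()[0]          # indices already selected

    mask[:] = False
    and_indices = np.intersect1d(new_indices, orig_indices)
    mask[and_indices.astype(np.uint64)] = True
    print(mask)                               # [False False  True  True False]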
