Skip to content

Commit

Permalink
added detgate (#4262)
Browse files Browse the repository at this point in the history
* added detgate

* replace extend with concatenate

* added else statement in detgate

* correction for indent

* Minor formatting

* Minor formatting

* Minor formatting / comments

---------

Co-authored-by: Thomas Dent <thomas.dent@usc.es>
  • Loading branch information
PRAVEEN-mnl and tdent authored Feb 14, 2023
1 parent 4fd0c0d commit 2c6e843
Show file tree
Hide file tree
Showing 3 changed files with 48 additions and 32 deletions.
11 changes: 8 additions & 3 deletions bin/all_sky_search/pycbc_fit_sngls_by_template
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ for veto_file, veto_segment_name in zip(args.veto_file, args.veto_segment_name):
time = time[retain]
logging.info('%i trigs left after vetoing with %s' %
(len(stat), veto_file))
#now include gating veto
# Include gating vetoes
if args.gating_veto_windows:
gating_veto = args.gating_veto_windows[args.ifo].split(',')
gveto_before = float(gating_veto[0])
Expand All @@ -212,7 +212,12 @@ if args.gating_veto_windows:
raise ValueError("Gating veto window values must be negative before "
"gates and positive after gates.")
if not (gveto_before == 0 and gveto_after == 0):
gate_times = np.unique(trigf[args.ifo + '/gating/auto/time'][:])
autogate_times = np.unique(trigf[args.ifo + '/gating/auto/time'][:])
if args.ifo + '/gating/file' in trigf:
detgate_times = trigf[args.ifo + '/gating/file/time'][:]
else:
detgate_times = []
gate_times = np.concatenate((autogate_times, detgate_times))
gveto_segs = events.veto.start_end_to_segments(gate_times + gveto_before,
gate_times + gveto_after).coalesce()
all_segments -= gveto_segs
Expand Down Expand Up @@ -363,7 +368,7 @@ if args.save_trig_param:
outfile.create_dataset("template_param", data=tpars)
outfile.create_dataset("count_in_template", data=counts_total)
outfile.create_dataset("median_sigma", data=median_sigma)
# add some metadata
# Add some metadata
outfile.attrs.create("ifo", data=args.ifo.encode())
outfile.attrs.create("fit_function", data=args.fit_function.encode())
outfile.attrs.create("sngl_stat", data=args.sngl_ranking)
Expand Down
32 changes: 18 additions & 14 deletions bin/all_sky_search/pycbc_fit_sngls_split_binned
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ for i, lower_2, upper_2 in zip(range(args.split_two_nbins),
id_in_bin2[i] = np.intersect1d(np.argwhere(params[args.split_param_two] > lower_2),
np.argwhere(params[args.split_param_two] <= upper_2))

logging.info('getting template boundaries from trigger file')
logging.info('Getting template boundaries from trigger file')
boundaries = trigf[args.ifo + '/template_boundaries'][:]
max_boundary_id = np.argmax(boundaries)
sorted_boundary_list = np.sort(boundaries)
Expand All @@ -210,12 +210,12 @@ for idx, idx_start in enumerate(boundaries):
where_idx_end[idx] = sorted_boundary_list[
np.argmax(sorted_boundary_list == idx_start) + 1]

logging.info('calculating single stat values from trigger file')
logging.info('Calculating single stat values from trigger file')
rank_method = pystat.get_statistic_from_opts(args, [args.ifo])
stat = rank_method.get_sngl_ranking(trigf[args.ifo])

if args.veto_file:
logging.info('applying DQ vetoes')
logging.info('Applying DQ vetoes')
time = trigf[args.ifo + '/end_time'][:]
remove, junk = events.veto.indices_within_segments(time, [args.veto_file],
ifo=args.ifo, segment_name=args.veto_segment_name)
Expand All @@ -228,7 +228,7 @@ if args.veto_file:
remove.size, stat.size, args.veto_segment_name, args.veto_file))

if args.gating_veto_windows:
logging.info('applying veto to triggers near gates')
logging.info('Applying veto to triggers near gates')
gating_veto = args.gating_veto_windows[args.ifo].split(',')
gveto_before = float(gating_veto[0])
gveto_after = float(gating_veto[1])
Expand All @@ -237,7 +237,12 @@ if args.gating_veto_windows:
"gates and positive after gates.")
if not (gveto_before == 0 and gveto_after == 0):
time = trigf[args.ifo + '/end_time'][:]
gate_times = np.unique(trigf[args.ifo + '/gating/auto/time'][:])
autogate_times = np.unique(trigf[args.ifo + '/gating/auto/time'][:])
if args.ifo + '/gating/file' in trigf:
detgate_times = trigf[args.ifo + '/gating/file/time'][:]
else:
detgate_times = []
gate_times = np.concatenate((autogate_times, detgate_times))
gveto_remove = events.veto.indices_within_times(time, gate_times + gveto_before,
gate_times + gveto_after)
stat[gveto_remove] = np.zeros_like(gveto_remove)
Expand Down Expand Up @@ -295,7 +300,7 @@ for x in range(args.split_one_nbins):

trigf.close()

logging.info('setting up plotting and fitting limit values')
logging.info('Setting up plotting and fitting limit values')
minplot = max(stat[np.nonzero(stat)].min(), args.stat_fit_threshold - 1)
min_fit = max(minplot, args.stat_fit_threshold)
max_fit = 1.05 * stat.max()
Expand All @@ -305,14 +310,14 @@ else:
maxplot = max_fit
fitrange = np.linspace(min_fit, max_fit, 100)

logging.info('setting up plotting variables')
logging.info('Setting up plotting variables')
histcolors = ['r',(1.0,0.6,0),'y','g','c','b','m','k',(0.8,0.25,0),(0.25,0.8,0)]
fig, axes = plt.subplots(args.split_one_nbins, args.split_two_nbins,
sharex=True, sharey=True, squeeze=False,
figsize=(3 * (args.split_two_nbins + 1),
3 * args.split_one_nbins))

# setting up overall legend outside the split-up plots
# Setting up overall legend outside the split-up plots
lines = []
labels = []
for i, lower, upper in zip(range(args.num_bins), pbins.lower(), pbins.upper()):
Expand All @@ -333,7 +338,7 @@ pidx = []
for i in range(args.num_bins):
pidx.append([np.argwhere(pind == i)])

logging.info('starting bin, histogram and plot loop')
logging.info('Starting bin, histogram and plot loop')
maxyval = 0
for x in range(args.split_one_nbins):
id_bin1 = id_in_bin1[x]
Expand Down Expand Up @@ -376,10 +381,10 @@ for x in range(args.split_one_nbins):
# alpha - sig_alpha,
# args.stat_fit_threshold)

# make histogram of fitted values
# Histogram of fitted values
histcounts, edges = np.histogram(vals_inbin, bins=50)
cum_counts = histcounts[::-1].cumsum()[::-1]
# plot the lines!
# Plot the lines!
ax.semilogy(edges[:-1], cum_counts, linewidth=2,
color=histcolors[i], alpha=0.6)
ax.semilogy(fitrange, fitted_cum_counts, "--", color=histcolors[i],
Expand All @@ -391,7 +396,6 @@ for x in range(args.split_one_nbins):
maxyval = max(maxyval, cum_counts.max())
ax.grid()

logging.info('setting up labels')
for i in range(args.split_one_nbins):
for j in range(args.split_two_nbins):
axes[i,j].semilogy([args.stat_fit_threshold, args.stat_fit_threshold],
Expand All @@ -411,7 +415,7 @@ for j in range(args.split_two_nbins):

for i in range(args.split_one_nbins):
if args.split_one_nbins == 1:
axes[0, 0].set_ylabel('cumulative number', size='large')
axes[0, 0].set_ylabel('Cumulative number', size='large')
break
axes[i, 0].set_ylabel(args.split_param_one + ': ' +
(formats[args.split_param_one] + ' to ' +
Expand All @@ -421,7 +425,7 @@ for i in range(args.split_one_nbins):

fig.tight_layout(rect=(1./(args.split_two_nbins+1), 0, 1, 1))

logging.info('saving to file ' + args.output_file)
logging.info('Saving to file ' + args.output_file)
results.save_fig_with_metadata(
fig, args.output_file,
title="{}: {} histogram of single detector triggers split by"
Expand Down
37 changes: 22 additions & 15 deletions pycbc/io/hdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -956,8 +956,7 @@ def to_coinc_xml_object(self, file_name):
sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
sngl.mass1, sngl.mass2)
sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
# If exact match is not used, then take mean
# masses from the single triggers
# If exact match is not used, get masses from single triggers
sngl_mchirps += [sngl.mchirp]
sngl_mtots += [sngl.mtotal]

Expand All @@ -970,6 +969,7 @@ def to_coinc_xml_object(self, file_name):
coinc_map_row.event_id = event_id
coinc_event_map_table.append(coinc_map_row)

# Take the mean if exact match is not used
sngl_combined_mchirp = np.mean(sngl_mchirps)
sngl_combined_mtot = np.mean(sngl_mtots)

Expand All @@ -978,8 +978,8 @@ def to_coinc_xml_object(self, file_name):
coinc_inspiral_row = lsctables.CoincInspiral()
coinc_event_row.coinc_def_id = coinc_def_id
coinc_event_row.nevents = len(triggered_ifos)
# Note that simply `coinc_event_row.instruments = triggered_ifos`
# does not lead to a correct result with ligo.lw 1.7.1
# NB, `coinc_event_row.instruments = triggered_ifos does not give a
# correct result with ligo.lw 1.7.1
coinc_event_row.instruments = ','.join(sorted(triggered_ifos))
coinc_inspiral_row.instruments = triggered_ifos
coinc_event_row.time_slide_id = time_slide_id
Expand Down Expand Up @@ -1093,24 +1093,24 @@ def to_coinc_hdf_object(self, file_name):
ofd.create_dataset('network_snr', data=network_snr, dtype=np.float32)

logging.info("Triggered detectors")
# This creates a n_ifos by n_events matrix, with the ifo letter
# if the event contains a trigger from the ifo, empty string if not
# Create a n_ifos by n_events matrix, with the ifo letter if the
# event contains a trigger from the ifo, empty string if not
triggered_matrix = [[ifo[0] if v else ''
for v in snr_vals_valid[ifo][1]]
for ifo in self.ifos]
# This combines the ifo letters to make a single string per event
# Combine the ifo letters to make a single string per event
triggered_detectors = [''.join(triggered).encode('ascii')
for triggered in zip(*triggered_matrix)]
ofd.create_dataset('trig', data=triggered_detectors,
dtype='<S3')

logging.info("active detectors")
# This creates a n_ifos by n_events matrix, with the ifo letter
# if the ifo was active at the event time, empty string if not
# Create a n_ifos by n_events matrix, with the ifo letter if the
# ifo was active at the event time, empty string if not
active_matrix = [[ifo[0] if t in self.active_segments[ifo]
else '' for t in time]
for ifo in self.ifos]
# This combines the ifo letters to make a single string per event
# Combine the ifo letters to make a single string per event
active_detectors = [''.join(active_at_time).encode('ascii')
for active_at_time in zip(*active_matrix)]
ofd.create_dataset('obs', data=active_detectors,
Expand All @@ -1120,7 +1120,7 @@ def to_coinc_hdf_object(self, file_name):


class ReadByTemplate(object):
# default assignment to {} is OK for a variable used only in __init__
# Default assignment to {} is OK for a variable used only in __init__
def __init__(self, filename, bank=None, segment_name=None, veto_files=None,
gating_veto_windows={}):
self.filename = filename
Expand Down Expand Up @@ -1150,10 +1150,17 @@ def __init__(self, filename, bank=None, segment_name=None, veto_files=None,
raise ValueError("Gating veto window values must be negative "
"before gates and positive after gates.")
if not (gveto_before == 0 and gveto_after == 0):
gate_times = np.unique(
self.file[self.ifo + '/gating/auto/time'][:])
gating_veto_segs = veto.start_end_to_segments(gate_times + gveto_before,
gate_times + gveto_after).coalesce()
autogate_times = np.unique(
self.file[self.ifo + '/gating/auto/time'][:])
if self.ifo + '/gating/file' in self.file:
detgate_times = self.file[self.ifo + '/gating/file/time'][:]
else:
detgate_times = []
gate_times = np.concatenate((autogate_times, detgate_times))
gating_veto_segs = veto.start_end_to_segments(
gate_times + gveto_before,
gate_times + gveto_after
).coalesce()
self.segs = (self.segs - gating_veto_segs).coalesce()
self.valid = veto.segments_to_start_end(self.segs)

Expand Down

0 comments on commit 2c6e843

Please sign in to comment.