Generate and use search sky-grids in pygrb workflows #4985

Merged: 7 commits, Dec 19, 2024
7 changes: 7 additions & 0 deletions bin/pygrb/pycbc_make_offline_grb_workflow
@@ -248,6 +248,13 @@ if wflow.cp.has_option("workflow-segments", "segments-vetoes"):
tags=['veto'])
datafind_veto_files.append(veto_file)

# Generate sky grid if needed
skygrid_file = None
if wflow.cp.has_option("workflow", "sky-error"):
logging.info("Generating sky-grid file.")
skygrid_file = _workflow.make_skygrid_node(wflow, df_dir, tags=['SEARCH'])
datafind_veto_files.extend(skygrid_file)

# Config file consistency check for IPN GRBs
if wflow.cp.has_option("workflow-inspiral", "ipn-search-points") \
and wflow.cp.has_option("workflow-injections", "ipn-sim-points"):
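The new block runs only when the [workflow] section defines a sky-error option; otherwise the generator behaves as before and no sky-grid file is produced. Below is a minimal sketch of that gating using plain configparser; only the option names trigger-name, ra, dec and sky-error come from this workflow, and all values are illustrative placeholders.

import configparser

# Illustrative configuration; values are placeholders, not a tested setup.
cfg = configparser.ConfigParser()
cfg.read_string("""
[workflow]
trigger-name = 230307A
ra = 1.234
dec = -0.567
sky-error = 0.05
""")

if cfg.has_option("workflow", "sky-error"):
    print("sky-error set: make_skygrid_node is called and its output "
          "is appended to datafind_veto_files.")
else:
    print("no sky-error: the analysis keeps using the single ra/dec values.")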
22 changes: 22 additions & 0 deletions pycbc/workflow/grb_utils.py
@@ -238,6 +238,28 @@ def get_sky_grid_scale(
return out


def make_skygrid_node(workflow, out_dir, tags=None):
"""
Adds a job to the workflow to produce the PyGRB search skygrid."""

tags = [] if tags is None else tags

# Initialize job node
grb_name = workflow.cp.get('workflow', 'trigger-name')
extra_tags = ['GRB'+grb_name]
node = Executable(workflow.cp, 'make_sky_grid',
ifos=workflow.ifos, out_dir=out_dir,
tags=tags+extra_tags).create_node()
node.add_opt('--instruments', ' '.join(workflow.ifos))
node.new_output_file_opt(workflow.analysis_time, '.h5', '--output',
tags=extra_tags, store_file=True)

# Add job node to the workflow
workflow += node

return node.output_files


def generate_tc_prior(wflow, tc_path, buffer_seg):
"""
Generate the configuration file for the prior on the coalescence
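make_skygrid_node combines the caller's tags with a 'GRB'+trigger-name tag and returns the node's output FileList (a single .h5 sky-grid file). A small sketch of the tag handling follows; the 'SEARCH' tag and the 'GRB' prefix come from this change, while the GRB name itself is a placeholder.

# Placeholder values; only the tag construction mirrors make_skygrid_node.
grb_name = "230307A"           # would be read from [workflow] trigger-name
tags = ["SEARCH"]              # passed in by pycbc_make_offline_grb_workflow
extra_tags = ["GRB" + grb_name]
print(tags + extra_tags)       # ['SEARCH', 'GRB230307A']
# Because the Executable is configured under the name 'make_sky_grid', that
# string ends up in the output File's description, which is what the
# job-setup code in the next file matches on.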
15 changes: 12 additions & 3 deletions pycbc/workflow/jobsetup.py
@@ -277,6 +277,7 @@ def multi_ifo_coherent_job_setup(workflow, out_files, curr_exe_job,
curr_out_files = FileList([])
ipn_sky_points = None
bank_veto = None
skygrid_file = None
input_files = FileList(datafind_outs)
for f in datafind_outs:
if 'IPN_SKY_POINTS' in f.description:
@@ -287,6 +288,9 @@ def multi_ifo_coherent_job_setup(workflow, out_files, curr_exe_job,
elif 'INPUT_BANK_VETO_BANK' in f.description:
bank_veto = f
input_files.remove(f)
elif 'make_sky_grid' in f.description:
skygrid_file = f
input_files.remove(f)

split_bank_counter = 0

@@ -296,7 +300,8 @@ def multi_ifo_coherent_job_setup(workflow, out_files, curr_exe_job,
tag.append(split_bank.tag_str)
node = curr_exe_job.create_node(data_seg, job_valid_seg,
parent=split_bank, dfParents=input_files,
bankVetoBank=bank_veto, ipn_file=ipn_sky_points,
bankVetoBank=bank_veto,
skygrid_file=skygrid_file, ipn_file=ipn_sky_points,
slide=slide_dict, tags=tag)
workflow.add_node(node)
split_bank_counter += 1
@@ -310,7 +315,7 @@ def multi_ifo_coherent_job_setup(workflow, out_files, curr_exe_job,
node = curr_exe_job.create_node(data_seg, job_valid_seg,
parent=split_bank, inj_file=inj_file, tags=tag,
dfParents=input_files, bankVetoBank=bank_veto,
ipn_file=ipn_sky_points)
skygrid_file=skygrid_file, ipn_file=ipn_sky_points)
workflow.add_node(node)
split_bank_counter += 1
curr_out_files.extend(node.output_files)
@@ -670,7 +675,8 @@ def __init__(self, cp, name, ifo=None, injection_file=None,
self.num_threads = 1

def create_node(self, data_seg, valid_seg, parent=None, inj_file=None,
dfParents=None, bankVetoBank=None, ipn_file=None,
dfParents=None, bankVetoBank=None,
skygrid_file=None, ipn_file=None,
slide=None, tags=None):
if tags is None:
tags = []
@@ -718,6 +724,9 @@ def create_node(self, data_seg, valid_seg, parent=None, inj_file=None,
node.add_input(frame_file)
node.add_arg(frame_arg)

if skygrid_file is not None:
node.add_input_opt('--sky-grid', skygrid_file)

if ipn_file is not None:
node.add_input_opt('--sky-positions-file', ipn_file)

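In multi_ifo_coherent_job_setup the sky-grid file is singled out by its description and removed from the datafind parents, so that create_node can attach it explicitly through the new skygrid_file argument (added as --sky-grid). A stand-in sketch of that filtering is shown below; FakeFile only mimics the description attribute of a workflow File and is not pycbc API, and the descriptions are illustrative.

from dataclasses import dataclass

@dataclass
class FakeFile:
    description: str

# Illustrative descriptions; real ones are built from executable names and tags.
datafind_outs = [
    FakeFile("H1-DATAFIND"),
    FakeFile("H1L1V1-make_sky_grid_SEARCH_GRB230307A"),
]

skygrid_file = None
input_files = list(datafind_outs)
for f in datafind_outs:
    if "make_sky_grid" in f.description:
        skygrid_file = f       # later passed to create_node(..., skygrid_file=...)
        input_files.remove(f)  # not treated as an ordinary datafind parent

print(skygrid_file.description, len(input_files))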
24 changes: 15 additions & 9 deletions pycbc/workflow/matched_filter.py
@@ -202,8 +202,9 @@ def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs,
science_segs : ifo-keyed dictionary of ligo.segments.segmentlist instances
The list of times that are being analysed in this workflow.
datafind_outs : pycbc.workflow.core.FileList
An FileList of the datafind files that are needed to obtain the
data used in the analysis.
A FileList of the datafind files that are needed to obtain the
data used in the analysis, plus (if requested by the user) the
vetoes File and the search sky-grid File.
tmplt_banks : pycbc.workflow.core.FileList
An FileList of the template bank files that will serve as input
in this stage.
@@ -242,13 +243,18 @@ def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs,

if match_fltr_exe == 'pycbc_multi_inspiral':
exe_class = select_matchedfilter_class(match_fltr_exe)
# Right ascension + declination must be provided in radians
cp.set('inspiral', 'ra',
cp.get('workflow', 'ra'))
cp.set('inspiral', 'dec',
cp.get('workflow', 'dec'))
# At the moment we aren't using sky grids, but when we do this code
# might be used then.
bool_sg = ['make_sky_grid' in f.description for f in datafind_outs]
n_sg = sum(bool_sg)
if n_sg == 0:
cp.set('inspiral', 'ra',
cp.get('workflow', 'ra'))
cp.set('inspiral', 'dec',
cp.get('workflow', 'dec'))
elif n_sg > 1:
msg = f'{datafind_outs} has {n_sg} sky-grid files, '
msg += 'instead of only one.'
raise RuntimeError(msg)
# Code lines for Fermi GBM are commented out for the time being
# from pycbc.workflow.grb_utils import get_sky_grid_scale
# if cp.has_option("jitter_skyloc", "apply-fermi-error"):
# cp.set('inspiral', 'sky-error',
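The replacement logic counts the sky-grid files among datafind_outs: zero means pycbc_multi_inspiral keeps the fixed ra/dec taken from [workflow], more than one raises an error, and exactly one means the file is handed over via --sky-grid by the job-setup code above. A compact sketch of that decision, with illustrative descriptions and a helper name that is not part of the change:

def sky_grid_mode(descriptions):
    """Return which branch of the new logic applies; mirrors the n_sg check."""
    n_sg = sum("make_sky_grid" in d for d in descriptions)
    if n_sg == 0:
        return "fixed-ra-dec"   # the cp.set('inspiral', 'ra'/'dec', ...) branch
    if n_sg > 1:
        raise RuntimeError(f"Found {n_sg} sky-grid files, expected at most one.")
    return "sky-grid"           # the File is passed on through --sky-grid

print(sky_grid_mode(["H1-DATAFIND"]))                                   # fixed-ra-dec
print(sky_grid_mode(["H1-DATAFIND", "H1L1-make_sky_grid_GRB230307A"]))  # sky-grid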