diff --git a/.github/workflows/sngls-search-workflow.yml b/.github/workflows/sngls-search-workflow.yml
new file mode 100644
index 00000000000..3b3e8b62583
--- /dev/null
+++ b/.github/workflows/sngls-search-workflow.yml
@@ -0,0 +1,58 @@
+name: run small singles-included search using pegasus + condor
+
+on: [push, pull_request]
+
+jobs:
+  build:
+    runs-on: ubuntu-20.04
+    steps:
+    - uses: actions/checkout@v1
+    - name: Set up Python
+      uses: actions/setup-python@v1
+      with:
+        python-version: 3.8
+    - name: install condor
+      run: |
+        wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add -
+        echo "deb http://research.cs.wisc.edu/htcondor/ubuntu/8.9/focal focal contrib" | sudo tee -a /etc/apt/sources.list
+        echo "deb-src http://research.cs.wisc.edu/htcondor/ubuntu/8.9/focal focal contrib" | sudo tee -a /etc/apt/sources.list
+        sudo apt-get update
+        sudo apt-get install minihtcondor
+        sudo systemctl start condor
+        sudo systemctl enable condor
+    - name: install pegasus
+      run: |
+        wget https://download.pegasus.isi.edu/pegasus/ubuntu/dists/bionic/main/binary-amd64/pegasus_5.0.1-1+ubuntu18_amd64.deb
+        sudo apt install ./pegasus_5.0.1-1+ubuntu18_amd64.deb
+    - run: sudo apt-get install *fftw3* intel-mkl*
+    - name: Install pycbc
+      run: |
+        python -m pip install --upgrade 'pip<22.0' setuptools
+        pip install -r requirements.txt
+        pip install .
+    - name: retrieving frame data
+      run: bash -e examples/search/get.sh
+    - name: generating template bank
+      run: bash -e examples/search/bank.sh
+    - name: generating statistic files
+      run: bash -e examples/search/stats.sh
+    - name: running workflow
+      run: |
+        cp examples/singles_search/*.ini ./
+        bash -e examples/singles_search/gen.sh
+        cp *.gwf output/
+        cd output
+        bash -e ../examples/search/submit.sh
+        python ../examples/search/check_job.py
+        find submitdir/work/ -type f -name '*.tar.gz' -delete
+    - name: store log files
+      if: always()
+      uses: actions/upload-artifact@v2
+      with:
+        name: logs
+        path: output/submitdir/work
+    - name: store result page
+      uses: actions/upload-artifact@v2
+      with:
+        name: results
+        path: html
diff --git a/bin/workflows/pycbc_create_offline_search_workflow b/bin/workflows/pycbc_create_offline_search_workflow
new file mode 100644
index 00000000000..6f6a85e2070
--- /dev/null
+++ b/bin/workflows/pycbc_create_offline_search_workflow
@@ -0,0 +1,798 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2021, Gareth Cabourn Davies (but largely based on
+# pycbc_make_coinc_search_workflow by Ian W. Harry, Alex Nitz, Marton Tapai
+# and GCD)
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 3 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""
+Program for running offline analysis through coincident and
+single-detector trigger ranking, then generating post-processing
+and plots.
+""" +import pycbc +import pycbc.version +__version__ = pycbc.version.git_verbose_msg +__date__ = pycbc.version.date +__program__ = "pycbc_offline" + +import sys +import socket +import pycbc.events, pycbc.workflow as wf +import os, argparse, logging +from six.moves import configparser as ConfigParser +from ligo import segments +import numpy, lal, datetime, itertools +from pycbc.results import create_versioning_page, static_table, layout +from pycbc.results.versioning import save_fig_with_metadata +from pycbc.results.metadata import html_escape + + +def symlink_path(f, path): + if f is None: + return + try: + os.symlink(f.storage_path, os.path.join(path, f.name)) + except OSError: + pass + +def symlink_result(f, rdir_path): + symlink_path(f, rdir[rdir_path]) + + +# Generator for producing ifo combinations +def ifo_combos(ifos): + for i in range(2, len(ifos)+1): + combinations = itertools.combinations(ifos, i) + for ifocomb in combinations: + yield ifocomb + + +# Log to the screen until we know where the output file is +logging.basicConfig(format='%(asctime)s:%(levelname)s : %(message)s', + level=logging.INFO) + +parser = argparse.ArgumentParser(description=__doc__[1:]) +parser.add_argument('--version', action='version', version=__version__) +wf.add_workflow_command_line_group(parser) +wf.add_workflow_settings_cli(parser) +args = parser.parse_args() + +container = wf.Workflow(args, args.workflow_name) +workflow = wf.Workflow(args, args.workflow_name + '-main') +finalize_workflow = wf.Workflow(args, args.workflow_name + '-finalization') + +wf.makedir(args.output_dir) +os.chdir(args.output_dir) + +rdir = layout.SectionNumber('results', ['analysis_time', + 'detector_sensitivity', + 'single_triggers', + 'coincident_triggers', + 'injections', + 'search_sensitivity', + 'open_box_result', + 'workflow', + ]) + +wf.makedir(rdir.base) +wf.makedir(rdir['workflow']) + +wf_log_file = wf.File(workflow.ifos, 'workflow-log', workflow.analysis_time, + extension='.txt', + directory=rdir['workflow']) + +logging.basicConfig(format='%(asctime)s:%(levelname)s : %(message)s', + filename=wf_log_file.storage_path, + level=logging.INFO, + filemode='w') + +logfile = logging.FileHandler(filename=wf_log_file.storage_path,mode='w') +logfile.setLevel(logging.INFO) +formatter = logging.Formatter('%(asctime)s:%(levelname)s : %(message)s') +logfile.setFormatter(formatter) +logging.getLogger('').addHandler(logfile) +logging.info("Created log file %s" % wf_log_file.storage_path) + +# put start / end time at top of summary page +time = workflow.analysis_time +s, e = int(time[0]), int(time[1]) +s_utc = str(datetime.datetime(*lal.GPSToUTC(s)[0:6])) +e_utc = str(datetime.datetime(*lal.GPSToUTC(e)[0:6])) +time_str = '
<center><p><b>GPS Interval [%s,%s). ' %(s,e)
+time_str += 'UTC Interval %s - %s. ' %(s_utc, e_utc)
+time_str += 'Interval duration = %.3f days.</b></p></center>' % (float(e-s)/86400.0)
+log_str = """
+<p>Workflow generation script created workflow in output directory: %s</p>
+<p>Workflow name is: %s</p>
+<p>Workflow generation script run on host: %s</p>
+%s+""" % (os.getcwd(), args.workflow_name, socket.gethostname(), logdata) +kwds = { 'title' : 'Workflow Generation Log', + 'caption' : "Log of the workflow script %s" % sys.argv[0], + 'cmd' :' '.join(sys.argv), } +save_fig_with_metadata(log_str, log_file_html.storage_path, **kwds) +layout.single_layout(rdir['workflow'], ([dashboard_file,log_file_html])) diff --git a/examples/singles_search/analysis.ini b/examples/singles_search/analysis.ini new file mode 100644 index 00000000000..63fefcf8db0 --- /dev/null +++ b/examples/singles_search/analysis.ini @@ -0,0 +1,224 @@ +[workflow] +file-retention-level = merged_triggers +start-time = 1186740100 +end-time = 1186743500 +h1-channel-name = H1:LOSC-STRAIN +l1-channel-name = L1:LOSC-STRAIN +v1-channel-name = V1:LOSC-STRAIN +h1-frame-file = ${resolve:./H-H1_LOSC_CLN_4_V1-1186740069-3584.gwf} +l1-frame-file = ${resolve:./L-L1_LOSC_CLN_4_V1-1186740069-3584.gwf} +v1-frame-file = ${resolve:./V-V1_LOSC_CLN_4_V1-1186739813-4096.gwf} + +[workflow-ifos] +h1 = +l1 = +v1 = + +[workflow-datafind] +datafind-method = AT_RUNTIME_FAKE_DATA +datafind-check-frames-exist = no_test +datafind-check-segment-gaps = no_test + +[workflow-segments] +segments-science = +DATA,-CBC_CAT1_VETO +segments-vetoes = +CBC_CAT2_VETO,+CBC_HW_INJ,+BURST_HW_INJ + +[datafind] +urltype = file + +[hdfinjfind] +injection-window = 2.0 +optimal-snr-column = H1:optimal_snr_H1 L1:optimal_snr_L1 V1:optimal_snr_V1 + +[workflow-tmpltbank] +tmpltbank-method = PREGENERATED_BANK +tmpltbank-pregenerated-bank = ${resolve:./bank.hdf} + +[workflow-splittable] +splittable-method = IN_WORKFLOW +splittable-exe-tag = splitbank + +[workflow-splittable-full_data] +splittable-num-banks = 1 + +[workflow-splittable-injections] +splittable-num-banks = 1 + +[workflow-matchedfilter] +matchedfilter-method = WORKFLOW_INDEPENDENT_IFOS +min-analysis-segments = 6 +min-analysis-length = 3400 +max-analysis-segments = 10 +output-type = hdf +plot-throughput = + +[workflow-coincidence] +do-trigger-fitting = + +[workflow-coincidence-full_data] +timeslide-precedence = H1, L1, V1, K1, I1 +parallelization-factor = 1 + +[workflow-coincidence-injections] +parallelization-factor = 1 + +[workflow-psd] +parallelization-factor = 1 + +[workflow-results] +max-hierarchical-removal = 1 + +[splitbank] +random-sort = + +[inspiral] +pad-data = 8 +strain-high-pass = 15 +sample-rate = 2048 +segment-length = 512 +segment-start-pad = 32 +segment-end-pad = 16 +allow-zero-padding = +taper-data = 1 +psd-estimation = median +psd-segment-length = 16 +psd-segment-stride = 8 +psd-inverse-length = 16 +psd-num-segments = 63 +psdvar-segment = 8 +psdvar-short-segment = 0.25 +psdvar-long-segment = 512 +psdvar-psd-duration = 8 +psdvar-psd-stride = 4 +psdvar-low-freq = 20 +psdvar-high-freq = 480 +autogating-threshold = 50 +autogating-cluster = 0.5 +autogating-width = 0.25 +autogating-taper = 0.25 +autogating-pad = 16 +low-frequency-cutoff = 20 +enable-bank-start-frequency = +snr-threshold = 3.8 +cluster-window = 1 +cluster-function = symmetric +chisq-snr-threshold = 5.25 +chisq-bins = 16 +newsnr-threshold = 3.8 +sgchisq-snr-threshold = 6.0 +sgchisq-locations = "mtotal>30:20-15,20-30,20-45,20-60,20-75,20-90,20-105,20-120" +filter-inj-only = +finalize-events-template-rate = 500 +injection-window = 4.5 +processing-scheme = mkl + +[single_template-h1&plot_singles_timefreq-h1&plot_qscan-h1&inspiral-h1&calculate_psd-h1] +frame-files = ${workflow|h1-frame-file} +channel-name = ${workflow|h1-channel-name} + 
+[single_template-l1&plot_singles_timefreq-l1&plot_qscan-l1&inspiral-l1&calculate_psd-l1] +frame-files = ${workflow|l1-frame-file} +channel-name = ${workflow|l1-channel-name} + +[single_template-v1&plot_singles_timefreq-v1&plot_qscan-v1&inspiral-v1&calculate_psd-v1] +frame-files = ${workflow|v1-frame-file} +channel-name = ${workflow|v1-channel-name} + +[calculate_psd] +cores = 1 +low-frequency-cutoff = ${inspiral|low-frequency-cutoff} +pad-data = ${inspiral|pad-data} +strain-high-pass = ${inspiral|strain-high-pass} +sample-rate = ${inspiral|sample-rate} +segment-length = ${inspiral|segment-length} +segment-start-pad = ${inspiral|segment-start-pad} +segment-end-pad = ${inspiral|segment-end-pad} +psd-estimation = ${inspiral|psd-estimation} +psd-segment-length = ${inspiral|psd-segment-length} +psd-segment-stride = ${inspiral|psd-segment-stride} +psd-num-segments = ${inspiral|psd-num-segments} +taper-data = ${inspiral|taper-data} +autogating-threshold = ${inspiral|autogating-threshold} +autogating-cluster = ${inspiral|autogating-cluster} +autogating-width = ${inspiral|autogating-width} +autogating-taper = ${inspiral|autogating-taper} +autogating-pad = ${inspiral|autogating-pad} + +[merge_psds] + +[hdf_trigger_merge] + +[bank2hdf] + +[fit_by_template] +fit-function = exponential +sngl-ranking = newsnr_sgveto_psdvar_scaled_threshold +stat-threshold = 4.0 +prune-param = mtotal +log-prune-param = +prune-bins = 1 +prune-number = 1 + +[fit_over_param] +fit-param = mtotal +f-lower = ${inspiral|low-frequency-cutoff} +log-param = True +smoothing-width = 0.4 + +[coinc] +coinc-threshold = 0.002 +ranking-statistic = phasetd_exp_fit_fgbg_norm +sngl-ranking = newsnr_sgveto_psdvar +randomize-template-order = +statistic-files = ${resolve:./statHL.hdf} ${resolve:./statLV.hdf} ${resolve:./statHV.hdf} ${resolve:./statHLV.hdf} + +[coinc-full_data] +timeslide-interval = 0.1 + +[coinc-full_data-2det] +loudest-keep-values = [-1:5,1:5] + +[coinc-full_data-3det] +loudest-keep-values = [-3:5,-1:5] + +[coinc-fullinj&coinc-injfull] +timeslide-interval = ${coinc-full_data|timeslide-interval} +cluster-window = ${statmap|cluster-window} +loudest-keep-values = 15.0:9999999999999 + +[coinc-injinj] + +[statmap] +max-hierarchical-removal = ${workflow-results|max-hierarchical-removal} +hierarchical-removal-against = exclusive + +[statmap&statmap_inj] +veto-window = 0.100 +cluster-window = 10.0 + +[sngls] +ranking-statistic = ${coinc|ranking-statistic} +sngl-ranking = ${coinc|sngl-ranking} +randomize-template-order = +statistic-files = ${coinc|statistic-files} + +[sngls_statmap] +max-hierarchical-removal = ${workflow-results|max-hierarchical-removal} +hierarchical-removal-against = ${statmap|hierarchical-removal-against} + +[sngls_statmap&sngls_statmap_inj] +far-calculation-method = trigger_fit +cluster-window = ${statmap|cluster-window} +fit-function = exponential +fit-threshold = H1:-10 L1:-5 V1:-12 + +[combine_statmap] +cluster-window = ${statmap|cluster-window} +fit-function = ${sngls_statmap|fit-function} +fit-threshold = ${sngls_statmap|fit-threshold} + +[foreground_censor] + +[results_page] +analysis-title = "PyCBC search" +analysis-subtitle = "Singles-included Test Search" diff --git a/examples/singles_search/executables.ini b/examples/singles_search/executables.ini new file mode 100644 index 00000000000..72b21f33ef6 --- /dev/null +++ b/examples/singles_search/executables.ini @@ -0,0 +1,88 @@ +[executables] +page_ifar_catalog = ${which:pycbc_ifar_catalog} +average_psd = ${which:pycbc_average_psd} +inj2hdf = 
${which:pycbc_inspinj2hdf} +bank2hdf = ${which:pycbc_coinc_bank2hdf} +calculate_psd = ${which:pycbc_calculate_psd} +coinc = ${which:pycbc_coinc_findtrigs} +sngls = ${which:pycbc_sngls_findtrigs} +combine_statmap = ${which:pycbc_add_statmap} +distribute_background_bins = ${which:pycbc_distribute_background_bins} +fit_by_template = ${which:pycbc_fit_sngls_by_template} +fit_over_param = ${which:pycbc_fit_sngls_over_multiparam} +foreground_censor = ${which:pycbc_foreground_censor} +hdfinjfind = ${which:pycbc_coinc_hdfinjfind} +hdf_trigger_merge = ${which:pycbc_coinc_mergetrigs} +inj_cut = ${which:pycbc_inj_cut} +injections = ${which:lalapps_inspinj} +inspiral = ${which:pycbc_inspiral} +ligolw_combine_segments = ${which:ligolw_combine_segments} +llwadd = ${which:ligolw_add} +merge_psds = ${which:pycbc_merge_psds} +optimal_snr = ${which:pycbc_optimal_snr} +optimal_snr_merge = ${which:pycbc_merge_inj_hdf} +page_foreground = ${which:pycbc_page_foreground} +page_ifar = ${which:pycbc_page_ifar} +page_injections = ${which:pycbc_page_injtable} +page_segplot = ${which:pycbc_page_segplot} +page_segtable = ${which:pycbc_page_segtable} +page_vetotable = ${which:pycbc_page_vetotable} +plot_bank = ${which:pycbc_plot_bank_bins} +plot_binnedhist = ${which:pycbc_fit_sngls_binned} +plot_coinc_snrchi = ${which:pycbc_page_coinc_snrchi} +plot_foundmissed = ${which:pycbc_page_foundmissed} +plot_gating = ${which:pycbc_plot_gating} +plot_hist = ${which:pycbc_plot_hist} +plot_qscan = ${which:pycbc_plot_qscan} +plot_range = ${which:pycbc_plot_range} +plot_segments = ${which:pycbc_page_segments} +plot_sensitivity = ${which:pycbc_page_sensitivity} +plot_singles = ${which:pycbc_plot_singles_vs_params} +plot_snrchi = ${which:pycbc_page_snrchi} +plot_snrifar = ${which:pycbc_page_snrifar} +plot_spectrum = ${which:pycbc_plot_psd_file} +exclude_zerolag = ${which:pycbc_exclude_zerolag} +plot_throughput = ${which:pycbc_plot_throughput} +results_page = ${which:pycbc_make_html_page} +splitbank = ${which:pycbc_hdf5_splitbank} +statmap = ${which:pycbc_coinc_statmap} +statmap_inj = ${which:pycbc_coinc_statmap_inj} +sngls_statmap = ${which:pycbc_sngls_statmap} +sngls_statmap_inj = ${which:pycbc_sngls_statmap_inj} +strip_injections = ${which:pycbc_strip_injections} +tmpltbank = ${which:pycbc_geom_nonspinbank} +html_snippet = ${which:pycbc_create_html_snippet} +foreground_minifollowup = ${which:pycbc_foreground_minifollowup} +injection_minifollowup = ${which:pycbc_injection_minifollowup} +singles_minifollowup = ${which:pycbc_sngl_minifollowup} +page_injinfo = ${which:pycbc_page_injinfo} +page_coincinfo = ${which:pycbc_page_coincinfo} +page_snglinfo = ${which:pycbc_page_snglinfo} +plot_trigger_timeseries = ${which:pycbc_plot_trigger_timeseries} +single_template_plot = ${which:pycbc_single_template_plot} +single_template = ${which:pycbc_single_template} +plot_singles_timefreq = ${which:pycbc_plot_singles_timefreq} +plot_snrratehist = ${which:pycbc_page_snrratehist} +plot_waveform = ${which:pycbc_plot_waveform} + +[pegasus_profile] +condor|request_memory = 1000 +condor|accounting_group = ligo.prod.o3.cbc.bbh.pycbcoffline +pycbc|primary_site = condorpool_symlink +pycbc|submit-directory = ./ + +[pegasus_profile-condorpool_shared] +pycbc|site-scratch = ./ +pycbc|unique-scratch = + +[pegasus_profile-coinc] +pycbc|site = condorpool_copy + +[pegasus_profile-sngls] +pycbc|site = condorpool_copy + +[pegasus_profile-inspiral] +pycbc|site = condorpool_shared + +[pegasus_profile-results_page] +pycbc|site = condorpool_shared diff --git 
a/examples/singles_search/gen.sh b/examples/singles_search/gen.sh new file mode 100644 index 00000000000..5208fa55fc4 --- /dev/null +++ b/examples/singles_search/gen.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +pycbc_create_offline_search_workflow \ +--workflow-name gw \ +--output-dir output \ +--config-files analysis.ini plotting.ini executables.ini injections_minimal.ini \ +--config-overrides results_page:output-path:$(pwd)/html diff --git a/examples/singles_search/injections_minimal.ini b/examples/singles_search/injections_minimal.ini new file mode 100644 index 00000000000..246ffaeec51 --- /dev/null +++ b/examples/singles_search/injections_minimal.ini @@ -0,0 +1,44 @@ +[workflow-injections] +injections-method = IN_WORKFLOW +strip-injections = +compute-optimal-snr = + +[workflow-optimal-snr] +parallelization-factor = 2 + +[inspiral] +injection-filter-rejector-chirp-time-window = 5 + +[optimal_snr] +snr-columns = ${hdfinjfind|optimal-snr-column} +f-low = ${inspiral|low-frequency-cutoff} +seg-length = ${inspiral|segment-length} +sample-rate = 2048 +cores = 1 + +[optimal_snr_merge] + +[injections] +i-distr = uniform +l-distr = random +time-interval = 25 +time-step = 50 + +[workflow-injections-bbh] + +[injections-bbh] +dchirp-distr = uniform +min-distance = 1000 +max-distance = 100000 +m-distr = log +min-mass1 = 10 +max-mass1 = 30 +min-mass2 = 10 +max-mass2 = 30 +min-mtotal = 20 +max-mtotal = 60 +waveform = IMRPhenomD +taper-injection = start +seed = 123407 +f-lower = 25 +disable-spin = diff --git a/examples/singles_search/master.sh b/examples/singles_search/master.sh new file mode 100644 index 00000000000..da0e6a2033d --- /dev/null +++ b/examples/singles_search/master.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e + +bash -e ../search/get.sh +bash -e ../search/bank.sh +bash -e ../search/stats.sh +bash -e gen.sh + +cp *.gwf output +cd output +bash -e ../../search/submit.sh +python ../../search/check_job.py diff --git a/examples/singles_search/plotting.ini b/examples/singles_search/plotting.ini new file mode 100644 index 00000000000..c4fbfc762cd --- /dev/null +++ b/examples/singles_search/plotting.ini @@ -0,0 +1,238 @@ +[pycbc_ifar_catalog] +[workflow-minifollowups] +num-events=1 + +[workflow-sngl_minifollowups] +num-sngl-events=1 + +[workflow-sngl_minifollowups-noncoinc] +section-header = loudest_noncoinc_time + +[workflow-sngl_minifollowups-all] +section-header = all + +[workflow-injection_minifollowups] +num-events=1 +subsection-suffix=with_ifar_lt_1_year + +[foreground_minifollowup] +[foreground_minifollowup-foreground] +analysis-category = foreground + +[foreground_minifollowup-background] +analysis-category = background_exc + +[singles_minifollowup] +ranking-statistic = quadsum +sngl-ranking = newsnr_sgveto_psdvar + +[singles_minifollowup-noncoinc] +non-coinc-time-only = + +[singles_minifollowup-all] + +[injection_minifollowup] +ifar-threshold = 1 + +[page_snglinfo] +ranking-statistic = quadsum +sngl-ranking = newsnr_sgveto_psdvar + +[single_template_plot] + +[single_template_plot-p1] +window = 0.1 + +[plot_trigger_timeseries] + +[html_snippet] + +[page_coincinfo] +[page_coincinfo-background] +statmap-file-subspace-name=background_exc + +[page_coincinfo-foreground] +statmap-file-subspace-name=foreground + +[plot_trigger_timeseries-snr1] +window = 1 +plot-type = snr +log-y-axis = + +[plot_trigger_timeseries-newsnr1] +window = 1 +plot-type = newsnr + +[plot_singles_timefreq] +f-low = ${inspiral|low-frequency-cutoff} +rank = newsnr +num-loudest = 1 +approximant = TaylorF2 + +[plot_qscan] 
+time-windows = 10,1 1,1 + +[single_template&plot_singles_timefreq&plot_qscan] +strain-high-pass = ${inspiral|strain-high-pass} +sample-rate = ${inspiral|sample-rate} +pad-data = ${inspiral|pad-data} + +[single_template] +segment-length = ${inspiral|segment-length} +segment-start-pad = ${inspiral|segment-start-pad} +segment-end-pad = ${inspiral|segment-end-pad} +psd-estimation = ${inspiral|psd-estimation} +psd-segment-length = ${inspiral|psd-segment-length} +psd-segment-stride = ${inspiral|psd-segment-stride} +psd-inverse-length = ${inspiral|psd-inverse-length} +chisq-bins = ${inspiral|chisq-bins} +low-frequency-cutoff = ${inspiral|low-frequency-cutoff} +approximant = IMRPhenomD +processing-scheme = ${inspiral|processing-scheme} +window = 10 +psd-num-segments = ${inspiral|psd-num-segments} +taper-data = ${inspiral|taper-data} +allow-zero-padding = +autogating-threshold = ${inspiral|autogating-threshold} +autogating-cluster = ${inspiral|autogating-cluster} +autogating-width = ${inspiral|autogating-width} +autogating-taper = ${inspiral|autogating-taper} +autogating-pad = ${inspiral|autogating-pad} +minimum-chisq-bins = 3 + +[single_template-inj_params_inverted] +injection-scale-factor=-1 + +[single_template-inj_params_noinj] +injection-scale-factor=1000000 + +[plot_waveform] +low-frequency-cutoff = ${inspiral|low-frequency-cutoff} +waveform-length = ${inspiral|segment-length} +sample-rate = ${inspiral|sample-rate} +approximant = IMRPhenomD + +[plot_sensitivity] +; method for calculating volumes +dist-bins = 50 +exclusive-sig = +integration-method = pylal + +; bin choices +[s-mchirp] +bins = 10 20 100 + +[plot_sensitivity-all_mchirp&plot_sensitivity-summary_mchirp] +bin-type = mchirp +bins = ${s-mchirp|bins} +sig-type = ifar + +[plot_sensitivity-all_mchirpvt&plot_sensitivity-summary_mchirpvt] +bin-type = mchirp +bins = ${s-mchirp|bins} +dist-type = vt +sig-type = ifar +log-dist = + +[plot_foundmissed] +far-type = exclusive + +[plot_foundmissed-sub_mchirp_grad&plot_foundmissed-all_mchirp_grad&plot_foundmissed-summary] +distance-type = decisive_optimal_snr +axis-type = mchirp +log-x = +log-distance = +gradient-far = + +[plot_foundmissed-sub_mchirp_gradm&plot_foundmissed-all_mchirp_gradm&plot_foundmissed-summarym] +distance-type = decisive_optimal_snr +axis-type = mchirp +log-x = +log-distance = +gradient-far = +missed-on-top = + +[plot_singles] +min-snr = 6 + +[plot_singles-mtotal_eta_newsnr] +x-var = mtotal +log-x = +y-var = eta +z-var = "newsnr_sgveto" +min-z = 6 + +[plot_range] + +[plot_range-summ] +mass1 = 1.4 +mass2 = 1.4 +approximant = SPAtmplt + +[contours] +newsnr-contours = 5 7 9 11 13 + +[plot_snrchi] +min-snr = 6.0 +newsnr-contours = ${contours|newsnr-contours} + +[plot_coinc_snrchi] +newsnr-contours = ${contours|newsnr-contours} + +[plot_coinc_snrchi-inj] + +[plot_hist] +[plot_hist-summ_snr] +x-var = snr +x-min = 6 +x-max = 100 + +[plot_hist-summ_newsnr] +x-var = newsnr +x-min = 6 +x-max = 15 + +[plot_binnedhist] +sngl-ranking = ${fit_by_template|sngl-ranking} +fit-function = ${fit_by_template|fit-function} +; limit the number of triggers for which duration is calculated +stat-threshold = 5.0 +;${fit_by_template|stat-threshold} +prune-param = ${fit_by_template|prune-param} +log-prune-param = +prune-bins = ${fit_by_template|prune-bins} +prune-number = ${fit_by_template|prune-number} + +[plot_binnedhist-mtotal] +bin-param = mtotal +; for template duration, need a starting frequency +f-lower = 20.0 +num-bins = 2 +bin-spacing = log + +[plot_spectrum] +psd-model = aLIGOZeroDetHighPower + 
+[page_ifar] +decimation-factor = 100 + +[page_ifar-open_box] +open-box= + +[page_vetotable] +[plot_bank] +log-x = +log-y = + +[page_segtable] +[page_segplot] +[plot_snrifar] +[page_foreground] +[page_foreground-xmlloudest] +num-coincs-to-write = 2 + +[page_injections] +[plot_segments] +[plot_gating] +[plot_snrratehist] +[plot_throughput] diff --git a/pycbc/workflow/coincidence.py b/pycbc/workflow/coincidence.py index 00511b360b4..d5303f93756 100644 --- a/pycbc/workflow/coincidence.py +++ b/pycbc/workflow/coincidence.py @@ -108,9 +108,31 @@ def create_node(self, trig_files, bank_file, stat_files, veto_file, node.new_output_file_opt(seg, '.hdf', '--output-file', tags=tags) return node +class PyCBCFindSnglsExecutable(Executable): + """Calculate single-detector ranking statistic for triggers""" + + current_retention_level = Executable.ALL_TRIGGERS + file_input_options = ['--statistic-files'] + def create_node(self, trig_files, bank_file, stat_files, veto_file, + veto_name, template_str, tags=None): + if tags is None: + tags = [] + segs = trig_files.get_times_covered_by_files() + seg = segments.segment(segs[0][0], segs[-1][1]) + node = Node(self) + node.add_input_opt('--template-bank', bank_file) + node.add_input_list_opt('--trigger-files', trig_files) + if len(stat_files) > 0: + node.add_input_list_opt('--statistic-files', stat_files) + if veto_file is not None: + node.add_input_opt('--veto-files', veto_file) + node.add_opt('--segment-name', veto_name) + node.add_opt('--template-fraction-range', template_str) + node.new_output_file_opt(seg, '.hdf', '--output-file', tags=tags) + return node class PyCBCStatMapExecutable(Executable): - """Calculate FAP, IFAR, etc""" + """Calculate FAP, IFAR, etc for coincs""" current_retention_level = Executable.MERGED_TRIGGERS def create_node(self, coinc_files, ifos, tags=None): @@ -125,9 +147,25 @@ def create_node(self, coinc_files, ifos, tags=None): node.new_output_file_opt(seg, '.hdf', '--output-file', tags=tags) return node +class PyCBCSnglsStatMapExecutable(Executable): + """Calculate FAP, IFAR, etc for singles""" + + current_retention_level = Executable.MERGED_TRIGGERS + def create_node(self, sngls_files, ifo, tags=None): + if tags is None: + tags = [] + segs = sngls_files.get_times_covered_by_files() + seg = segments.segment(segs[0][0], segs[-1][1]) + + node = Node(self) + node.add_input_list_opt('--sngls-files', sngls_files) + node.add_opt('--ifos', ifo) + node.new_output_file_opt(seg, '.hdf', '--output-file', tags=tags) + return node + class PyCBCStatMapInjExecutable(Executable): - """Calculate FAP, IFAR, etc""" + """Calculate FAP, IFAR, etc for coincs for injections""" current_retention_level = Executable.MERGED_TRIGGERS def create_node(self, zerolag, full_data, @@ -151,6 +189,25 @@ def create_node(self, zerolag, full_data, node.new_output_file_opt(seg, '.hdf', '--output-file', tags=tags) return node +class PyCBCSnglsStatMapInjExecutable(Executable): + """Calculate FAP, IFAR, etc for singles for injections""" + + current_retention_level = Executable.MERGED_TRIGGERS + def create_node(self, sngls_files, background_file, + ifos, tags=None): + if tags is None: + tags = [] + segs = sngls_files.get_times_covered_by_files() + seg = segments.segment(segs[0][0], segs[-1][1]) + + node = Node(self) + node.add_input_list_opt('--sngls-files', sngls_files) + node.add_input_opt('--full-data-background', background_file) + + node.add_opt('--ifos', ifos) + node.new_output_file_opt(seg, '.hdf', '--output-file', tags=tags) + return node + class 
PyCBCHDFInjFindExecutable(Executable): """Find injections in the hdf files output""" @@ -373,6 +430,18 @@ def setup_statmap(workflow, ifos, coinc_files, out_dir, tags=None): return stat_node.output_file +def setup_sngls_statmap(workflow, ifo, sngls_files, out_dir, tags=None): + tags = [] if tags is None else tags + + statmap_exe = PyCBCSnglsStatMapExecutable(workflow.cp, 'sngls_statmap', + ifos=ifo, + tags=tags, out_dir=out_dir) + + stat_node = statmap_exe.create_node(sngls_files, ifo) + workflow.add_node(stat_node) + return stat_node.output_file + + def setup_statmap_inj(workflow, ifos, coinc_files, background_file, out_dir, tags=None): tags = [] if tags is None else tags @@ -392,6 +461,23 @@ def setup_statmap_inj(workflow, ifos, coinc_files, background_file, return stat_node.output_files[0] +def setup_sngls_statmap_inj(workflow, ifo, sngls_inj_files, background_file, + out_dir, tags=None): + tags = [] if tags is None else tags + + statmap_exe = PyCBCSnglsStatMapInjExecutable(workflow.cp, + 'sngls_statmap_inj', + ifos=ifo, + tags=tags, + out_dir=out_dir) + + stat_node = statmap_exe.create_node(sngls_inj_files, + background_file, + ifo) + workflow.add_node(stat_node) + return stat_node.output_files[0] + + def setup_interval_coinc_inj(workflow, hdfbank, full_data_trig_files, inj_trig_files, stat_files, background_file, veto_file, veto_name, @@ -504,6 +590,74 @@ def setup_interval_coinc(workflow, hdfbank, trig_files, stat_files, return statmap_files +def setup_sngls(workflow, hdfbank, trig_files, stat_files, + veto_file, veto_name, out_dir, tags=None): + """ + This function sets up getting statistic values for single-detector triggers + """ + ifos, _ = trig_files.categorize_by_attr('ifo') + findsngls_exe = PyCBCFindSnglsExecutable(workflow.cp, 'sngls', ifos=ifos, + tags=tags, out_dir=out_dir) + # Wall time knob and memory knob + factor = int(workflow.cp.get_opt_tags('workflow-coincidence', + 'parallelization-factor', + [findsngls_exe.ifo_string] + tags)) + + statmap_files = [] + bg_files = FileList() + for i in range(factor): + group_str = '%s/%s' % (i, factor) + sngls_node = findsngls_exe.create_node(trig_files, hdfbank, + stat_files, + veto_file, veto_name, + group_str, + tags=['JOB'+str(i)]) + bg_files += sngls_node.output_files + workflow.add_node(sngls_node) + + statmap_files = setup_sngls_statmap(workflow, ifos[0], bg_files, + out_dir, tags=tags) + + logging.info('...leaving coincidence ') + return statmap_files + + +def setup_sngls_inj(workflow, hdfbank, inj_trig_files, + stat_files, background_file, veto_file, veto_name, + out_dir, tags=None): + """ + This function sets up getting statistic values for single-detector triggers + from injections + """ + ifos, _ = inj_trig_files.categorize_by_attr('ifo') + findsnglsinj_exe = PyCBCFindSnglsExecutable(workflow.cp, 'sngls', ifos=ifos, + tags=tags, out_dir=out_dir) + # Wall time knob and memory knob + exe_str_tags = [findsnglsinj_exe.ifo_string] + tags + factor = int(workflow.cp.get_opt_tags('workflow-coincidence', + 'parallelization-factor', + exe_str_tags)) + + statmap_files = [] + bg_files = FileList() + for i in range(factor): + group_str = '%s/%s' % (i, factor) + sngls_node = findsnglsinj_exe.create_node(inj_trig_files, hdfbank, + stat_files, + veto_file, veto_name, + group_str, + tags=['JOB'+str(i)]) + bg_files += sngls_node.output_files + workflow.add_node(sngls_node) + + statmap_files = setup_sngls_statmap_inj(workflow, ifos[0], bg_files, + background_file, + out_dir, tags=tags) + + logging.info('...leaving coincidence ') + return 
statmap_files
+
+
 def select_files_by_ifo_combination(ifocomb, insps):
     """
     This function selects single-detector files ('insps') for a given ifo combination
diff --git a/tools/test_sngls_search_workflow.sh b/tools/test_sngls_search_workflow.sh
new file mode 100755
index 00000000000..f195a9d2d44
--- /dev/null
+++ b/tools/test_sngls_search_workflow.sh
@@ -0,0 +1,73 @@
+#!/bin/bash
+set -e
+echo -e "\\n>> [`date`] Testing pycbc_create_offline_search_workflow"
+
+VENV_PATH=${1}
+TRAVIS_TAG=${2}
+
+if [ "x${VENV_PATH}" == "x" ] ; then
+  echo -e "\\n>> [`date`] Error: VENV_PATH was not passed to script or is empty"
+  exit 1
+fi
+
+if [ "x${TRAVIS_TAG}" == "x" ] ; then
+  echo -e "\\n>> [`date`] Error: TRAVIS_TAG was not passed to script or is empty"
+  exit 1
+fi
+
+echo -e "\\n>> [`date`] Entering virtual environment $VENV_PATH"
+source ${VENV_PATH}/bin/activate
+
+CONFIG_PATH="https://raw.githubusercontent.com/ligo-cbc/pycbc-config/${TRAVIS_TAG}/test"
+echo -e "\\n>> [`date`] Using config files from ${CONFIG_PATH}"
+
+echo -e "\\n>> [`date`] Creating test workflow"
+UUID=`uuidgen`
+WORKFLOW_NAME=test-sngls-workflow-$UUID
+OUTPUT_PATH=`pwd`/public_html/test_sngls_workflow/${WORKFLOW_NAME}
+export LIGO_DATAFIND_SERVER='128.230.190.43:80'
+
+mkdir $WORKFLOW_NAME
+pushd $WORKFLOW_NAME
+
+# Doesn't need to be a valid bank file, just needs to exist
+echo "DUMMY BANK FILE" > bank.hdf
+echo "DUMMY STAT FILE" > statHL.hdf
+echo "DUMMY STAT FILE" > statLV.hdf
+echo "DUMMY STAT FILE" > statHV.hdf
+echo "DUMMY STAT FILE" > statHLV.hdf
+
+echo -e "\\n>> [`date`] Building test workflow ${WORKFLOW_NAME}"
+
+pycbc_create_offline_search_workflow \
+--workflow-name ${WORKFLOW_NAME} --output-dir output \
+--config-files \
+/pycbc/examples/singles_search/analysis.ini \
+/pycbc/examples/singles_search/plotting.ini \
+/pycbc/examples/singles_search/injections_minimal.ini \
+/pycbc/examples/singles_search/executables.ini \
+--config-overrides \
+"results_page:output-path:../../../html"
+
+pushd output
+
+for workflow in *.dax
+do
+  echo -e "\\n>> [`date`] Validating workflow $workflow"
+  pegasus-dax-validator $workflow
+done
+
+echo -e "\\n>> [`date`] Planning test workflow"
+pycbc_submit_dax \
+  --force-no-accounting-group \
+  --dax ${WORKFLOW_NAME}.dax \
+  --no-create-proxy \
+  --no-submit \
+  --no-grid
+
+popd
+popd
+
+echo -e "\\n>> [`date`] Test workflow validation complete"
+
+exit 0
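
The part of pycbc_create_offline_search_workflow that calls the new single-detector helpers is not included in the excerpt above. As orientation only, a call pattern consistent with the signatures of setup_sngls and setup_sngls_inj added to pycbc/workflow/coincidence.py in this patch might look like the sketch below; workflow, hdfbank, insps (full-data merged single-detector triggers), inj_insps_by_tag, statfiles, veto_file and veto_name are placeholder names assumed to exist in the generator script, not identifiers taken from this diff.

    import pycbc.workflow as wf
    from pycbc.workflow.coincidence import setup_sngls, setup_sngls_inj

    def add_singles_ranking(workflow, hdfbank, insps, inj_insps_by_tag,
                            statfiles, veto_file, veto_name):
        """Placeholder wiring: rank full-data single-detector triggers per
        ifo, then rank each injection set against the matching full-data
        background produced in the first loop."""
        sngls_bg = {}
        for ifo in workflow.ifos:
            # keep only this detector's merged trigger files
            ifo_trigs = wf.FileList([f for f in insps if f.ifo == ifo])
            sngls_bg[ifo] = setup_sngls(workflow, hdfbank, ifo_trigs,
                                        statfiles, veto_file, veto_name,
                                        'full_data', tags=['full_data'])
        for inj_tag, inj_insps in inj_insps_by_tag.items():
            for ifo in workflow.ifos:
                ifo_inj = wf.FileList([f for f in inj_insps if f.ifo == ifo])
                setup_sngls_inj(workflow, hdfbank, ifo_inj, statfiles,
                                sngls_bg[ifo], veto_file, veto_name,
                                inj_tag, tags=[inj_tag])
        return sngls_bg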
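
The group_str = '%s/%s' % (i, factor) strings built in setup_sngls and setup_sngls_inj are passed to pycbc_sngls_findtrigs as --template-fraction-range, which is how the parallelization-factor option splits the work across jobs. The helper below is only an illustration of that kind of "i/factor" partition of a template bank into roughly equal contiguous chunks; it is not PyCBC's actual parser.

    import numpy as np

    def template_fraction_range(num_templates, rangestr):
        # 'i/factor' -> index range of the i-th of `factor` contiguous chunks
        part, pieces = map(int, rangestr.split('/'))
        edges = np.linspace(0, num_templates, pieces + 1).astype(int)
        return edges[part], edges[part + 1]

    # e.g. splitting a 1000-template bank across parallelization-factor = 4 jobs
    for i in range(4):
        print(i, template_fraction_range(1000, '%s/%s' % (i, 4)))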
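
In analysis.ini, the [sngls_statmap&sngls_statmap_inj] section sets far-calculation-method = trigger_fit with fit-function = exponential and a per-detector fit-threshold, i.e. single-detector significance is assigned by extrapolating an exponential fit to the tail of the background statistic distribution rather than by time slides. The snippet below is a toy illustration of that idea (maximum-likelihood decay rate for the tail, then the extrapolated rate of louder triggers); it is not the code used by pycbc_sngls_statmap.

    import numpy as np

    def far_from_exponential_fit(stats, fit_threshold, livetime, x):
        """Estimate the false-alarm rate at ranking statistic x from an
        exponential fit to the background tail above fit_threshold."""
        tail = stats[stats > fit_threshold]
        alpha = 1.0 / np.mean(tail - fit_threshold)       # MLE decay rate
        n_louder = len(tail) * np.exp(-alpha * (x - fit_threshold))
        return n_louder / livetime                        # triggers per second

    # toy usage: synthetic exponential background over one day of livetime
    rng = np.random.default_rng(0)
    bg = 6.0 + rng.exponential(scale=0.5, size=100_000)
    print(far_from_exponential_fit(bg, fit_threshold=7.0, livetime=86400.0, x=9.0))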