From da9df18dd0b18eee46f76074d770aabc6b038006 Mon Sep 17 00:00:00 2001 From: Alexander Harvey Nitz Date: Wed, 9 Dec 2020 15:26:42 +0100 Subject: [PATCH 1/3] Add github action test with full run of search workflow --- .github/workflows/search-workflow.yml | 61 +++++++ bin/all_sky_search/pycbc_add_statmap | 8 +- bin/all_sky_search/pycbc_coinc_statmap | 3 +- bin/all_sky_search/pycbc_dtphase | 6 +- bin/bank/pycbc_brute_bank | 88 ++++----- bin/plotting/pycbc_ifar_catalog | 9 +- bin/pycbc_submit_dax | 88 ++++----- examples/search/analysis.ini | 203 +++++++++++++++++++++ examples/search/bank.sh | 21 +++ examples/search/check_job.py | 36 ++++ examples/search/executables.ini | 66 +++++++ examples/search/gen.sh | 7 + examples/search/get.sh | 7 + examples/search/injections_minimal.ini | 43 +++++ examples/search/master.sh | 12 ++ examples/search/plotting.ini | 236 +++++++++++++++++++++++++ examples/search/stats.sh | 44 +++++ examples/search/submit.sh | 6 + pycbc/events/stat.py | 4 +- pycbc/io/hdf.py | 2 +- pycbc/waveform/bank.py | 2 +- pycbc/workflow/coincidence.py | 2 - pycbc/workflow/jobsetup.py | 1 - pycbc/workflow/plotting.py | 3 - 24 files changed, 855 insertions(+), 103 deletions(-) create mode 100644 .github/workflows/search-workflow.yml create mode 100644 examples/search/analysis.ini create mode 100644 examples/search/bank.sh create mode 100644 examples/search/check_job.py create mode 100644 examples/search/executables.ini create mode 100644 examples/search/gen.sh create mode 100644 examples/search/get.sh create mode 100644 examples/search/injections_minimal.ini create mode 100644 examples/search/master.sh create mode 100644 examples/search/plotting.ini create mode 100644 examples/search/stats.sh create mode 100644 examples/search/submit.sh diff --git a/.github/workflows/search-workflow.yml b/.github/workflows/search-workflow.yml new file mode 100644 index 00000000000..29177e2d710 --- /dev/null +++ b/.github/workflows/search-workflow.yml @@ -0,0 +1,61 @@ +name: run 
small search using pegasus + condor + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v1 + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - name: install condor + run: | + wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add - + echo "deb http://research.cs.wisc.edu/htcondor/ubuntu/8.9/focal focal contrib" | sudo tee -a /etc/apt/sources.list + echo "deb-src http://research.cs.wisc.edu/htcondor/ubuntu/8.9/focal focal contrib" | sudo tee -a /etc/apt/sources.list + sudo apt-get update + sudo apt-get install minihtcondor + sudo systemctl start condor + sudo systemctl enable condor + - name: install pegasus + run: | + wget https://download.pegasus.isi.edu/pegasus/ubuntu/dists/bionic/main/binary-amd64/pegasus_4.9.3-1+ubuntu18_amd64.deb + sudo apt install ./pegasus_4.9.3-1+ubuntu18_amd64.deb + - run: sudo apt-get install *fftw3* intel-mkl* + - name: Install pycbc + run: | + python -m pip install --upgrade pip setuptools + pip install -r requirements.txt + pip install . 
+ - name: retrieving frame data + run: bash -e examples/search/get.sh + - name: generating template bank + run: bash -e examples/search/bank.sh + - name: generating statistic files + run: bash -e examples/search/stats.sh + - name: generating workflow + run: | + cp examples/search/*.ini ./ + bash -e examples/search/gen.sh + cp *.gwf output/ + - name: running workflow + run: | + condor_status + cd output + bash -e ../examples/search/submit.sh + ./status + python ../examples/search/check_job.py + - name: store log files + if: always() + uses: actions/upload-artifact@v2 + with: + name: logs + path: output/submitdir/work/**/*.out.001 + - name: store result page + uses: actions/upload-artifact@v2 + with: + name: results + path: html diff --git a/bin/all_sky_search/pycbc_add_statmap b/bin/all_sky_search/pycbc_add_statmap index 37d1c17c6b8..f8379f4484e 100755 --- a/bin/all_sky_search/pycbc_add_statmap +++ b/bin/all_sky_search/pycbc_add_statmap @@ -98,10 +98,10 @@ logging.info('Combining foreground segments') # Convert segmentlistdict to a list ('seglists') of segmentlists # then np.sum(seglists, axis=0) does seglists[0] + seglists[1] + ... 
-if len(indiv_segs) > 1: - foreground_segs = np.sum(list(indiv_segs.values()), axis=0) -else: - foreground_segs = indiv_segs.values()[0] +foreground_segs = segments.segmentlist([]) +for segs in indiv_segs.values(): + foreground_segs += segs + f.attrs['foreground_time'] = abs(foreground_segs) # Output the segments which are in *any* type of coincidence diff --git a/bin/all_sky_search/pycbc_coinc_statmap b/bin/all_sky_search/pycbc_coinc_statmap index 5da5993aa9e..e1e2a2bc652 100755 --- a/bin/all_sky_search/pycbc_coinc_statmap +++ b/bin/all_sky_search/pycbc_coinc_statmap @@ -362,7 +362,8 @@ while numpy.any(louder_foreground == 0): indices_to_rm = [] for ifo in args.ifos: indices_to_rm = numpy.concatenate([indices_to_rm, ind_to_rm[ifo]]) - all_trigs = all_trigs.remove(indices_to_rm) + + all_trigs = all_trigs.remove(indices_to_rm.astype(int)) logging.info("We have %s triggers after hierarchical removal." % len(all_trigs.stat)) # Step 4: Re-cluster the triggers and calculate the inclusive ifar/fap diff --git a/bin/all_sky_search/pycbc_dtphase b/bin/all_sky_search/pycbc_dtphase index cb5179f3909..27b3fbe0c49 100644 --- a/bin/all_sky_search/pycbc_dtphase +++ b/bin/all_sky_search/pycbc_dtphase @@ -220,15 +220,15 @@ for ifo0 in args.ifos: logging.info('smoothing done: %s', len(weights)) logging.info('converting to numpy arrays and normalizing') - keys = np.array(weights.keys()) - values = np.array(weights.values()) + keys = np.array(list(weights.keys())) + values = np.array(list(weights.values())) values /= values.max() logging.info('Removing bins outside of SNR ratio limits') n_precut = len(keys) keep = None for i in range(len(args.ifos)-1): - srbin = np.array(zip(*keys)[i * 3 + 2]) + srbin = np.array(list(zip(*keys))[i * 3 + 2]) if keep is None: keep = (srbin <= srbmax) & (srbin >= srbmin) else: diff --git a/bin/bank/pycbc_brute_bank b/bin/bank/pycbc_brute_bank index eb69bdc3664..d8c7898c4a2 100644 --- a/bin/bank/pycbc_brute_bank +++ b/bin/bank/pycbc_brute_bank @@ -53,7 
+53,7 @@ if args.fixed_params: class Shrinker(object): def __init__(self, data): - self.data = data + self.data = data def pop(self): if len(self.data) == 0: @@ -72,7 +72,7 @@ class TriangleBank(object): def __len__(self): return len(self.waveforms) - + def activelen(self): i = 0 for w in self.waveforms: @@ -97,7 +97,7 @@ class TriangleBank(object): def key(self, k): return numpy.array([p.params[k] for p in self.waveforms]) - + def sigma_match_bound(self, sig): if not hasattr(self, 'sigma'): self.sigma = None @@ -111,10 +111,10 @@ class TriangleBank(object): if self.r is None or len(self.r) != len(self): self.r = numpy.arange(0, len(self)) return self.r - + def culltau0(self, threshold): cull = numpy.where(self.tau0() < threshold)[0] - + class dumb(object): pass for c in cull: @@ -123,7 +123,7 @@ class TriangleBank(object): d.params = self.waveforms[c].params d.s = self.waveforms[c].s self.waveforms[c] = d - + def tau0(self): if not hasattr(self, 't0'): @@ -132,12 +132,12 @@ class TriangleBank(object): self.t0 = numpy.array([h.tau0 for h in self]) return self.t0 - def __contains__(self, hp): - mmax = 0 + def __contains__(self, hp): + mmax = 0 mnum = 0 #Apply sigmas maximal match. 
if args.enable_sigma_bound: - matches = self.sigma_match_bound(hp.s) + matches = self.sigma_match_bound(hp.s) r = self.range()[matches > hp.threshold] else: matches = numpy.ones(len(self)) @@ -148,7 +148,7 @@ class TriangleBank(object): #Apply tua0 threshold if args.tau0_threshold: hp.tau0 = pycbc.conversions.tau0_from_mass1_mass2( - hp.params['mass1'], + hp.params['mass1'], hp.params['mass2'], 15.0) hp.tbin = int(hp.tau0 / args.tau0_threshold) @@ -160,7 +160,7 @@ class TriangleBank(object): mtau = len(r) # Try to do some actual matches - inc = Shrinker(r*1) + inc = Shrinker(r*1) while 1: j = inc.pop() if j is None: @@ -175,7 +175,7 @@ class TriangleBank(object): m = hp.gen.match(hp, hc) matches[j] = m mnum += 1 - + # Update bounding match values, apply triangle inequality maxmatches = hc.matches - m + 1.10 update = numpy.where(maxmatches < matches[hc.indices])[0] @@ -184,11 +184,11 @@ class TriangleBank(object): # Update where to calculate matches skip_threshold = 1 - (1 - hp.threshold) * 2.0 inc.data = inc.data[matches[inc.data] > skip_threshold] - + if m > hp.threshold: return True if m > mmax: - mmax = m + mmax = m def check_params(self, gen, params, threshold): num_tried = 0 @@ -201,7 +201,7 @@ class TriangleBank(object): except Exception as err: print(err) continue - + hp.gen = gen hp.threshold = threshold if hp not in self: @@ -215,7 +215,7 @@ class GenUniformWaveform(object): self.f_lower = f_lower self.delta_f = 1.0 / buffer_length tlen = int(buffer_length * sample_rate) - self.flen = tlen / 2 + 1 + self.flen = tlen // 2 + 1 psd = pycbc.psd.from_cli(args, self.flen, self.delta_f, self.f_lower) self.kmin = int(f_lower * buffer_length) self.w = ((1.0 / psd[self.kmin:-1]) ** 0.5).astype(numpy.float32) @@ -229,16 +229,16 @@ class GenUniformWaveform(object): def generate(self, **kwds): kwds.update(fdict) if kwds['approximant'] in pycbc.waveform.fd_approximants(): - hp, hc = pycbc.waveform.get_fd_waveform(delta_f=self.delta_f, + hp, hc = 
pycbc.waveform.get_fd_waveform(delta_f=self.delta_f, f_lower=self.f_lower, **kwds) if 'fratio' in kwds: hp = hc * kwds['fratio'] + hp * (1 - kwds['fratio']) else: dt = 1.0 / args.sample_rate hp = pycbc.waveform.get_waveform_filter( - pycbc.types.zeros(self.flen, dtype=numpy.complex64), + pycbc.types.zeros(self.flen, dtype=numpy.complex64), delta_f=self.delta_f, delta_t=dt, - f_lower=self.f_lower, **kwds) + f_lower=self.f_lower, **kwds) hp.resize(self.flen) hp = hp.astype(numpy.complex64) @@ -264,7 +264,7 @@ else: size = int(1.0 / tolerance) -gen = GenUniformWaveform(args.buffer_length, +gen = GenUniformWaveform(args.buffer_length, args.sample_rate, args.low_frequency_cutoff) bank = TriangleBank() @@ -276,7 +276,7 @@ if args.input_file: def draw(rtype): params = {} - + if rtype == 'uniform': for name, pmin, pmax in zip(args.params, args.min, args.max): params[name] = numpy.random.uniform(pmin, pmax, size=size) @@ -293,9 +293,9 @@ def draw(rtype): points = kde.resample(size=size) for k, v in zip(p, points): params[k] = v - + params['approximant'] = numpy.array([args.approximant]*size) - + # Filter out stuff l = None for name, pmin, pmax in zip(args.params, args.min, args.max): @@ -318,42 +318,42 @@ def draw(rtype): from pycbc.conversions import mchirp_from_mass1_mass2 mc = mchirp_from_mass1_mass2(params['mass1'], params['mass2']) l &= mc > args.min_mchirp - + for k in params: params[k] = params[k][l] - + return params def cdraw(rtype, ts, te): from pycbc.conversions import tau0_from_mass1_mass2 - p = draw(rtype) - if len(p[p.keys()[0]]) > 0: + p = draw(rtype) + if len(p[list(p.keys())[0]]) > 0: t = tau0_from_mass1_mass2(p['mass1'], p['mass2'], 15.0) l = (t < te) & (t > ts) for k in p: p[k] = p[k][l] - + i = 0 - while len(p[p.keys()[0]]) < size: + while len(p[list(p.keys())[0]]) < size: tp = draw(rtype) for k in p: p[k] = numpy.concatenate([p[k], tp[k]]) - - if len(p[p.keys()[0]]) > 0: + + if len(p[list(p.keys())[0]]) > 0: t = tau0_from_mass1_mass2(p['mass1'], 
p['mass2'], 15.0) l = (t < te) & (t > ts) for k in p: - p[k] = p[k][l] - + p[k] = p[k][l] + i += 1 if i > 1000: break - - - if len(p[p.keys()[0]]) == 0: + + + if len(p[list(p.keys())[0]]) == 0: return None - + return p tau0s = args.tau0_start @@ -372,7 +372,7 @@ while tau0s < args.tau0_end: if len(bank) > 0: go = False break - + blen = len(bank) bank, uconv = bank.check_params(gen, params, args.minimal_match) logging.info("%s: Round (U): %s Size: %s conv: %s added: %s", @@ -393,17 +393,21 @@ while tau0s < args.tau0_end: if kloop == 1: okconv = kconv - + if kconv <= tolerance: conv = kconv break - + bank.culltau0(tau0s - args.tau0_threshold * 2.0) logging.info("Region Done %3.1f-%3.1f, %s stored", tau0s, tau0e, bank.activelen()) - region += 1 + region += 1 tau0s += args.tau0_crawl / 2 tau0e += args.tau0_crawl / 2 o = h5py.File(args.output_file, 'w') for k in bank.keys(): - o[k] = bank.key(k) + val = bank.key(k) + if val.dtype.char == 'U': + val = val.astype('bytes') + o[k] = val +o['f_lower'] = numpy.ones(len(val)) * args.low_frequency_cutoff diff --git a/bin/plotting/pycbc_ifar_catalog b/bin/plotting/pycbc_ifar_catalog index 5f2901b8c9f..4feba7ab181 100644 --- a/bin/plotting/pycbc_ifar_catalog +++ b/bin/plotting/pycbc_ifar_catalog @@ -26,9 +26,6 @@ import pycbc.version from pycbc import conversions from scipy.stats import norm, poisson -pylab.rc('text', usetex=True) -pylab.rc('font', **{'family': 'serif', 'serif': ['Computer Modern']}) - parser = argparse.ArgumentParser(usage='pycbc_ifar_catalog [--options]', description='Plots cumulative IFAR vs count for' ' coincident foreground triggers') @@ -46,6 +43,8 @@ parser.add_argument('--remove-threshold', type=float, parser.add_argument('--open-box', action='store_true', help='Are we putting open box results onto the plot? 
' 'Default=False.') +parser.add_argument('--use-tex', action='store_true', + help="Render using latex") parser.add_argument('--use-hierarchical-level', type=int, default=None, help='Indicate which inclusive background and FARs of ' 'foreground triggers to plot if there were any ' @@ -66,6 +65,10 @@ parser.add_argument('--use-exclusive-ifar', action='store_true', 'IFAR calculations. Default=False.') opts = parser.parse_args() +if opts.use_tex: + pylab.rc('text', usetex=True) + pylab.rc('font', **{'family': 'serif', 'serif': ['Computer Modern']}) + trigf = [h5py.File(f, 'r') for f in opts.trigger_files] # Parse which inclusive background to use for the plotting diff --git a/bin/pycbc_submit_dax b/bin/pycbc_submit_dax index b7e0571407c..fa010f505b1 100644 --- a/bin/pycbc_submit_dax +++ b/bin/pycbc_submit_dax @@ -29,6 +29,7 @@ TRANSFORMATION_CATALOG="" LOCAL_STAGING_SERVER="file://" REMOTE_STAGING_SERVER="" NO_CREATE_PROXY=0 +NO_QUERY_DB=0 NO_GRID="" SUBMIT_DAX="--submit" HTML_ENTITIES="{\"\'\": ''', '(': '(', ')': ')', '+': '+', '\"': '"'}" @@ -49,7 +50,7 @@ rm -f _reuse.cache touch _reuse.cache rm -f *-extra-site-properties.xml -GETOPT_CMD=`getopt -o d:c:g:r:a:u:p:P:es:S:k:t:Fknl:Gh --long dax:,cache-file:,local-staging-server:,remote-staging-server:,accounting-group:,accounting-group-user:,pegasus-properties:,append-pegasus-property:,enable-shared-filesystem,execution-sites:,staging-sites:,append-site-profile:,transformation-catalog:,force-no-accounting-group,no-create-proxy,no-submit,local-dir:,no-grid,help -n 'pycbc_submit_dax' -- "$@"` +GETOPT_CMD=`getopt -o d:c:g:r:a:u:p:P:es:S:k:t:Fknl:Gh --long dax:,cache-file:,local-staging-server:,remote-staging-server:,accounting-group:,accounting-group-user:,pegasus-properties:,append-pegasus-property:,enable-shared-filesystem,execution-sites:,staging-sites:,append-site-profile:,transformation-catalog:,force-no-accounting-group,no-create-proxy,no-query-db,no-submit,local-dir:,no-grid,help -n 'pycbc_submit_dax' -- "$@"` 
eval set -- "$GETOPT_CMD" while true ; do @@ -167,6 +168,7 @@ EOF` esac ;; -F|--force-no-accounting-group) NO_ACCOUNTING_GROUP=1 ; shift ;; -K|--no-create-proxy) NO_CREATE_PROXY=1 ; shift ;; + -Q|--no-query-db) NO_QUERY_DB=1 ; shift ;; -n|--no-submit) SUBMIT_DAX="" ; shift ;; -l|--local-dir) case "$2" in @@ -560,48 +562,52 @@ set +e condor_reschedule &> /dev/null set -e -/bin/echo "Querying Pegasus database for workflow stored in ${SUBMIT_DIR}/work" -/bin/echo -n "This may take up to 120 seconds. Please wait..." -rm -f pegasus_db.log -touch pegasus_db.log -# querying the database sometimes fails, so allow retries -set +e -until [ $DB_TRY -ge 15 ] -do - /bin/echo -n "." - WORKFLOW_ID_STRING=`eval $WORKFLOW_DB_CMD 2>> pegasus_db.log` - if [ $? -eq 0 ] && [ ! -z $WORKFLOW_ID_STRING ] ; then - /bin/echo " Done." - DB_QUERY_SUCCESS=0 - break - else - DB_QUERY_SUCCESS=1 - fi - DB_TRY=$(( $DB_TRY + 1 )) - for s in `seq ${DB_TRY}` - do - /bin/echo -n "." - sleep 1 - done -done -set -e +if [ $NO_QUERY_DB == 0 ]; then + + /bin/echo "Querying Pegasus database for workflow stored in ${SUBMIT_DIR}/work" + /bin/echo -n "This may take up to 120 seconds. Please wait..." + rm -f pegasus_db.log + touch pegasus_db.log + # querying the database sometimes fails, so allow retries + set +e + until [ $DB_TRY -ge 15 ] + do + /bin/echo -n "." + WORKFLOW_ID_STRING=`eval $WORKFLOW_DB_CMD 2>> pegasus_db.log` + if [ $? -eq 0 ] && [ ! -z $WORKFLOW_ID_STRING ] ; then + /bin/echo " Done." + DB_QUERY_SUCCESS=0 + break + else + DB_QUERY_SUCCESS=1 + fi + DB_TRY=$(( $DB_TRY + 1 )) + for s in `seq ${DB_TRY}` + do + /bin/echo -n "." 
+ sleep 1 + done + done + set -e + + if [ ${DB_QUERY_SUCCESS} -eq 1 ] ; then + echo; echo + /bin/echo "Query failed: ${WORKFLOW_DB_CMD}" + cat pegasus_db.log + else + rm -f pegasus_db.log + fi -if [ ${DB_QUERY_SUCCESS} -eq 1 ] ; then - echo; echo - /bin/echo "Query failed: ${WORKFLOW_DB_CMD}" - cat pegasus_db.log -else - rm -f pegasus_db.log -fi + if [ -z $WORKFLOW_ID_STRING ] ; then + echo "WARNING: Could not find the workflow in the Pegasus dashboard database." + echo " Workflow has been submitted but the results page will not contain" + echo " a link to the dashboard page. If this is a production workflow," + echo " please remove the workflow, check for the origin of this error," + echo " and re-submit the workflow by re-running this script." + echo + exit 1 + fi -if [ -z $WORKFLOW_ID_STRING ] ; then - echo "WARNING: Could not find the workflow in the Pegasus dashboard database." - echo " Workflow has been submitted but the results page will not contain" - echo " a link to the dashboard page. If this is a production workflow," - echo " please remove the workflow, check for the origin of this error," - echo " and re-submit the workflow by re-running this script." 
- echo - exit 1 fi WORKFLOW_ID_ARRAY=(${WORKFLOW_ID_STRING//,/ }) diff --git a/examples/search/analysis.ini b/examples/search/analysis.ini new file mode 100644 index 00000000000..827a1828ac3 --- /dev/null +++ b/examples/search/analysis.ini @@ -0,0 +1,203 @@ +[workflow] +file-retention-level = merged_triggers +start-time = 1186740100 +end-time = 1186743500 +h1-channel-name = H1:LOSC-STRAIN +l1-channel-name = L1:LOSC-STRAIN +v1-channel-name = V1:LOSC-STRAIN +h1-frame-file = ../../../../H-H1_LOSC_CLN_4_V1-1186740069-3584.gwf +l1-frame-file = ../../../../L-L1_LOSC_CLN_4_V1-1186740069-3584.gwf +v1-frame-file = ../../../../V-V1_LOSC_CLN_4_V1-1186739813-4096.gwf + +[workflow-ifos] +h1 = +l1 = +v1 = + +[workflow-datafind] +datafind-method = AT_RUNTIME_FAKE_DATA +datafind-check-frames-exist = no_test +datafind-check-segment-gaps = no_test + +[workflow-segments] +segments-science = +DATA,-CBC_CAT1_VETO +segments-vetoes = +CBC_CAT2_VETO,+CBC_HW_INJ,+BURST_HW_INJ + +[datafind] +urltype = file + +[hdfinjfind] +injection-window = 2.0 +optimal-snr-column = H1:alpha1 L1:alpha2 V1:alpha3 + +[workflow-tmpltbank] +tmpltbank-method = PREGENERATED_BANK +tmpltbank-pregenerated-bank = ../bank.hdf + +[workflow-splittable] +splittable-method = IN_WORKFLOW +splittable-exe-tag = splitbank + +[workflow-splittable-full_data] +splittable-num-banks = 1 + +[workflow-splittable-injections] +splittable-num-banks = 1 + +[workflow-matchedfilter] +matchedfilter-method = WORKFLOW_INDEPENDENT_IFOS +min-analysis-segments = 6 +min-analysis-length = 3400 +max-analysis-segments = 10 +output-type = hdf +plot-throughput = + +[workflow-coincidence] +do-trigger-fitting = + +[workflow-coincidence-full_data] +timeslide-precedence = H1, L1, V1, K1, I1 +parallelization-factor = 1 + +[workflow-coincidence-injections] +parallelization-factor = 1 + +[workflow-psd] +parallelization-factor = 1 + +[workflow-results] +max-hierarchical-removal = 1 + +[llwadd] +ilwdchar-compat = + +[segments_from_cats] 
+[ligolw_combine_segments] +[splitbank] +random-sort = + +[inspiral] +pad-data = 8 +strain-high-pass = 15 +sample-rate = 2048 +segment-length = 512 +segment-start-pad = 32 +segment-end-pad = 16 +allow-zero-padding = +taper-data = 1 +psd-estimation = median +psd-segment-length = 16 +psd-segment-stride = 8 +psd-inverse-length = 16 +psd-num-segments = 63 +psdvar-segment = 8 +psdvar-short-segment = 0.25 +psdvar-long-segment = 512 +psdvar-psd-duration = 8 +psdvar-psd-stride = 4 +psdvar-low-freq = 20 +psdvar-high-freq = 480 +autogating-threshold = 50 +autogating-cluster = 0.5 +autogating-width = 0.25 +autogating-taper = 0.25 +autogating-pad = 16 +low-frequency-cutoff = 20 +enable-bank-start-frequency = +snr-threshold = 3.8 +cluster-method = window +cluster-window = 1 +cluster-function = symmetric +chisq-snr-threshold = 5.25 +chisq-bins = 16 +newsnr-threshold = 3.8 +sgchisq-snr-threshold = 6.0 +sgchisq-locations = "mtotal>30:20-15,20-30,20-45,20-60,20-75,20-90,20-105,20-120" +filter-inj-only = +finalize-events-template-rate = 500 +injection-window = 4.5 +processing-scheme = mkl + +[single_template-h1&plot_singles_timefreq-h1&plot_qscan-h1&inspiral-h1&calculate_psd-h1] +frame-files = ${workflow|h1-frame-file} +channel-name = ${workflow|h1-channel-name} + +[single_template-l1&plot_singles_timefreq-l1&plot_qscan-l1&inspiral-l1&calculate_psd-l1] +frame-files = ${workflow|l1-frame-file} +channel-name = ${workflow|l1-channel-name} + +[single_template-v1&plot_singles_timefreq-v1&plot_qscan-v1&inspiral-v1&calculate_psd-v1] +frame-files = ${workflow|v1-frame-file} +channel-name = ${workflow|v1-channel-name} + +[calculate_psd] +cores = 1 +low-frequency-cutoff = ${inspiral|low-frequency-cutoff} +pad-data = ${inspiral|pad-data} +strain-high-pass = ${inspiral|strain-high-pass} +sample-rate = ${inspiral|sample-rate} +segment-length = ${inspiral|segment-length} +segment-start-pad = ${inspiral|segment-start-pad} +segment-end-pad = ${inspiral|segment-end-pad} +psd-estimation = 
${inspiral|psd-estimation} +psd-segment-length = ${inspiral|psd-segment-length} +psd-segment-stride = ${inspiral|psd-segment-stride} +psd-num-segments = ${inspiral|psd-num-segments} +taper-data = ${inspiral|taper-data} +autogating-threshold = ${inspiral|autogating-threshold} +autogating-cluster = ${inspiral|autogating-cluster} +autogating-width = ${inspiral|autogating-width} +autogating-taper = ${inspiral|autogating-taper} +autogating-pad = ${inspiral|autogating-pad} + +[merge_psds] +[hdf_trigger_merge] +[bank2hdf] +[fit_by_template] +fit-function = exponential +sngl-stat = newsnr_sgveto_psdvar_scaled_threshold +stat-threshold = 4.0 +prune-param = mtotal +log-prune-param = +prune-bins = 1 +prune-number = 1 + +[fit_over_param] +fit-param = mtotal +f-lower = ${inspiral|low-frequency-cutoff} +log-param = True +smoothing-width = 0.4 + +[distribute_background_bins] +[coinc] +coinc-threshold = 0.002 +ranking-statistic = 2ogc +randomize-template-order = +statistic-files = ../statHL.hdf ../statLV.hdf ../statHV.hdf ../statHLV.hdf + +[coinc-full_data] +timeslide-interval = 0.1 +loudest-keep-values = 5:5 0:5 + +[coinc-fullinj&coinc-injfull] +timeslide-interval = ${coinc-full_data|timeslide-interval} +cluster-window = ${statmap|cluster-window} +loudest-keep-values = 15.0:9999999999999 + +[coinc-injinj] +[statmap] +max-hierarchical-removal = ${workflow-results|max-hierarchical-removal} +hierarchical-removal-against = exclusive + +[statmap&statmap_inj] +veto-window = 0.100 +cluster-window = 10.0 + +[combine_statmap] +cluster-window = ${statmap|cluster-window} + +[foreground_censor] +[results_page] +analysis-title = "PyCBC search" +analysis-subtitle = "Small Test Search" +output-path = ../../../../html diff --git a/examples/search/bank.sh b/examples/search/bank.sh new file mode 100644 index 00000000000..c8d7e558ac4 --- /dev/null +++ b/examples/search/bank.sh @@ -0,0 +1,21 @@ +#!/bin/bash +set -e + +pycbc_brute_bank \ +--verbose \ +--output-file bank.hdf \ +--minimal-match 0.95 \ 
+--tolerance .005 \ +--buffer-length 2 \ +--sample-rate 2048 \ +--tau0-threshold 0.5 \ +--approximant IMRPhenomD \ +--tau0-crawl 5 \ +--tau0-start 0 \ +--tau0-end 50 \ +--psd-model aLIGOZeroDetLowPower \ +--min 10 10 0 0 \ +--max 40 40 .2 .2 \ +--params mass1 mass2 spin1z spin2z \ +--seed 1 \ +--low-frequency-cutoff 20.0 diff --git a/examples/search/check_job.py b/examples/search/check_job.py new file mode 100644 index 00000000000..30eec025f4f --- /dev/null +++ b/examples/search/check_job.py @@ -0,0 +1,36 @@ +import subprocess +import time +time.sleep(30) +while 1: + time.sleep(5) + subprocess.run(["pegasus-status", "submitdir/work/"]) + out = subprocess.check_output(["pegasus-status", "submitdir/work/"]) + out = str(out) + lines = out.split('\\n') + for i in range(len(lines)): + if 'UNREADY' in lines[i]: + status_line = i + 1 + break + + stats = lines[status_line].split(' ') + stats = [s for s in stats if s != ''] + + unready = int(stats[0]) + ready = int(stats[1]) + pre = int(stats[2]) + queued = int(stats[3]) + post = int(stats[4]) + done = int(stats[5]) + failed = int(stats[6]) + + finished = (unready == 0 and ready == 0 and queued == 0 and post == 0) + passed = finished and failed == 0 + + if passed: + print("workflow has completed successfully") + exit(0) + + if failed != 0: + print("workflow has a failed job, ending now") + subprocess.run(["bash", "./stop"]) + exit(1) diff --git a/examples/search/executables.ini b/examples/search/executables.ini new file mode 100644 index 00000000000..ec02f043322 --- /dev/null +++ b/examples/search/executables.ini @@ -0,0 +1,66 @@ +[executables] +page_ifar_catalog = ${which:pycbc_ifar_catalog} +average_psd = ${which:pycbc_average_psd} +bank2hdf = ${which:pycbc_coinc_bank2hdf} +calculate_psd = ${which:pycbc_calculate_psd} +coinc = ${which:pycbc_coinc_findtrigs} +combine_statmap = ${which:pycbc_add_statmap} +distribute_background_bins = ${which:pycbc_distribute_background_bins} +fit_by_template = 
${which:pycbc_fit_sngls_by_template} +fit_over_param = ${which:pycbc_fit_sngls_over_multiparam} +foreground_censor = ${which:pycbc_foreground_censor} +hdfinjfind = ${which:pycbc_coinc_hdfinjfind} +hdf_trigger_merge = ${which:pycbc_coinc_mergetrigs} +inj_cut = ${which:pycbc_inj_cut} +injections = ${which:lalapps_inspinj} +inspiral = ${which:pycbc_inspiral} +ligolw_combine_segments = ${which:ligolw_combine_segments} +llwadd = ${which:ligolw_add} +merge_psds = ${which:pycbc_merge_psds} +optimal_snr = ${which:pycbc_optimal_snr} +page_foreground = ${which:pycbc_page_foreground} +page_ifar = ${which:pycbc_page_ifar} +page_injections = ${which:pycbc_page_injtable} +page_segplot = ${which:pycbc_page_segplot} +page_segtable = ${which:pycbc_page_segtable} +page_vetotable = ${which:pycbc_page_vetotable} +plot_bank = ${which:pycbc_plot_bank_bins} +plot_binnedhist = ${which:pycbc_fit_sngls_binned} +plot_coinc_snrchi = ${which:pycbc_page_coinc_snrchi} +plot_foundmissed = ${which:pycbc_page_foundmissed} +plot_gating = ${which:pycbc_plot_gating} +plot_hist = ${which:pycbc_plot_hist} +plot_qscan = ${which:pycbc_plot_qscan} +plot_range = ${which:pycbc_plot_range} +plot_segments = ${which:pycbc_page_segments} +plot_sensitivity = ${which:pycbc_page_sensitivity} +plot_singles = ${which:pycbc_plot_singles_vs_params} +plot_snrchi = ${which:pycbc_page_snrchi} +plot_snrifar = ${which:pycbc_page_snrifar} +plot_spectrum = ${which:pycbc_plot_psd_file} +exclude_zerolag = ${which:pycbc_exclude_zerolag} +plot_throughput = ${which:pycbc_plot_throughput} +results_page = ${which:pycbc_make_html_page} +segment_query = ${which:ligolw_segment_query_dqsegdb} +segments_from_cats = ${which:ligolw_segments_from_cats_dqsegdb} +splitbank = ${which:pycbc_hdf5_splitbank} +statmap = ${which:pycbc_coinc_statmap} +statmap_inj = ${which:pycbc_coinc_statmap_inj} +strip_injections = ${which:pycbc_strip_injections} +tmpltbank = ${which:pycbc_geom_nonspinbank} +html_snippet = ${which:pycbc_create_html_snippet} 
+foreground_minifollowup = ${which:pycbc_foreground_minifollowup} +injection_minifollowup = ${which:pycbc_injection_minifollowup} +singles_minifollowup = ${which:pycbc_sngl_minifollowup} +page_injinfo = ${which:pycbc_page_injinfo} +page_coincinfo = ${which:pycbc_page_coincinfo} +page_snglinfo = ${which:pycbc_page_snglinfo} +plot_trigger_timeseries = ${which:pycbc_plot_trigger_timeseries} +single_template_plot = ${which:pycbc_single_template_plot} +single_template = ${which:pycbc_single_template} +plot_singles_timefreq = ${which:pycbc_plot_singles_timefreq} +plot_snrratehist = ${which:pycbc_page_snrratehist} +plot_waveform = ${which:pycbc_plot_waveform} + +[pegasus_profile] +condor|request_memory = 1000 diff --git a/examples/search/gen.sh b/examples/search/gen.sh new file mode 100644 index 00000000000..aa9329238ef --- /dev/null +++ b/examples/search/gen.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e + +pycbc_make_coinc_search_workflow \ +--workflow-name gw \ +--output-dir output \ +--config-files analysis.ini plotting.ini executables.ini injections_minimal.ini diff --git a/examples/search/get.sh b/examples/search/get.sh new file mode 100644 index 00000000000..810a7dfa675 --- /dev/null +++ b/examples/search/get.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e + +wget https://dcc.ligo.org/public/0146/P1700341/001/H-H1_LOSC_CLN_4_V1-1186740069-3584.gwf +wget https://dcc.ligo.org/public/0146/P1700341/001/L-L1_LOSC_CLN_4_V1-1186740069-3584.gwf +wget https://dcc.ligo.org/public/0146/P1700341/001/V-V1_LOSC_CLN_4_V1-1186739813-4096.gwf + diff --git a/examples/search/injections_minimal.ini b/examples/search/injections_minimal.ini new file mode 100644 index 00000000000..b75cf4c207c --- /dev/null +++ b/examples/search/injections_minimal.ini @@ -0,0 +1,43 @@ +[workflow-injections] +injections-method = IN_WORKFLOW +strip-injections = +compute-optimal-snr = + +[strip_injections] +[inspiral] +injection-filter-rejector-chirp-time-window = 5 + +[optimal_snr] +snr-columns = 
${hdfinjfind|optimal-snr-column} +f-low = ${inspiral|low-frequency-cutoff} +seg-length = ${inspiral|segment-length} +sample-rate = 2048 +cores = 1 + +[inj_cut] +snr-columns = ${hdfinjfind|optimal-snr-column} +snr-threshold = 3.0 + +[injections] +i-distr = uniform +l-distr = random +time-interval = 25 +time-step = 50 + +[workflow-injections-bbh] +[injections-bbh] +dchirp-distr = uniform +min-distance = 1000 +max-distance = 100000 +m-distr = log +min-mass1 = 10 +max-mass1 = 30 +min-mass2 = 10 +max-mass2 = 30 +min-mtotal = 20 +max-mtotal = 60 +waveform = IMRPhenomD +taper-injection = start +seed = 123407 +f-lower = 25 +disable-spin = diff --git a/examples/search/master.sh b/examples/search/master.sh new file mode 100644 index 00000000000..b39534d7f49 --- /dev/null +++ b/examples/search/master.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e + +bash -e get.sh +bash -e bank.sh +bash -e stats.sh +bash -e gen.sh + +cp *.gwf output +cd output +bash -e ../submit.sh +python ../check_job.py diff --git a/examples/search/plotting.ini b/examples/search/plotting.ini new file mode 100644 index 00000000000..f46fe7ccace --- /dev/null +++ b/examples/search/plotting.ini @@ -0,0 +1,236 @@ +[pycbc_ifar_catalog] +[workflow-minifollowups] +num-events=1 + +[workflow-sngl_minifollowups] +num-sngl-events=1 + +[workflow-sngl_minifollowups-noncoinc] +section-header = loudest_noncoinc_time + +[workflow-sngl_minifollowups-all] +section-header = all + +[workflow-injection_minifollowups] +num-events=1 +subsection-suffix=with_ifar_lt_1_year + +[foreground_minifollowup] +[foreground_minifollowup-foreground] +analysis-category = foreground + +[foreground_minifollowup-background] +analysis-category = background_exc + +[singles_minifollowup] +ranking-statistic = newsnr_sgveto + +[singles_minifollowup-noncoinc] +non-coinc-time-only = + +[singles_minifollowup-all] + +[injection_minifollowup] +ifar-threshold = 1 + +[page_snglinfo] +ranking-statistic = newsnr_sgveto + +[single_template_plot] + 
+[single_template_plot-p1] +window = 0.1 + +[plot_trigger_timeseries] + +[html_snippet] + +[page_coincinfo] +[page_coincinfo-background] +statmap-file-subspace-name=background_exc + +[page_coincinfo-foreground] +statmap-file-subspace-name=foreground + +[plot_trigger_timeseries-snr1] +window = 1 +plot-type = snr +log-y-axis = + +[plot_trigger_timeseries-newsnr1] +window = 1 +plot-type = newsnr + +[plot_singles_timefreq] +f-low = ${inspiral|low-frequency-cutoff} +rank = newsnr +num-loudest = 1 +approximant = TaylorF2 + +[plot_qscan] +time-windows = 10,1 1,1 + +[single_template&plot_singles_timefreq&plot_qscan] +strain-high-pass = ${inspiral|strain-high-pass} +sample-rate = ${inspiral|sample-rate} +pad-data = ${inspiral|pad-data} + +[single_template] +segment-length = ${inspiral|segment-length} +segment-start-pad = ${inspiral|segment-start-pad} +segment-end-pad = ${inspiral|segment-end-pad} +psd-estimation = ${inspiral|psd-estimation} +psd-segment-length = ${inspiral|psd-segment-length} +psd-segment-stride = ${inspiral|psd-segment-stride} +psd-inverse-length = ${inspiral|psd-inverse-length} +chisq-bins = ${inspiral|chisq-bins} +low-frequency-cutoff = ${inspiral|low-frequency-cutoff} +approximant = IMRPhenomD +processing-scheme = ${inspiral|processing-scheme} +window = 10 +psd-num-segments = ${inspiral|psd-num-segments} +taper-data = ${inspiral|taper-data} +allow-zero-padding = +autogating-threshold = ${inspiral|autogating-threshold} +autogating-cluster = ${inspiral|autogating-cluster} +autogating-width = ${inspiral|autogating-width} +autogating-taper = ${inspiral|autogating-taper} +autogating-pad = ${inspiral|autogating-pad} +minimum-chisq-bins = 3 + +[single_template-inj_params_inverted] +injection-scale-factor=-1 + +[single_template-inj_params_noinj] +injection-scale-factor=1000000 + +[plot_waveform] +low-frequency-cutoff = ${inspiral|low-frequency-cutoff} +waveform-length = ${inspiral|segment-length} +sample-rate = ${inspiral|sample-rate} +approximant = IMRPhenomD 
+ +[plot_sensitivity] +; method for calculating volumes +dist-bins = 50 +exclusive-sig = +integration-method = pylal + +; bin choices +[s-mchirp] +bins = 10 20 100 + +[plot_sensitivity-all_mchirp&plot_sensitivity-summary_mchirp] +bin-type = mchirp +bins = ${s-mchirp|bins} +sig-type = ifar + +[plot_sensitivity-all_mchirpvt&plot_sensitivity-summary_mchirpvt] +bin-type = mchirp +bins = ${s-mchirp|bins} +dist-type = vt +sig-type = ifar +log-dist = + +[plot_foundmissed] +far-type = exclusive + +[plot_foundmissed-sub_mchirp_grad&plot_foundmissed-all_mchirp_grad&plot_foundmissed-summary] +distance-type = decisive_optimal_snr +axis-type = mchirp +log-x = +log-distance = +gradient-far = + +[plot_foundmissed-sub_mchirp_gradm&plot_foundmissed-all_mchirp_gradm&plot_foundmissed-summarym] +distance-type = decisive_optimal_snr +axis-type = mchirp +log-x = +log-distance = +gradient-far = +missed-on-top = + +[plot_singles] +min-snr = 6 + +[plot_singles-mtotal_eta_newsnr] +x-var = mtotal +log-x = +y-var = eta +z-var = "newsnr_sgveto" +min-z = 6 + +[plot_range] + +[plot_range-summ] +mass1 = 1.4 +mass2 = 1.4 +approximant = SPAtmplt + +[contours] +newsnr-contours = 5 7 9 11 13 + +[plot_snrchi] +min-snr = 6.0 +newsnr-contours = ${contours|newsnr-contours} + +[plot_coinc_snrchi] +newsnr-contours = ${contours|newsnr-contours} + +[plot_coinc_snrchi-inj] + +[plot_hist] +[plot_hist-summ_snr] +x-var = snr +x-min = 6 +x-max = 100 + +[plot_hist-summ_newsnr] +x-var = newsnr +x-min = 6 +x-max = 15 + +[plot_binnedhist] +sngl-stat = ${fit_by_template|sngl-stat} +fit-function = ${fit_by_template|fit-function} +; limit the number of triggers for which duration is calculated +stat-threshold = 5.0 +;${fit_by_template|stat-threshold} +prune-param = ${fit_by_template|prune-param} +log-prune-param = +prune-bins = ${fit_by_template|prune-bins} +prune-number = ${fit_by_template|prune-number} + +[plot_binnedhist-mtotal] +bin-param = mtotal +; for template duration, need a starting frequency +f-lower = 20.0 
+num-bins = 2 +bin-spacing = log + +[plot_spectrum] +psd-model = aLIGOZeroDetHighPower + +[page_ifar] +decimation-factor = 100 + +[page_ifar-open_box] +open-box= + +[page_vetotable] +[plot_bank] +log-x = +log-y = + +[page_segtable] +[page_segplot] +[plot_snrifar] +[page_foreground] +[page_foreground-xmlloudest] +num-coincs-to-write = 2 + +[page_injections] +[plot_segments] +[plot_gating] +[plot_snrratehist] +[plot_throughput] diff --git a/examples/search/stats.sh b/examples/search/stats.sh new file mode 100644 index 00000000000..687a53bd8d3 --- /dev/null +++ b/examples/search/stats.sh @@ -0,0 +1,44 @@ +#!/bin/bash +set -e + +pycbc_dtphase \ +--ifos H1 L1 \ +--relative-sensitivities .7 1 \ +--sample-size 200000 \ +--snr-ratio 2.0 \ +--seed 10 \ +--output-file statHL.hdf \ +--smoothing-sigma 1 \ +--verbose + +pycbc_dtphase \ +--ifos L1 V1 \ +--relative-sensitivities 1 0.3 \ +--sample-size 200000 \ +--snr-ratio 2.0 \ +--seed 10 \ +--output-file statLV.hdf \ +--smoothing-sigma 1 \ +--verbose + +pycbc_dtphase \ +--ifos H1 V1 \ +--relative-sensitivities .7 .3 \ +--sample-size 200000 \ +--snr-ratio 2.0 \ +--seed 10 \ +--output-file statHV.hdf \ +--smoothing-sigma 1 \ +--verbose + + +pycbc_dtphase \ +--ifos H1 L1 V1 \ +--relative-sensitivities .7 1 .3 \ +--sample-size 50000 \ +--timing-uncertainty .01 \ +--snr-ratio 2.0 \ +--seed 10 \ +--output-file statHLV.hdf \ +--smoothing-sigma 1 \ +--verbose diff --git a/examples/search/submit.sh b/examples/search/submit.sh new file mode 100644 index 00000000000..d6c795bb2d3 --- /dev/null +++ b/examples/search/submit.sh @@ -0,0 +1,6 @@ +pycbc_submit_dax --no-grid --no-create-proxy \ +--enable-shared-filesystem \ +--force-no-accounting-group \ +--local-dir ./ \ +--no-query-db \ +--dax gw.dax diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index 348b2fecc95..9f12255786a 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -53,7 +53,9 @@ def __init__(self, files=None, ifos=None, **kwargs): files = files or [] for 
filename in files: f = h5py.File(filename, 'r') - stat = (f.attrs['stat']).decode() + stat = f.attrs['stat'] + if hasattr(stat, 'decode'): + stat = stat.decode() if stat in self.files: raise RuntimeError("We already have one file with stat attr =" " %s. Can't provide more than one!" % stat) diff --git a/pycbc/io/hdf.py b/pycbc/io/hdf.py index 3dad22c1d38..8b9d2f4deca 100644 --- a/pycbc/io/hdf.py +++ b/pycbc/io/hdf.py @@ -180,7 +180,7 @@ def remove(self, idx): """ data = {} for k in self.data: - data[k] = np.delete(self.data[k], idx) + data[k] = np.delete(self.data[k], np.array(idx, dtype=int)) return self._return(data=data) def save(self, outname): diff --git a/pycbc/waveform/bank.py b/pycbc/waveform/bank.py index 5f6be6f0156..425b2b98554 100644 --- a/pycbc/waveform/bank.py +++ b/pycbc/waveform/bank.py @@ -452,7 +452,7 @@ def approximant(self, index): if 'approximant' not in self.table.fieldnames: raise ValueError("approximant not found in input file and no " "approximant was specified on initialization") - return self.table["approximant"][index] + return self.table["approximant"][index].decode() def __len__(self): return len(self.table) diff --git a/pycbc/workflow/coincidence.py b/pycbc/workflow/coincidence.py index 4517699cc27..add8ea0ebd8 100644 --- a/pycbc/workflow/coincidence.py +++ b/pycbc/workflow/coincidence.py @@ -121,7 +121,6 @@ def create_node(self, coinc_files, ifos, tags=None): seg = segments.segment(segs[0][0], segs[-1][1]) node = Node(self) - node.set_memory(5000) node.add_input_list_opt('--coinc-files', coinc_files) node.add_opt('--ifos', ifos) node.new_output_file_opt(seg, '.hdf', '--output-file', tags=tags) @@ -140,7 +139,6 @@ def create_node(self, zerolag, full_data, seg = segments.segment(segs[0][0], segs[-1][1]) node = Node(self) - node.set_memory(5000) node.add_input_list_opt('--zero-lag-coincs', zerolag) if isinstance(full_data, list): diff --git a/pycbc/workflow/jobsetup.py b/pycbc/workflow/jobsetup.py index 0f305b35968..cc87e373a57 100644 
--- a/pycbc/workflow/jobsetup.py +++ b/pycbc/workflow/jobsetup.py @@ -770,7 +770,6 @@ def get_valid_times(self): if end > start: data_lengths += [data_length] valid_regions += [segments.segment(start, end)] - return data_lengths, valid_regions def zero_pad_data_extend(self, job_data_seg, curr_seg): diff --git a/pycbc/workflow/plotting.py b/pycbc/workflow/plotting.py index 453885da4a8..51d43f7ff26 100644 --- a/pycbc/workflow/plotting.py +++ b/pycbc/workflow/plotting.py @@ -321,8 +321,6 @@ def make_snrchi_plot(workflow, trig_files, veto_file, veto_name, out_dir=out_dir, tags=[tag] + tags) node = exe.create_node() - - node.set_memory(15000) node.add_input_opt('--trigger-file', trig_file) if veto_file is not None: node.add_input_opt('--veto-file', veto_file) @@ -486,7 +484,6 @@ def make_singles_plot(workflow, trig_files, bank_file, veto_file, veto_name, out_dir=out_dir, tags=[tag] + tags).create_node() - node.set_memory(15000) node.add_input_opt('--bank-file', bank_file) if veto_file is not None: node.add_input_opt('--veto-file', veto_file) From c823397d6010fa7d06eb7f79de9ec2b3cfa26ab8 Mon Sep 17 00:00:00 2001 From: Alexander Harvey Nitz Date: Thu, 10 Dec 2020 03:06:16 +0100 Subject: [PATCH 2/3] decode issues --- pycbc/waveform/bank.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pycbc/waveform/bank.py b/pycbc/waveform/bank.py index 425b2b98554..48e23d4a287 100644 --- a/pycbc/waveform/bank.py +++ b/pycbc/waveform/bank.py @@ -452,7 +452,10 @@ def approximant(self, index): if 'approximant' not in self.table.fieldnames: raise ValueError("approximant not found in input file and no " "approximant was specified on initialization") - return self.table["approximant"][index].decode() + apx = self.table["approximant"][index] + if hasattr(apx, 'decode'): + apx = apx.decode() + return apx def __len__(self): return len(self.table) From 012121a71f7e6a833afa2b2d1a49b29f413d59bf Mon Sep 17 00:00:00 2001 From: Alex Nitz Date: Thu, 10 Dec 2020 11:45:21 +0100 
Subject: [PATCH 3/3] Update pycbc_add_statmap

---
 bin/all_sky_search/pycbc_add_statmap | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/bin/all_sky_search/pycbc_add_statmap b/bin/all_sky_search/pycbc_add_statmap
index f8379f4484e..4150519374f 100755
--- a/bin/all_sky_search/pycbc_add_statmap
+++ b/bin/all_sky_search/pycbc_add_statmap
@@ -96,8 +96,7 @@ for fi in files:
 
 logging.info('Combining foreground segments')
 
-# Convert segmentlistdict to a list ('seglists') of segmentlists
-# then np.sum(seglists, axis=0) does seglists[0] + seglists[1] + ...
+# combine the segment list from each ifo
 foreground_segs = segments.segmentlist([])
 for segs in indiv_segs.values():
     foreground_segs += segs