Skip to content

Commit

Permalink
Merge branch 'gwastro:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
GarethCabournDavies authored Sep 10, 2024
2 parents ba4c13f + 32030e0 commit ec648b2
Show file tree
Hide file tree
Showing 39 changed files with 719 additions and 261 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/inference-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ jobs:
wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
sudo apt-get -o Acquire::Retries=3 update
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/search-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ jobs:
wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
sudo apt-get -o Acquire::Retries=3 update
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/tmpltbank-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ jobs:
wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
sudo apt-get -o Acquire::Retries=3 update
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/workflow-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ jobs:
wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
sudo apt-get -o Acquire::Retries=3 update
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
Expand Down
14 changes: 12 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,18 @@
*.log
dist/
html/
pycbc_inspiralc
build/
*.pyc
docs/Makefile
PyCBC.egg-info
PyCBC.egg-info/
pycbc/events/eventmgr_cython.cpp
pycbc/events/simd_threshold_cython.cpp
pycbc/fft/fftw_pruned_cython.cpp
pycbc/filter/matchedfilter_cpu.cpp
pycbc/filter/simd_correlate_cython.cpp
pycbc/inference/models/relbin_cpu.cpp
pycbc/types/array_cpu.cpp
pycbc/vetoes/chisq_cpu.cpp
pycbc/waveform/decompress_cpu_cython.cpp
pycbc/waveform/spa_tmplt_cpu.cpp
pycbc/waveform/utils_cpu.cpp
4 changes: 2 additions & 2 deletions bin/all_sky_search/pycbc_coinc_statmap
Original file line number Diff line number Diff line change
Expand Up @@ -508,9 +508,9 @@ f.attrs['hierarchical_removal_iterations'] = h_iterations

# Write whether hierarchical removals were removed against the
# inclusive background or the exclusive background. Have to use
# numpy.string_ datatype.
# numpy.bytes_ datatype.
if h_iterations != 0:
hrm_method = args.hierarchical_removal_against
f.attrs['hierarchical_removal_method'] = numpy.string_(hrm_method)
f.attrs['hierarchical_removal_method'] = numpy.bytes_(hrm_method)

logging.info("Done")
4 changes: 2 additions & 2 deletions bin/all_sky_search/pycbc_sngls_statmap
Original file line number Diff line number Diff line change
Expand Up @@ -436,10 +436,10 @@ f.attrs['hierarchical_removal_iterations'] = h_iterations

# Write whether hierarchical removals were removed against the
# inclusive background or the exclusive background. Have to use
# numpy.string_ datatype.
# numpy.bytes_ datatype.
if h_iterations != 0:
hrm_method = args.hierarchical_removal_against
f.attrs['hierarchical_removal_method'] = numpy.string_(hrm_method)
f.attrs['hierarchical_removal_method'] = numpy.bytes_(hrm_method)

logging.info("Done")

8 changes: 6 additions & 2 deletions bin/inference/pycbc_inference
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,11 @@ fft.from_cli(opts)
with ctx:

# read configuration file
cp = configuration.WorkflowConfigParser.from_cli(opts)
cp_original = configuration.WorkflowConfigParser.from_cli(opts)
# some models will internally modify the original cp for sampling
# (e.g. joint_primary_marginalized); we need to save the original
# and let the model modify the copy
cp = cp_original.__deepcopy__(cp_original)

# create an empty checkpoint file, if needed
condor_ckpt = cp.has_option('sampler', 'checkpoint-signal')
Expand Down Expand Up @@ -138,7 +142,7 @@ with ctx:
if pool.is_main_process():
for fn in [sampler.checkpoint_file, sampler.backup_file]:
with loadfile(fn, 'a') as fp:
fp.write_config_file(cp)
fp.write_config_file(cp_original)

# Run the sampler
sampler.run()
Expand Down
32 changes: 24 additions & 8 deletions bin/live/pycbc_live_supervise_collated_trigger_fits
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,11 @@ def fit_over_multiparam(
"specified parameters",
len(daily_files)
)
logging.info(
"Smoothing fits using fit_over_multiparam with %d files and "
"specified parameters",
len(daily_files)
)
file_id_str = f'{first_date}-{end_date}'
out_fname = fit_over_controls['fit-over-format'].format(
dates=file_id_str,
Expand Down Expand Up @@ -270,7 +275,10 @@ def plot_fits(
]
fits_plot_arguments += sv.dict_to_args(plot_fit_options)

title = "Fit parameters for pycbc-live, triggers from " + day_title_str
title = "Fit parameters for pycbc-live, triggers from {}, {}".format(
ifo,
day_title_str
)
if smoothed == True:
title += ', smoothed'
fits_plot_arguments += ['--title', title]
Expand All @@ -290,7 +298,6 @@ def single_significance_fits(
day_str,
day_dt,
controls,
test_options,
stat_files=None,
):
"""
Expand All @@ -299,7 +306,10 @@ def single_significance_fits(
"""
daily_options['output'] = os.path.join(
output_dir,
daily_controls['sig-daily-format'].format(date=day_str),
daily_controls['sig-daily-format'].format(
ifos=''.join(sorted(controls['ifos'].split())),
date=day_str
),
)
daily_args = ['pycbc_live_single_significance_fits']

Expand All @@ -319,7 +329,10 @@ def plot_single_significance_fits(daily_output, daily_plot_options, controls):
"""
Plotting daily significance fits, and link to public directory if wanted
"""
daily_plot_output = f'{daily_output[:-4]}_{{ifo}}.png'
daily_plot_output = daily_output[:-4].replace(
''.join(sorted(controls['ifos'].split())),
'{ifo}'
) + '.png'
logging.info(
"Plotting daily significance fits from %s to %s",
daily_output,
Expand Down Expand Up @@ -362,9 +375,14 @@ def combine_significance_fits(
Supervise the smoothing of live trigger significance fits using
pycbc_live_combine_single_significance_fits
"""
# This uses a trick to do partial formatting: substitute the IFOs
# into the format string while leaving the {date} placeholder intact
daily_files, first_date, end_date = find_daily_fit_files(
combined_controls,
combined_controls['daily-format'],
combined_controls['daily-format'].format(
ifos=''.join(sorted(controls['ifos'].split())),
date='{date}'
),
controls['output-directory'],
)
logging.info(
Expand Down Expand Up @@ -463,7 +481,6 @@ def supervise_collation_fits_dq(args, day_dt, day_str):
combined_control_options = config_opts['significance_combined_fits_control']
combined_plot_options = config_opts['plot_significance_combined']
combined_plot_control_options = config_opts['plot_significance_combined_control']
test_options = config_opts['test']

# The main output directory will have a date subdirectory which we
# put the output into
Expand Down Expand Up @@ -538,7 +555,6 @@ def supervise_collation_fits_dq(args, day_dt, day_str):
day_str,
day_dt,
controls,
test_options,
stat_files=stat_files,
)
plot_single_significance_fits(
Expand Down Expand Up @@ -569,7 +585,7 @@ def get_yesterday_date():
day_dt = datetime.utcnow() - timedelta(days=1)
day_dt = datetime.combine(day_dt, datetime.min.time())
day_str = day_dt.strftime('%Y_%m_%d')
return date_dt, date_str
return day_dt, day_str

parser = argparse.ArgumentParser(description=__doc__)
pycbc.add_common_pycbc_options(parser)
Expand Down
56 changes: 46 additions & 10 deletions bin/minifollowups/pycbc_page_coincinfo
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ from pycbc import add_common_pycbc_options
import pycbc.results
import pycbc.pnutils
from pycbc.io.hdf import HFile
from pycbc.events import ranking
from pycbc.events import ranking, stat as pystat
from pycbc.results import followup

parser = argparse.ArgumentParser()
Expand Down Expand Up @@ -69,11 +69,22 @@ parser.add_argument('--include-summary-page-link', action='store_true',
parser.add_argument('--include-gracedb-link', action='store_true',
help="If given, will provide a link to search GraceDB for events "
"within a 3s window around the coincidence time.")

parser.add_argument('--max-columns', type=int,
help="Maximum number of columns allowed in the table (not including detector names)")
pystat.insert_statistic_option_group(parser,
default_ranking_statistic='single_ranking_only')

args = parser.parse_args()
pycbc.init_logging(args.verbose)

if args.ranking_statistic not in ['quadsum', 'single_ranking_only']:
logging.warning(
"For the coincident info table, we only use single ranking, not %s, "
"this option will be ignored",
args.ranking_statistic
)
args.ranking_statistic = 'quadsum'

# Get the nth loudest trigger from the output of pycbc_coinc_statmap
f = HFile(args.statmap_file, 'r')
d = f[args.statmap_file_subspace_name]
Expand Down Expand Up @@ -146,12 +157,16 @@ statmapfile = d
# table. Each entry in data corresponds to each row in the final table and
# should be a list of values. So data is will be a list of lists.
data = []
row_labels = []
rank_method = pystat.get_statistic_from_opts(args, list(files.keys()))

for ifo in files.keys():

# ignore ifo if coinc didn't participate (only for multi-ifo workflow)
if (statmapfile['%s/time' % ifo][n] == -1.0):
continue

row_labels.append(ifo)
d = files[ifo]
i = idx[ifo]
tid = d['template_id'][i]
Expand All @@ -161,7 +176,12 @@ for ifo in files.keys():

time = d['end_time'][i]
utc = lal.GPSToUTC(int(time))[0:6]

trig_dict = {
k: numpy.array([d[k][i]])
for k in d.keys()
if not k.endswith('_template')
and k not in ['gating', 'search', 'template_boundaries']
}
# Headers will store the headers that will appear in the table.
headers = []
data.append([])
Expand All @@ -170,24 +190,35 @@ for ifo in files.keys():
if args.include_summary_page_link:
data[-1].append(pycbc.results.dq.get_summary_page_link(ifo, utc))
headers.append("Detector status")
else:
data[-1].append(ifo)
headers.append("Ifo")

# End times
data[-1].append(str(datetime.datetime(*utc)))
data[-1].append('%.3f' % time)
headers.append("UTC End Time")
headers.append("GPS End time")

#headers.append("Stat")
# Determine statistic naming
if args.sngl_ranking == "newsnr":
sngl_stat_name = "Reweighted SNR"
elif args.sngl_ranking == "newsnr_sgveto":
sngl_stat_name = "Reweighted SNR (+sgveto)"
elif args.sngl_ranking == "newsnr_sgveto_psdvar":
sngl_stat_name = "Reweighted SNR (+sgveto+psdvar)"
elif args.sngl_ranking == "snr":
sngl_stat_name = "SNR"
else:
sngl_stat_name = args.sngl_ranking

stat = rank_method.get_sngl_ranking(trig_dict)
headers.append(sngl_stat_name)
data[-1].append('%5.2f' % stat[0])

# SNR and phase (not showing any single-det stat here)
data[-1].append('%5.2f' % d['snr'][i])
data[-1].append('%5.2f' % d['coa_phase'][i])
#data[-1].append('%5.2f' % ranking.newsnr(d['snr'][i], rchisq))
headers.append("ρ")
headers.append("Phase")
#headers.append("Stat")

# Signal-glitch discrimators
data[-1].append('%5.2f' % rchisq)
data[-1].append('%i' % d['chisq_dof'][i])
Expand Down Expand Up @@ -218,7 +249,12 @@ for ifo in files.keys():
headers.append("s<sub>2z</sub>")
headers.append("Duration")

html += str(pycbc.results.static_table(data, headers))
html += str(pycbc.results.static_table(
data,
headers,
columns_max=args.max_columns,
row_labels=row_labels
))
###############################################################################

pycbc.results.save_fig_with_metadata(html, args.output_file, {},
Expand Down
Loading

0 comments on commit ec648b2

Please sign in to comment.