diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 65fabf82d7a..1cb0ed34aa5 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -13,7 +13,7 @@ jobs: max-parallel: 60 matrix: os: [ubuntu-20.04] - python-version: [3.8, 3.9, '3.10', '3.11'] + python-version: [3.9, '3.10', '3.11'] test-type: [unittest, search, docs] steps: - uses: actions/checkout@v4 @@ -25,7 +25,7 @@ jobs: run: | sudo apt-get -o Acquire::Retries=3 update sudo apt-get -o Acquire::Retries=3 install *fftw3* mpi intel-mkl* git-lfs graphviz - pip install "tox<4.0.0" pip setuptools --upgrade + pip install tox pip setuptools --upgrade - name: installing auxiliary data files run: | GIT_CLONE_PROTECTION_ACTIVE=false GIT_LFS_SKIP_SMUDGE=1 git clone https://git.ligo.org/lscsoft/lalsuite-extra diff --git a/.github/workflows/distribution.yml b/.github/workflows/distribution.yml index 7b315e1f896..cb5e9b8e40c 100644 --- a/.github/workflows/distribution.yml +++ b/.github/workflows/distribution.yml @@ -12,7 +12,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-20.04, macos-12] + os: [ubuntu-20.04] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/mac-test.yml b/.github/workflows/mac-test.yml index 111bb9aec66..84e3069f081 100644 --- a/.github/workflows/mac-test.yml +++ b/.github/workflows/mac-test.yml @@ -12,18 +12,46 @@ jobs: strategy: max-parallel: 4 matrix: - os: [macos-12] - python-version: [3.8, 3.9, '3.10', '3.11'] + os: [macos-latest] + python-version: + - '3.10' + - '3.11' + + # this is needed for conda environments to activate automatically + defaults: + run: + shell: bash -el {0} + steps: - uses: actions/checkout@v1 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + + - name: Cache conda packages + uses: actions/cache@v4 + env: + # increment to reset cache + CACHE_NUMBER: 0 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ 
matrix.python-version}}-${{ env.CACHE_NUMBER }} + + - name: Configure conda + uses: conda-incubator/setup-miniconda@v3 with: + activate-environment: test + channels: conda-forge + miniforge-version: latest python-version: ${{ matrix.python-version }} - - run: | - brew install fftw openssl gsl - pip install --upgrade pip setuptools "tox<4.0.0" - - name: run basic pycbc test suite + + - name: Conda info + run: conda info --all + + - name: Install tox + run: | + conda install \ + pip \ + setuptools \ + tox + + - name: Run basic pycbc test suite run: | - sudo chmod -R 777 /usr/local/miniconda/ tox -e py-unittest diff --git a/Dockerfile b/Dockerfile index 97997bc5ab9..9ed52ffd5fb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,8 +10,8 @@ ADD docker/etc/cvmfs/config-osg.opensciencegrid.org.conf /etc/cvmfs/config-osg.o RUN dnf -y install https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-latest.noarch.rpm && dnf -y install cvmfs cvmfs-config-default && dnf clean all && dnf makecache && \ dnf -y groupinstall "Development Tools" \ "Scientific Support" && \ - rpm -e --nodeps git perl-Git && dnf -y install @python39 rsync zlib-devel libpng-devel libjpeg-devel sqlite-devel openssl-devel fftw-libs-single fftw-devel fftw fftw-libs-long fftw-libs fftw-libs-double gsl gsl-devel hdf5 hdf5-devel python39-devel swig which osg-ca-certs && python3.9 -m pip install --upgrade pip setuptools wheel cython && python3.9 -m pip install mkl ipython jupyter jupyterhub jupyterlab lalsuite && \ - dnf -y install https://repo.opensciencegrid.org/osg/3.5/el8/testing/x86_64/osg-wn-client-3.5-5.osg35.el8.noarch.rpm && dnf clean all + rpm -e --nodeps git perl-Git && dnf -y install @python39 rsync zlib-devel libpng-devel libjpeg-devel sqlite-devel openssl-devel fftw-libs-single fftw-devel fftw fftw-libs-long fftw-libs fftw-libs-double gsl gsl-devel hdf5 hdf5-devel python39-devel swig which osg-ca-certs && python3.9 -m pip install --upgrade pip setuptools wheel cython && python3.9 -m pip 
install mkl ipython jupyter jupyterhub jupyterlab lalsuite==7.24 && \ + dnf -y install https://repo.opensciencegrid.org/osg/3.5/el8/testing/x86_64/osg-wn-client-3.5-5.osg35.el8.noarch.rpm && dnf -y install pelican-osdf-compat-7.10.11-1.x86_64 && dnf -y install pelican-7.10.11-1.x86_64 && dnf clean all # set up environment RUN cd / && \ diff --git a/bin/all_sky_search/pycbc_add_statmap b/bin/all_sky_search/pycbc_add_statmap index 1b8b7006e67..da2ae5f536e 100755 --- a/bin/all_sky_search/pycbc_add_statmap +++ b/bin/all_sky_search/pycbc_add_statmap @@ -310,14 +310,14 @@ if injection_style: for bg_fname in args.background_files: bg_f = h5py.File(bg_fname, 'r') ifo_combo_key = bg_f.attrs['ifos'].replace(' ','') - _, far[ifo_combo_key] = significance.get_far( + _, far[ifo_combo_key], _ = significance.get_far( bg_f['background/stat'][:], f['foreground/stat'][:], bg_f['background/decimation_factor'][:], bg_f.attrs['background_time'], **significance_dict[ifo_combo_key]) - _, far_exc[ifo_combo_key] = \ + _, far_exc[ifo_combo_key], _ = \ significance.get_far( bg_f['background_exc/stat'][:], f['foreground/stat'][:], @@ -329,7 +329,7 @@ else: # background included for f_in in files: ifo_combo_key = get_ifo_string(f_in).replace(' ','') - _, far[ifo_combo_key] = \ + _, far[ifo_combo_key], _ = \ significance.get_far( f_in['background/stat'][:], f['foreground/stat'][:], @@ -337,7 +337,7 @@ else: f_in.attrs['background_time'], **significance_dict[ifo_combo_key]) - _, far_exc[ifo_combo_key] = \ + _, far_exc[ifo_combo_key], _ = \ significance.get_far( f_in['background_exc/stat'][:], f['foreground/stat'][:], @@ -608,7 +608,7 @@ while True: fg_time_ct[key] -= args.cluster_window bg_t_y = conv.sec_to_year(bg_time_ct[key]) fg_t_y = conv.sec_to_year(fg_time_ct[key]) - bg_far, fg_far = significance.get_far( + bg_far, fg_far, _ = significance.get_far( sep_bg_data[key].data['stat'], sep_fg_data[key].data['stat'], sep_bg_data[key].data['decimation_factor'], @@ -632,7 +632,7 @@ while True: 
logging.info("Recalculating combined IFARs") for key in all_ifo_combos: - _, far[key] = significance.get_far( + _, far[key], _ = significance.get_far( sep_bg_data[key].data['stat'], combined_fg_data.data['stat'], sep_bg_data[key].data['decimation_factor'], diff --git a/bin/all_sky_search/pycbc_coinc_findtrigs b/bin/all_sky_search/pycbc_coinc_findtrigs index d76e3b9109a..d418624ceb2 100644 --- a/bin/all_sky_search/pycbc_coinc_findtrigs +++ b/bin/all_sky_search/pycbc_coinc_findtrigs @@ -411,7 +411,7 @@ def process_template(tnum): # with any trigger in the fixed network tidx = len(threshes) for i in range(1, len(threshes)): - if pivot_stat[-1] >= pivot_lower[kidx]: + if pivot_stat[-1] >= pivot_lower[i]: tidx = i break diff --git a/bin/all_sky_search/pycbc_coinc_statmap b/bin/all_sky_search/pycbc_coinc_statmap index d7b661606ff..3b0a4379709 100755 --- a/bin/all_sky_search/pycbc_coinc_statmap +++ b/bin/all_sky_search/pycbc_coinc_statmap @@ -241,7 +241,7 @@ fore_stat = all_trigs.stat[fore_locs] # Cumulative array of inclusive background triggers and the number of # inclusive background triggers louder than each foreground trigger -bg_far, fg_far = significance.get_far( +bg_far, fg_far, sig_info = significance.get_far( back_stat, fore_stat, all_trigs.decimation_factor[back_locs], @@ -250,7 +250,7 @@ bg_far, fg_far = significance.get_far( # Cumulative array of exclusive background triggers and the number # of exclusive background triggers louder than each foreground trigger -bg_far_exc, fg_far_exc = significance.get_far( +bg_far_exc, fg_far_exc, exc_sig_info = significance.get_far( exc_zero_trigs.stat, fore_stat, exc_zero_trigs.decimation_factor, @@ -288,10 +288,14 @@ if fore_locs.sum() > 0: fap = 1 - numpy.exp(- coinc_time / ifar) f['foreground/ifar'] = conv.sec_to_year(ifar) f['foreground/fap'] = fap + for key, value in sig_info.items(): + f['foreground'].attrs[key] = value ifar_exc = 1. 
/ fg_far_exc fap_exc = 1 - numpy.exp(- coinc_time_exc / ifar_exc) f['foreground/ifar_exc'] = conv.sec_to_year(ifar_exc) f['foreground/fap_exc'] = fap_exc + for key, value in exc_sig_info.items(): + f['foreground'].attrs[key + '_exc'] = value else: f['foreground/ifar'] = numpy.array([]) f['foreground/fap'] = numpy.array([]) @@ -425,7 +429,7 @@ while numpy.any(ifar_foreground >= background_time): logging.info("Calculating FAN from background statistic values") back_stat = all_trigs.stat[back_locs] fore_stat = all_trigs.stat[fore_locs] - bg_far, fg_far = significance.get_far( + bg_far, fg_far, sig_info = significance.get_far( back_stat, fore_stat, all_trigs.decimation_factor[back_locs], @@ -452,7 +456,7 @@ while numpy.any(ifar_foreground >= background_time): # Exclusive background doesn't change when removing foreground triggers. # So we don't have to take background ifar, just repopulate ifar_foreground else : - _, fg_far_exc = significance.get_far( + _, fg_far_exc, _ = significance.get_far( exc_zero_trigs.stat, fore_stat, exc_zero_trigs.decimation_factor, @@ -479,6 +483,8 @@ while numpy.any(ifar_foreground >= background_time): fap = 1 - numpy.exp(- coinc_time / ifar) f['foreground_h%s/ifar' % h_iterations] = conv.sec_to_year(ifar) f['foreground_h%s/fap' % h_iterations] = fap + for key, value in sig_info.items(): + f['foreground_h%s' % h_iterations].attrs[key] = value # Update ifar and fap for other foreground triggers for i in range(len(ifar)): diff --git a/bin/all_sky_search/pycbc_coinc_statmap_inj b/bin/all_sky_search/pycbc_coinc_statmap_inj index ed1fd819264..8dad8476db6 100644 --- a/bin/all_sky_search/pycbc_coinc_statmap_inj +++ b/bin/all_sky_search/pycbc_coinc_statmap_inj @@ -91,7 +91,7 @@ f.attrs['foreground_time'] = coinc_time if len(zdata) > 0: - _, fg_far_exc = significance.get_far( + _, fg_far_exc, exc_sig_info = significance.get_far( back_stat, zdata.stat, dec_fac, @@ -108,6 +108,8 @@ if len(zdata) > 0: fap_exc = 1 - numpy.exp(- coinc_time / ifar_exc)
f['foreground/ifar_exc'] = conv.sec_to_year(ifar_exc) f['foreground/fap_exc'] = fap_exc + for key, value in exc_sig_info.items(): + f['foreground'].attrs[key + '_exc'] = value else: f['foreground/ifar_exc'] = numpy.array([]) diff --git a/bin/all_sky_search/pycbc_exclude_zerolag b/bin/all_sky_search/pycbc_exclude_zerolag index c5b71571ef1..15ae4ca9533 100644 --- a/bin/all_sky_search/pycbc_exclude_zerolag +++ b/bin/all_sky_search/pycbc_exclude_zerolag @@ -94,7 +94,7 @@ for k in filtered_trigs.data: f_out['background_exc/%s' % k] = filtered_trigs.data[k] logging.info('Recalculating IFARs') -bg_far, fg_far = significance.get_far( +bg_far, fg_far, sig_info = significance.get_far( filtered_trigs.data['stat'], f_in['foreground/stat'][:], filtered_trigs.data['decimation_factor'], @@ -110,6 +110,8 @@ bg_ifar_exc = 1. / bg_far logging.info('Writing updated ifars to file') f_out['foreground/ifar_exc'][:] = conv.sec_to_year(fg_ifar_exc) f_out['background_exc/ifar'][:] = conv.sec_to_year(bg_ifar_exc) +for key, value in sig_info.items(): + f_out['foreground'].attrs[key + '_exc'] = value fg_time_exc = conv.sec_to_year(f_in.attrs['foreground_time_exc']) f_out['foreground/fap_exc'][:] = 1 - np.exp(-fg_time_exc / fg_ifar_exc) diff --git a/bin/all_sky_search/pycbc_sngls_statmap b/bin/all_sky_search/pycbc_sngls_statmap index 5ebd6b39303..025b3ae0b3f 100755 --- a/bin/all_sky_search/pycbc_sngls_statmap +++ b/bin/all_sky_search/pycbc_sngls_statmap @@ -109,6 +109,7 @@ assert ifo + '/time' in all_trigs.data logging.info("We have %s triggers" % len(all_trigs.stat)) logging.info("Clustering triggers") all_trigs = all_trigs.cluster(args.cluster_window) +logging.info("%s triggers remain" % len(all_trigs.stat)) fg_time = float(all_trigs.attrs['foreground_time']) @@ -139,12 +140,13 @@ significance_dict = significance.digest_significance_options([ifo], args) # Cumulative array of inclusive background triggers and the number of # inclusive background triggers louder than each foreground trigger 
-bg_far, fg_far = significance.get_far( +bg_far, fg_far, sig_info = significance.get_far( back_stat, fore_stat, bkg_dec_facs, fg_time, - **significance_dict[ifo]) + **significance_dict[ifo] +) fg_far = significance.apply_far_limit( fg_far, @@ -192,7 +194,7 @@ back_exc_locs = back_exc_locs[to_keep] # Cumulative array of exclusive background triggers and the number # of exclusive background triggers louder than each foreground trigger -bg_far_exc, fg_far_exc = significance.get_far( +bg_far_exc, fg_far_exc, exc_sig_info = significance.get_far( back_stat_exc, fore_stat, bkg_exc_dec_facs, @@ -231,6 +233,10 @@ f['foreground/fap'] = fap fap_exc = 1 - numpy.exp(- fg_time_exc / fg_ifar_exc) f['foreground/ifar_exc'] = conv.sec_to_year(fg_ifar_exc) f['foreground/fap_exc'] = fap_exc +for key, value in sig_info.items(): + f['foreground'].attrs[key] = value +for key, value in exc_sig_info.items(): + f['foreground'].attrs[f'{key}_exc'] = value if 'name' in all_trigs.attrs: f.attrs['name'] = all_trigs.attrs['name'] @@ -290,6 +296,10 @@ while numpy.any(ifar_louder > hier_ifar_thresh_s): f['foreground_h%s/ifar' % h_iterations] = conv.sec_to_year(fg_ifar) f['foreground_h%s/ifar_exc' % h_iterations] = conv.sec_to_year(fg_ifar_exc) f['foreground_h%s/fap' % h_iterations] = fap + for key, value in sig_info.items(): + f['foreground_h%s' % h_iterations].attrs[key] = value + for key, value in exc_sig_info.items(): + f['foreground_h%s' % h_iterations].attrs[key + "_exc"] = value for k in all_trigs.data: f['foreground_h%s/' % h_iterations + k] = all_trigs.data[k] # Add the iteration number of hierarchical removals done. 
@@ -342,7 +352,7 @@ while numpy.any(ifar_louder > hier_ifar_thresh_s): logging.info("Calculating FAN from background statistic values") back_stat = fore_stat = all_trigs.stat - bg_far, fg_far = significance.get_far( + bg_far, fg_far, sig_info = significance.get_far( back_stat, fore_stat, numpy.ones_like(back_stat), @@ -368,11 +378,12 @@ while numpy.any(ifar_louder > hier_ifar_thresh_s): # triggers are being removed via inclusive or exclusive background. if is_bkg_inc: ifar_louder = fg_ifar + exc_sig_info = {} # Exclusive background doesn't change when removing foreground triggers. # So we don't have to take bg_far_exc, just repopulate fg_ifar_exc else: - _, fg_far_exc = significance.get_far( + _, fg_far_exc, exc_sig_info = significance.get_far( back_stat_exc, fore_stat, numpy.ones_like(back_stat_exc), @@ -400,6 +411,10 @@ while numpy.any(ifar_louder > hier_ifar_thresh_s): # Write ranking statistic to file just for downstream plotting code f['foreground_h%s/stat' % h_iterations] = fore_stat + for key, value in sig_info.items(): + f['foreground_h%s' % h_iterations].attrs[key] = value + for key, value in exc_sig_info.items(): + f['foreground_h%s' % h_iterations].attrs[key + "_exc"] = value fap = 1 - numpy.exp(- fg_time / fg_ifar) f['foreground_h%s/ifar' % h_iterations] = conv.sec_to_year(fg_ifar) f['foreground_h%s/fap' % h_iterations] = fap diff --git a/bin/all_sky_search/pycbc_sngls_statmap_inj b/bin/all_sky_search/pycbc_sngls_statmap_inj index 7d164c9383d..34b75095b31 100644 --- a/bin/all_sky_search/pycbc_sngls_statmap_inj +++ b/bin/all_sky_search/pycbc_sngls_statmap_inj @@ -110,7 +110,7 @@ significance_dict = significance.digest_significance_options([ifo], args) # Cumulative array of exclusive background triggers and the number # of exclusive background triggers louder than each foreground trigger -bg_far_exc, fg_far_exc = significance.get_far( +bg_far_exc, fg_far_exc, sig_info = significance.get_far( back_stat_exc, fore_stat, bkg_exc_dec_facs, @@ -137,6 +137,9 @@ 
fap_exc = 1 - numpy.exp(- fg_time_exc / fg_ifar_exc) f['foreground/ifar_exc'] = conv.sec_to_year(fg_ifar_exc) f['foreground/fap_exc'] = fap_exc +for key, value in sig_info.items(): + f['foreground'].attrs[key + '_exc'] = value + if 'name' in all_trigs.attrs: f.attrs['name'] = all_trigs.attrs['name'] diff --git a/docs/conf.py b/docs/conf.py index e3f24773677..836ab97b4ab 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -108,7 +108,6 @@ # a list of builtin themes. import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -117,9 +116,6 @@ 'logo_only':True, } -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - html_context = { 'display_github': True, 'github_user': 'gwastro', diff --git a/pycbc/events/significance.py b/pycbc/events/significance.py index 86f2a99e7ea..5e69f381439 100644 --- a/pycbc/events/significance.py +++ b/pycbc/events/significance.py @@ -55,6 +55,8 @@ def count_n_louder(bstat, fstat, dec, The cumulative array of background triggers. 
fore_n_louder: numpy.ndarray The number of background triggers above each foreground trigger + {} : (empty) dictionary + Ensure we return the same tuple of objects as n_louder_from_fit() """ sort = bstat.argsort() bstat = bstat[sort] @@ -84,7 +86,9 @@ def count_n_louder(bstat, fstat, dec, unsort = sort.argsort() back_cum_num = n_louder[unsort] - return back_cum_num, fore_n_louder + + # Empty dictionary to match the return from n_louder_from_fit + return back_cum_num, fore_n_louder, {} def n_louder_from_fit(back_stat, fore_stat, dec_facs, @@ -117,12 +121,17 @@ def n_louder_from_fit(back_stat, fore_stat, dec_facs, fg_n_louder: numpy.ndarray The estimated number of background events louder than each foreground event + sig_info : a dictionary + Information regarding the significance fit """ # Calculate the fitting factor of the ranking statistic distribution - alpha, _ = trstats.fit_above_thresh(fit_function, back_stat, - thresh=fit_threshold, - weights=dec_facs) + alpha, sig_alpha = trstats.fit_above_thresh( + fit_function, + back_stat, + thresh=fit_threshold, + weights=dec_facs + ) # Count background events above threshold as the cum_fit is # normalised to 1 @@ -153,7 +162,7 @@ def n_louder_from_fit(back_stat, fore_stat, dec_facs, # Count the number of below-threshold background events louder than the # bg and foreground - bg_n_louder[bg_below], fg_n_louder[fg_below] = \ + bg_n_louder[bg_below], fg_n_louder[fg_below],_ = \ count_n_louder(back_stat[bg_below], fore_stat[fg_below], dec_facs) # As we have only counted the louder below-threshold events, need to @@ -162,7 +171,8 @@ def n_louder_from_fit(back_stat, fore_stat, dec_facs, bg_n_louder[bg_below] += n_above fg_n_louder[fg_below] += n_above - return bg_n_louder, fg_n_louder + sig_info = {'alpha': alpha, 'sig_alpha': sig_alpha, 'n_above': n_above} + return bg_n_louder, fg_n_louder, sig_info _significance_meth_dict = { @@ -218,7 +228,7 @@ def get_far(back_stat, fore_stat, dec_facs, a FAR """ - bg_n_louder, 
fg_n_louder = get_n_louder( + bg_n_louder, fg_n_louder, significance_info = get_n_louder( back_stat, fore_stat, dec_facs, @@ -236,7 +246,10 @@ def get_far(back_stat, fore_stat, dec_facs, bg_far = bg_n_louder / background_time fg_far = fg_n_louder / background_time - return bg_far, fg_far + if "n_above" in significance_info: + significance_info["rate_above"] = significance_info["n_above"] / background_time + + return bg_far, fg_far, significance_info def insert_significance_option_group(parser): @@ -285,6 +298,7 @@ def positive_float(inp): logger.warning("Value provided to positive_float is less than zero, " "this is not allowed") raise ValueError + return fl_in diff --git a/pycbc/workflow/pegasus_sites.py b/pycbc/workflow/pegasus_sites.py index 15f578326d0..6c4abc10e93 100644 --- a/pycbc/workflow/pegasus_sites.py +++ b/pycbc/workflow/pegasus_sites.py @@ -93,18 +93,18 @@ def add_condorpool_symlink_site(sitecat, cp): value="true") site.add_profiles(Namespace.PEGASUS, key='auxillary.local', value="true") - site.add_profiles(Namespace.CONDOR, key="+OpenScienceGrid", + site.add_profiles(Namespace.CONDOR, key="My.OpenScienceGrid", value="False") site.add_profiles(Namespace.CONDOR, key="should_transfer_files", value="Yes") site.add_profiles(Namespace.CONDOR, key="when_to_transfer_output", value="ON_EXIT_OR_EVICT") site.add_profiles(Namespace.CONDOR, key="getenv", value="True") - site.add_profiles(Namespace.CONDOR, key="+DESIRED_Sites", + site.add_profiles(Namespace.CONDOR, key="My.DESIRED_Sites", value='"nogrid"') - site.add_profiles(Namespace.CONDOR, key="+IS_GLIDEIN", + site.add_profiles(Namespace.CONDOR, key="My.IS_GLIDEIN", value='"False"') - site.add_profiles(Namespace.CONDOR, key="+flock_local", + site.add_profiles(Namespace.CONDOR, key="My.flock_local", value="True") site.add_profiles(Namespace.DAGMAN, key="retry", value="2") sitecat.add_sites(site) @@ -125,18 +125,18 @@ def add_condorpool_copy_site(sitecat, cp): value=True) site.add_profiles(Namespace.PEGASUS, 
key='auxillary.local', value="true") - site.add_profiles(Namespace.CONDOR, key="+OpenScienceGrid", + site.add_profiles(Namespace.CONDOR, key="My.OpenScienceGrid", value="False") site.add_profiles(Namespace.CONDOR, key="should_transfer_files", value="Yes") site.add_profiles(Namespace.CONDOR, key="when_to_transfer_output", value="ON_EXIT_OR_EVICT") site.add_profiles(Namespace.CONDOR, key="getenv", value="True") - site.add_profiles(Namespace.CONDOR, key="+DESIRED_Sites", + site.add_profiles(Namespace.CONDOR, key="My.DESIRED_Sites", value='"nogrid"') - site.add_profiles(Namespace.CONDOR, key="+IS_GLIDEIN", + site.add_profiles(Namespace.CONDOR, key="My.IS_GLIDEIN", value='"False"') - site.add_profiles(Namespace.CONDOR, key="+flock_local", + site.add_profiles(Namespace.CONDOR, key="My.flock_local", value="True") site.add_profiles(Namespace.DAGMAN, key="retry", value="2") sitecat.add_sites(site) @@ -166,18 +166,18 @@ def add_condorpool_shared_site(sitecat, cp, local_path, local_url): value="true") site.add_profiles(Namespace.PEGASUS, key='auxillary.local', value="true") - site.add_profiles(Namespace.CONDOR, key="+OpenScienceGrid", + site.add_profiles(Namespace.CONDOR, key="My.OpenScienceGrid", value="False") site.add_profiles(Namespace.CONDOR, key="should_transfer_files", value="Yes") site.add_profiles(Namespace.CONDOR, key="when_to_transfer_output", value="ON_EXIT_OR_EVICT") site.add_profiles(Namespace.CONDOR, key="getenv", value="True") - site.add_profiles(Namespace.CONDOR, key="+DESIRED_Sites", + site.add_profiles(Namespace.CONDOR, key="My.DESIRED_Sites", value='"nogrid"') - site.add_profiles(Namespace.CONDOR, key="+IS_GLIDEIN", + site.add_profiles(Namespace.CONDOR, key="My.IS_GLIDEIN", value='"False"') - site.add_profiles(Namespace.CONDOR, key="+flock_local", + site.add_profiles(Namespace.CONDOR, key="My.flock_local", value="True") site.add_profiles(Namespace.DAGMAN, key="retry", value="2") # Need to set PEGASUS_HOME @@ -210,21 +210,24 @@ def add_osg_site(sitecat, 
cp): value="ON_SUCCESS") site.add_profiles(Namespace.CONDOR, key="success_exit_code", value="0") - site.add_profiles(Namespace.CONDOR, key="+OpenScienceGrid", + site.add_profiles(Namespace.CONDOR, key="My.OpenScienceGrid", value="True") site.add_profiles(Namespace.CONDOR, key="getenv", value="False") - site.add_profiles(Namespace.CONDOR, key="+InitializeModulesEnv", + site.add_profiles(Namespace.CONDOR, key="ulog_execute_attrs", + value="GLIDEIN_Site") + site.add_profiles(Namespace.CONDOR, key="My.InitializeModulesEnv", value="False") - site.add_profiles(Namespace.CONDOR, key="+SingularityCleanEnv", + site.add_profiles(Namespace.CONDOR, key="My.SingularityCleanEnv", value="True") + site.add_profiles(Namespace.CONDOR, key="My.DAGManNodesMask", + value=r"\"0,1,2,4,5,7,8,9,10,11,12,13,16,17,24,27,35,36,40\"") site.add_profiles(Namespace.CONDOR, key="Requirements", value="(HAS_SINGULARITY =?= TRUE) && " - "(HAS_LIGO_FRAMES =?= True) && " "(IS_GLIDEIN =?= True)") - cvmfs_loc = '"/cvmfs/singularity.opensciencegrid.org/pycbc/pycbc-el8:v' + cvmfs_loc = '"/cvmfs/singularity.opensciencegrid.org/pycbc/pycbc-el8:' cvmfs_loc += sing_version + '"' - site.add_profiles(Namespace.CONDOR, key="+SingularityImage", + site.add_profiles(Namespace.CONDOR, key="My.SingularityImage", value=cvmfs_loc) # On OSG failure rate is high site.add_profiles(Namespace.DAGMAN, key="retry", value="4") diff --git a/requirements.txt b/requirements.txt index 4857c0aa9d5..fe051839f51 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,7 @@ jinja2 mpld3>=0.3 beautifulsoup4>=4.6.0 cython -lalsuite!=7.2 +lalsuite!=7.2,<7.25 lscsoft-glue>=1.59.3 ligo-segments tqdm diff --git a/setup.py b/setup.py index c02e9f9298e..3c775bf596c 100755 --- a/setup.py +++ b/setup.py @@ -50,7 +50,7 @@ 'pegasus-wms.api >= 5.0.6', 'python-ligo-lw >= 1.7.0', 'ligo-segments', - 'lalsuite!=7.2', + 'lalsuite!=7.2,<7.25', 'lscsoft-glue>=1.59.3', 'pykerr', ] @@ -119,7 +119,7 @@ def __getattr__(self, attr): vinfo = 
_version_helper.generate_git_version_info() except: vinfo = vdummy() - vinfo.version = '2.3.8' + vinfo.version = '2.3.9' vinfo.release = 'True' version_script = f"""# coding: utf-8 @@ -217,7 +217,7 @@ def run(self): 'vetoes.chisq'] ext = [] cython_compile_args = ['-O3', '-w', '-ffast-math', - '-ffinite-math-only'] + '-fno-finite-math-only'] if platform.machine() == 'x86_64': cython_compile_args.append('-msse4.2') diff --git a/test/test_significance_module.py b/test/test_significance_module.py index 6bcf173e607..649cada41da 100644 --- a/test/test_significance_module.py +++ b/test/test_significance_module.py @@ -170,7 +170,7 @@ def setUp(self): method_dict['fit_threshold'] = None if not function else 0 def meth_test(self, md=method_dict): - bg_n_louder, fg_n_louder = significance.get_n_louder( + bg_n_louder, fg_n_louder, sig_info = significance.get_n_louder( self.test_bg_stat, self.test_fg_stat, self.dec_facs, @@ -207,6 +207,9 @@ def meth_test(self, md=method_dict): self.assertTrue(np.array_equal(fg_n_louder[fore_stat_sort], fg_n_louder[fore_far_sort][::-1])) + # Tests on the significance info output dictionary + self.assertTrue(isinstance(sig_info, dict)) + setattr(SignificanceMethodTest, 'test_%s_%s' % (method, function), meth_test) diff --git a/test/validation_code/old_coinc.py b/test/validation_code/old_coinc.py index 4d150d64a31..a8eb869f8a5 100644 --- a/test/validation_code/old_coinc.py +++ b/test/validation_code/old_coinc.py @@ -361,8 +361,8 @@ def cluster_coincs(stat, time1, time2, timeslide_id, slide, window, argmax=numpy else: time = 0.5 * (time2 + time1) - tslide = timeslide_id.astype(numpy.float128) - time = time.astype(numpy.float128) + tslide = timeslide_id.astype(numpy.longdouble) + time = time.astype(numpy.longdouble) span = (time.max() - time.min()) + window * 10 time = time + span * tslide @@ -411,8 +411,8 @@ def cluster_coincs_multiifo(stat, time_coincs, timeslide_id, slide, window, argm nifos_minusone = (num_ifos - numpy.ones_like(num_ifos)) 
time_avg = time_avg + (nifos_minusone * timeslide_id * slide)/num_ifos - tslide = timeslide_id.astype(numpy.float128) - time_avg = time_avg.astype(numpy.float128) + tslide = timeslide_id.astype(numpy.longdouble) + time_avg = time_avg.astype(numpy.longdouble) span = (time_avg.max() - time_avg.min()) + window * 10 time_avg = time_avg + span * tslide diff --git a/tox.ini b/tox.ini index 5663066ecbf..0262f1ad07e 100644 --- a/tox.ini +++ b/tox.ini @@ -10,21 +10,35 @@ deps = :preinstall: -rrequirements.txt -rcompanion.txt mkl;'arm' not in platform_machine +conda_deps = + c-compiler + cxx-compiler + gsl + mysqlclient + ; these packages don't install cleanly with pip, conda has patches + ligo-segments + python-ligo-lw [testenv] -allowlist_externals = bash -passenv=LAL_DATA_PATH -conda_deps= - openssl=1.1 - m2crypto +allowlist_externals = + bash + conda conda_channels=conda-forge - -# This test should run on almost anybody's environment -[testenv:py-unittest] +conda_deps = + {[base]conda_deps} +commands_pre = + conda list +commands = pytest deps = {[base]deps} pytest -commands = pytest +passenv=LAL_DATA_PATH +platform = + lin: linux + mac: darwin + +# This test should run on almost anybody's environment +[testenv:py-unittest] # The following are long running or may require # some extra system-level dependencies or static files. @@ -50,16 +64,6 @@ setenv = PYCBC_TEST_TYPE=inference commands = bash tools/pycbc_test_suite.sh [testenv:py-docs] -deps = - {[base]deps} -conda_deps= - mysqlclient - gcc_linux-64>=12.2.0 - gxx_linux-64>=12.2.0 - binutils_linux-64>=2.39 - gsl - lapack==3.6.1 - openmpi -conda_channels=conda-forge -setenv = PYCBC_TEST_TYPE=docs +setenv = + PYCBC_TEST_TYPE=docs commands = bash tools/pycbc_test_suite.sh