Merge pull request #38 from jacopok/removing-pycbc
Removing pycbc and simplifying dependencies
jacopok authored Sep 16, 2022
2 parents 84a7d93 + ee466dc commit 7512b01
Showing 14 changed files with 4,892 additions and 1,071 deletions.
20 changes: 10 additions & 10 deletions .github/workflows/ci.yaml
@@ -2,24 +2,24 @@ name: CI Pipeline for mlgw_bns

on:
- push
- pull_request
# - pull_request

jobs:
build:
runs-on: ${{matrix.platform}}
strategy:
matrix:
platform: [ubuntu-latest]
python-version: ['3.7']
python-version: ['3.8', '3.9', '3.10']

steps:
- name: Clone and install TEOBResumS repo
run: |
cd ..
git clone https://bitbucket.org/eob_ihes/teobresums/
- name: Install TEOBResumS deps
run: |
sudo apt-get install -y libconfig-dev libgsl-dev
# - name: Clone and install TEOBResumS repo
# run: |
# cd ..
# git clone https://bitbucket.org/eob_ihes/teobresums/
# - name: Install TEOBResumS deps
# run: |
# sudo apt-get install -y libconfig-dev libgsl-dev
- uses: actions/checkout@v1
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
@@ -28,7 +28,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install poetry
poetry install -v
poetry install -v --with dev
- name: Run tests
run: |
poetry run coverage run -m pytest -v --junit-xml tests/test_results.xml
8 changes: 7 additions & 1 deletion CHANGELOG.md
@@ -12,7 +12,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
post-Newtonian expressions.
(Right now, this is still [not smooth](https://github.com/jacopok/mlgw_bns/issues/36).)
- Mention of this changelog in the README
- Reference documentation about the mathematical details of higher order modes.
- Reference documentation about the mathematical details of higher order modes
- Removed dependence on `pycbc` for PSD computations (see [this PR](https://github.com/jacopok/mlgw_bns/pull/38)):
this significantly decreases the dependency load of the package

### Removed

- Python 3.7 support

## [0.10.2] - 2022-07-01

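The key change in this PR, noted in the CHANGELOG entry above, is that the power spectral density used for mismatch computations is no longer obtained from `pycbc.psd` but read from a text file shipped with the package (`mlgw_bns/data/ET_psd.txt`, added below) and interpolated with `scipy`. A minimal sketch of that approach, assuming the two-column layout (frequency in Hz, PSD value) implied by the `np.loadtxt` call in `model_validation.py`; the helper name `load_psd` and the cutoff arguments are illustrative, not part of the package:

```python
# Minimal sketch of the new PSD handling: load a tabulated PSD bundled with the
# package and restrict it to the band of interest before interpolating.
# The two-column layout (frequency [Hz], S_n(f)) is inferred from the diff below.
from pathlib import Path

import numpy as np
from scipy.interpolate import interp1d

PSD_PATH = Path("mlgw_bns") / "data"  # package data folder, as in model_validation.py


def load_psd(psd_name: str = "ET", f_min: float = 5.0, f_max: float = 2048.0):
    """Return an interpolator for S_n(f), restricted to [f_min, f_max]."""
    data = np.loadtxt(PSD_PATH / f"{psd_name}_psd.txt")
    frequencies, psd_values = data[:, 0], data[:, 1]

    mask = (frequencies > f_min) & (frequencies < f_max)
    return interp1d(frequencies[mask], psd_values[mask])


# Usage: psd = load_psd(); psd(np.array([20.0, 100.0, 500.0]))
```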
315 changes: 315 additions & 0 deletions docs/mlgw_bns_refs.bib

Large diffs are not rendered by default.

121 changes: 121 additions & 0 deletions docs/refs.bib

Large diffs are not rendered by default.

11 changes: 2 additions & 9 deletions mlgw_bns/__init__.py
@@ -5,17 +5,10 @@
The code can be found on the `github page <https://github.com/jacopok/mlgw_bns>`_.
"""
try:
from importlib import metadata # type: ignore
except ImportError:
# python <3.8 compatibility
import importlib_metadata as metadata # type: ignore
from importlib import metadata # type: ignore

import toml # type: ignore

from .model import Model, ParametersWithExtrinsic

try:
__version__ = metadata.version(__package__) # type: ignore
except metadata.PackageNotFoundError: # type: ignore
__version__ = toml.load("pyproject.toml")["tool"]["poetry"]["version"] + "dev"
__version__ = metadata.version(__package__) # type: ignore
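With Python 3.7 support dropped (see the CHANGELOG and CI matrix above), the standard-library `importlib.metadata` is always available, so both the `importlib_metadata` backport and the `toml`-based version fallback could be removed. A small sketch of the resulting pattern; the `PackageNotFoundError` branch is shown only for illustration and is not part of the new `__init__.py`:

```python
# Version lookup via the standard library (Python >= 3.8), as in the new __init__.py.
from importlib import metadata

try:
    __version__ = metadata.version("mlgw_bns")
except metadata.PackageNotFoundError:
    # Not handled in the new __init__.py; illustrates what happens when the
    # package is imported without being installed.
    __version__ = "unknown"
```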
3,000 changes: 3,000 additions & 0 deletions mlgw_bns/data/ET_psd.txt

Large diffs are not rendered by default.

86 changes: 30 additions & 56 deletions mlgw_bns/model_validation.py
@@ -1,11 +1,13 @@
from __future__ import annotations

from abc import ABC
from typing import Optional, Type
from functools import cached_property
from pathlib import Path
from typing import Callable, Optional, Type

import numpy as np
import pycbc.psd # type: ignore
from scipy import integrate # type: ignore
from scipy.interpolate import interp1d # type: ignore
from scipy.optimize import minimize_scalar # type: ignore
from tqdm import tqdm # type: ignore

@@ -15,6 +17,8 @@
from .model import Model
from .resample_residuals import cartesian_waveforms_at_frequencies

PSD_PATH = Path(__file__).parent / "data"


class ValidateModel:
r"""Functionality for the validation of a model.
@@ -26,56 +30,32 @@ class ValidateModel:
psd_name: str
Name of the power spectral density to use in the computation
of the mismatches.
The name should correspond to one of the PSDs provided by
`pycbc <https://pycbc.org/pycbc/latest/html/pycbc.psd.html>`_.
Defaults to "EinsteinTelescopeP1600143".
This is compatible with
the `official ET sensitivities <http://www.et-gw.eu/index.php/etsensitivities>`_
only down to 5Hz (`issue <https://github.com/gwastro/pycbc/issues/3938>`_).
downsample_by: int
Factor by which to increase the spacing in the frequencies
used for the computation of the PSD and mismatches, compared
to the "standard FFT grid" with spacing
:math:`\Delta f = 1/ T`.
Defaults to 256.
Currently only 'ET' (default) is supported
"""

def __init__(
self,
model: Model,
psd_name: str = "EinsteinTelescopeP1600143",
downsample_by: int = 256,
psd_name: str = "ET",
):

self.model = model
self.psd_name: str = psd_name
self.psd_data = np.loadtxt(PSD_PATH / f"{self.psd_name}_psd.txt")

f_length = (
int(
(
model.dataset.effective_srate_hz / 2
- model.dataset.effective_initial_frequency_hz
)
/ model.dataset.delta_f_hz
all_frequencies = self.psd_data[:, 0]
mask = np.where(
np.logical_and(
all_frequencies < self.model.dataset.effective_srate_hz / 2,
all_frequencies > self.model.dataset.effective_initial_frequency_hz,
)
+ 1
)

self.psd_name: str = psd_name
self.psd: pycbc.types.FrequencySeries = pycbc.psd.from_string(
psd_name,
length=f_length // downsample_by,
delta_f=model.dataset.delta_f_hz * downsample_by,
low_freq_cutoff=model.dataset.initial_frequency_hz,
)

psd_frequencies: pycbc.types.array.Array = self.psd.sample_frequencies

mask = self.psd > 0

self.frequencies = psd_frequencies[mask]
self.psd_values = self.psd[mask]
self.frequencies = self.psd_data[:, 0][mask]
self.psd_values = self.psd_data[:, 1][mask]

def psd_at_frequencies(self, frequencies: np.ndarray) -> np.ndarray:
@cached_property
def psd_at_frequencies(self) -> Callable[[np.ndarray], np.ndarray]:
"""Compute the given PSD
Parameters
@@ -88,7 +68,10 @@ def psd_at_frequencies(self, frequencies: np.ndarray) -> np.ndarray:
np.ndarray
Values of the PSD, :math:`S_n(f_i)`.
"""
return np.array([self.psd.at_frequency(freq) for freq in frequencies])
return interp1d(
self.frequencies,
self.psd_values,
)

def param_set(
self, number_of_parameter_tuples: int, seed: Optional[int] = None
@@ -301,27 +284,14 @@ def mismatch(
max_delta_t: float
Maximum time shift for the two waveforms which are being compared,
in seconds.
Defaults to 0.05.
Defaults to 0.07.
"""

if frequencies is None:
psd_values = self.psd_values
frequencies = self.frequencies
psd_values = self.psd_values
else:
# TODO deal with the possibility that the frequencies may
# lie outside the range of the PSD's frequencies
# (recompute whole PSD?)
# (or just ignore the part outside of the bounds?)
# for now we do the latter
mask = np.bitwise_and(
min(self.frequencies) < frequencies,
frequencies < max(self.frequencies) - 2 * self.model.dataset.delta_f_hz,
)

psd_values = self.psd_at_frequencies(frequencies[mask])
frequencies = frequencies[mask]
waveform_1 = waveform_1[mask]
waveform_2 = waveform_2[mask]
psd_values = self.psd_at_frequencies(frequencies)

def product(a: np.ndarray, b: np.ndarray) -> float:
integral = integrate.trapezoid(np.conj(a) * b / psd_values, x=frequencies)
@@ -339,4 +309,8 @@ def to_minimize(t_c: float) -> float:
res = minimize_scalar(
to_minimize, method="brent", bracket=(-max_delta_t, max_delta_t)
)

if not res.success:
raise ValueError("Mismatch optimization did not succeed!")

return 1 - (-res.fun) / norm
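The rewritten `mismatch` keeps the same overall structure: a PSD-weighted inner product evaluated with `scipy`'s trapezoidal rule, an overlap maximized over a relative time shift `t_c` via `minimize_scalar`, and a final value of one minus the normalized best overlap. A self-contained sketch of that technique on toy frequency-domain signals; the toy waveforms, the real-part convention in `product`, and the phase-factor time shift are assumptions made for illustration, since the full bodies of `product` and `to_minimize` are not shown in this hunk:

```python
# Self-contained sketch of the mismatch computation in ValidateModel.mismatch:
# PSD-weighted inner product + overlap maximized over a relative time shift.
import numpy as np
from scipy import integrate
from scipy.optimize import minimize_scalar


def mismatch(frequencies, waveform_1, waveform_2, psd_values, max_delta_t=0.07):
    def product(a, b):
        # Noise-weighted inner product; constant prefactors cancel in the
        # normalized mismatch, so they are omitted here.
        return np.real(
            integrate.trapezoid(np.conj(a) * b / psd_values, x=frequencies)
        )

    norm = np.sqrt(product(waveform_1, waveform_1) * product(waveform_2, waveform_2))

    def to_minimize(t_c):
        # Time-shift waveform_2 by t_c through a frequency-domain phase factor.
        shifted = waveform_2 * np.exp(2j * np.pi * frequencies * t_c)
        return -product(waveform_1, shifted)

    res = minimize_scalar(
        to_minimize, method="brent", bracket=(-max_delta_t, max_delta_t)
    )
    if not res.success:
        raise ValueError("Mismatch optimization did not succeed!")

    return 1 - (-res.fun) / norm


# Toy usage with a flat PSD and a time-shifted copy of the same signal.
f = np.linspace(20.0, 1024.0, 4000)
psd = np.ones_like(f)
h1 = np.exp(1j * 0.1 * f**0.9)
h2 = h1 * np.exp(2j * np.pi * f * 1e-5)  # h1 shifted by 10 microseconds
print(mismatch(f, h1, h2, psd))  # prints the mismatch after the (local) time-shift optimization
```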