From 36eb0eb8b6c0a2f068cf15aa4a3dbd810288e31c Mon Sep 17 00:00:00 2001 From: Colm Talbot Date: Thu, 27 Jun 2024 15:55:51 +0000 Subject: [PATCH 01/15] MAINT: pin some versions --- requirements.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 336b9e00..05c8c4fa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,12 @@ bilby.cython>=0.3.0 -dynesty>=2.0.1 +# remove pin after https://git.ligo.org/lscsoft/bilby/-/merge_requests/1368 +dynesty>=2.0.1,<2.1.4 emcee corner numpy matplotlib -scipy>=1.5 +# remove pin after https://git.ligo.org/lscsoft/bilby/-/merge_requests/1368 +scipy>=1.5,<1.14 pandas dill tqdm From 69ca3538bc6ef572e5edbddad57e07bae6708de2 Mon Sep 17 00:00:00 2001 From: Gregory Ashton Date: Wed, 10 Jul 2024 16:22:44 +0000 Subject: [PATCH 02/15] BUGFIX: fix likelihood automatic timing --- bilby/core/sampler/base_sampler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bilby/core/sampler/base_sampler.py b/bilby/core/sampler/base_sampler.py index 3c573afe..17a7b119 100644 --- a/bilby/core/sampler/base_sampler.py +++ b/bilby/core/sampler/base_sampler.py @@ -463,7 +463,7 @@ def _time_likelihood(self, n_evaluations=100): logger.info("Unable to measure single likelihood time") else: logger.info( - f"Single likelihood evaluation took {self._log_likelihood_eval_time:.3e} s" + f"Single likelihood evaluation took {log_likelihood_eval_time:.3e} s" ) return log_likelihood_eval_time From a2f2a878e953c64dff6e7d95b52a0ba65886894d Mon Sep 17 00:00:00 2001 From: Matthew Pitkin Date: Wed, 10 Jul 2024 17:23:43 +0000 Subject: [PATCH 03/15] Catch error if trying to load zero bytes resume file --- bilby/bilby_mcmc/sampler.py | 4 +++- bilby/core/sampler/dynesty.py | 5 ++++- bilby/core/sampler/emcee.py | 6 +++++- bilby/core/sampler/kombine.py | 6 +++++- bilby/core/sampler/ptemcee.py | 8 ++++++-- 5 files changed, 23 insertions(+), 6 deletions(-) diff --git a/bilby/bilby_mcmc/sampler.py b/bilby/bilby_mcmc/sampler.py index 6676c76d..c8ccf253 100644 --- a/bilby/bilby_mcmc/sampler.py +++ b/bilby/bilby_mcmc/sampler.py @@ -388,7 +388,9 @@ def read_current_state(self): If true, resume file was successfully loaded, otherwise false """ - if os.path.isfile(self.resume_file) is False: + if os.path.isfile(self.resume_file) is False or not os.path.getsize( + self.resume_file + ): return False import dill diff --git a/bilby/core/sampler/dynesty.py b/bilby/core/sampler/dynesty.py index 852fb88c..bb931ed3 100644 --- a/bilby/core/sampler/dynesty.py +++ b/bilby/core/sampler/dynesty.py @@ -736,7 +736,10 @@ def read_saved_state(self, continuing=False): if os.path.isfile(self.resume_file): logger.info(f"Reading resume file {self.resume_file}") with open(self.resume_file, "rb") as file: - sampler = dill.load(file) + try: + sampler = dill.load(file) + except EOFError: + sampler = None if not hasattr(sampler, "versions"): logger.warning( diff --git a/bilby/core/sampler/emcee.py b/bilby/core/sampler/emcee.py index db88ee5a..76e4dd1e 100644 --- a/bilby/core/sampler/emcee.py +++ b/bilby/core/sampler/emcee.py @@ -311,7 +311,11 @@ def sampler(self): """ if hasattr(self, "_sampler"): pass - elif self.resume and os.path.isfile(self.checkpoint_info.sampler_file): + elif ( + self.resume + and os.path.isfile(self.checkpoint_info.sampler_file) + and os.path.getsize(self.checkpoint_info.sampler_file) + ): import dill logger.info( diff --git a/bilby/core/sampler/kombine.py b/bilby/core/sampler/kombine.py index bda7c6d4..46775195 100644 --- 
a/bilby/core/sampler/kombine.py +++ b/bilby/core/sampler/kombine.py @@ -166,7 +166,11 @@ def sampler_chain(self): return self.sampler.chain[:nsteps, :, :] def check_resume(self): - return self.resume and os.path.isfile(self.checkpoint_info.sampler_file) + return ( + self.resume + and os.path.isfile(self.checkpoint_info.sampler_file) + and os.path.getsize(self.checkpoint_info.sampler_file) > 0 + ) @signal_wrapper def run_sampler(self): diff --git a/bilby/core/sampler/ptemcee.py b/bilby/core/sampler/ptemcee.py index fd927235..1d74a6f6 100644 --- a/bilby/core/sampler/ptemcee.py +++ b/bilby/core/sampler/ptemcee.py @@ -415,7 +415,11 @@ def setup_sampler(self): # This is a very ugly hack to support numpy>=1.24 ptemcee.sampler.np.float = float - if os.path.isfile(self.resume_file) and self.resume is True: + if ( + os.path.isfile(self.resume_file) + and os.path.getsize(self.resume_file) + and self.resume is True + ): import dill logger.info(f"Resume data {self.resume_file} found") @@ -513,7 +517,7 @@ def run_sampler(self): logger.info("Starting to sample") while True: - for (pos0, log_posterior, log_likelihood) in sampler.sample( + for pos0, log_posterior, log_likelihood in sampler.sample( self.pos0, storechain=False, iterations=self.convergence_inputs.niterations_per_check, From 32074018864df513cc180a79bc79c6b8419b686f Mon Sep 17 00:00:00 2001 From: Colm Talbot Date: Wed, 10 Jul 2024 19:09:33 +0000 Subject: [PATCH 04/15] MAINT: suppress dynesty warning --- bilby/core/sampler/dynesty.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bilby/core/sampler/dynesty.py b/bilby/core/sampler/dynesty.py index bb931ed3..70480c6b 100644 --- a/bilby/core/sampler/dynesty.py +++ b/bilby/core/sampler/dynesty.py @@ -3,6 +3,7 @@ import os import sys import time +import warnings import numpy as np from pandas import DataFrame @@ -677,10 +678,17 @@ def _run_external_sampler_with_checkpointing(self): chain of nested samples within dynesty and have to be removed before restarting the sampler. 
""" + logger.debug("Running sampler with checkpointing") old_ncall = self.sampler.ncall sampler_kwargs = self.sampler_function_kwargs.copy() + warnings.filterwarnings( + "ignore", + message="The sampling was stopped short due to maxiter/maxcall limit*", + category=UserWarning, + module="dynesty.sampler", + ) while True: self.finalize_sampler_kwargs(sampler_kwargs) self.sampler.run_nested(**sampler_kwargs) From 09789ac58726ec2a96b55530d996b103ad8a4f9d Mon Sep 17 00:00:00 2001 From: Colm Talbot Date: Fri, 21 Jun 2024 22:06:42 +0000 Subject: [PATCH 05/15] MAINT: simplify the healpix distance pdf call --- bilby/gw/prior.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bilby/gw/prior.py b/bilby/gw/prior.py index 21767411..b353c60d 100644 --- a/bilby/gw/prior.py +++ b/bilby/gw/prior.py @@ -1459,7 +1459,7 @@ def update_distance(self, pix_idx): self.distance_pdf = lambda r: self.distnorm[pix_idx] * norm( loc=self.distmu[pix_idx], scale=self.distsigma[pix_idx] ).pdf(r) - pdfs = self.rs ** 2 * norm(loc=self.distmu[pix_idx], scale=self.distsigma[pix_idx]).pdf(self.rs) + pdfs = self.rs ** 2 * self.distance_pdf(self.rs) cdfs = np.cumsum(pdfs) / np.sum(pdfs) self.distance_icdf = interp1d(cdfs, self.rs) From d4c02aa092116b6402fb378f3cbcb294bb0e283d Mon Sep 17 00:00:00 2001 From: Colm Talbot Date: Thu, 11 Jul 2024 15:49:00 +0000 Subject: [PATCH 06/15] CI fixes --- bilby/core/sampler/dynesty.py | 6 +- bilby/core/utils/calculus.py | 89 ++++++---------------- bilby/gw/likelihood/base.py | 8 +- bilby/gw/likelihood/roq.py | 6 +- examples/core_examples/logo/sample_logo.py | 2 +- requirements.txt | 5 +- test/core/sampler/dynesty_test.py | 2 +- test/core/utils_test.py | 2 +- 8 files changed, 38 insertions(+), 82 deletions(-) diff --git a/bilby/core/sampler/dynesty.py b/bilby/core/sampler/dynesty.py index 70480c6b..f1617f1e 100644 --- a/bilby/core/sampler/dynesty.py +++ b/bilby/core/sampler/dynesty.py @@ -691,6 +691,8 @@ def _run_external_sampler_with_checkpointing(self): ) while True: self.finalize_sampler_kwargs(sampler_kwargs) + if getattr(self.sampler, "added_live", False): + self.sampler._remove_live_points() self.sampler.run_nested(**sampler_kwargs) if self.sampler.ncall == old_ncall: break @@ -705,8 +707,8 @@ def _run_external_sampler_with_checkpointing(self): if last_checkpoint_s > self.check_point_delta_t: self.write_current_state() self.plot_current_state() - if getattr(self.sampler, "added_live", False): - self.sampler._remove_live_points() + if getattr(self.sampler, "added_live", False): + self.sampler._remove_live_points() self.sampler.run_nested(**sampler_kwargs) self.write_current_state() diff --git a/bilby/core/utils/calculus.py b/bilby/core/utils/calculus.py index 618061f4..ac6fcefc 100644 --- a/bilby/core/utils/calculus.py +++ b/bilby/core/utils/calculus.py @@ -1,7 +1,7 @@ import math -from numbers import Number + import numpy as np -from scipy.interpolate import interp2d +from scipy.interpolate import RectBivariateSpline from scipy.special import logsumexp from .log import logger @@ -189,79 +189,34 @@ def logtrapzexp(lnf, dx): return C + logsumexp([logsumexp(lnfdx1), logsumexp(lnfdx2)]) -class UnsortedInterp2d(interp2d): - def __call__(self, x, y, dx=0, dy=0, assume_sorted=False): - """Modified version of the interp2d call method. - - This avoids the outer product that is done when two numpy - arrays are passed. 
- - Parameters - ========== - x: See superclass - y: See superclass - dx: See superclass - dy: See superclass - assume_sorted: bool, optional - This is just a place holder to prevent a warning. - Overwriting this will not do anything +class BoundedRectBivariateSpline(RectBivariateSpline): - Returns - ======= - array_like: See superclass + def __init__(self, x, y, z, bbox=[None] * 4, kx=3, ky=3, s=0, fill_value=None): + self.x_min, self.x_max, self.y_min, self.y_max = bbox + if self.x_min is None: + self.x_min = min(x) + if self.x_max is None: + self.x_max = max(x) + if self.y_min is None: + self.y_min = min(y) + if self.y_max is None: + self.y_max = max(y) + self.fill_value = fill_value + super().__init__(x=x, y=y, z=z, bbox=bbox, kx=kx, ky=ky, s=s) - """ - from scipy.interpolate.dfitpack import bispeu - - x, y = self._sanitize_inputs(x, y) + def __call__(self, x, y, dx=0, dy=0, grid=False): + result = super().__call__(x=x, y=y, dx=dx, dy=dy, grid=grid) out_of_bounds_x = (x < self.x_min) | (x > self.x_max) out_of_bounds_y = (y < self.y_min) | (y > self.y_max) bad = out_of_bounds_x | out_of_bounds_y - if isinstance(x, Number) and isinstance(y, Number): + result[bad] = self.fill_value + if result.size == 1: if bad: - output = self.fill_value - ier = 0 + return self.fill_value else: - output, ier = bispeu(*self.tck, x, y) - output = float(output) + return result.item() else: - output = np.empty_like(x) - output[bad] = self.fill_value - if np.any(~bad): - output[~bad], ier = bispeu(*self.tck, x[~bad], y[~bad]) - else: - ier = 0 - if ier == 10: - raise ValueError("Invalid input data") - elif ier: - raise TypeError("An error occurred") - return output - - @staticmethod - def _sanitize_inputs(x, y): - if isinstance(x, np.ndarray) and x.size == 1: - x = float(x) - if isinstance(y, np.ndarray) and y.size == 1: - y = float(y) - if isinstance(x, np.ndarray) and isinstance(y, np.ndarray): - original_shapes = (x.shape, y.shape) - if x.shape != y.shape: - while x.ndim > y.ndim: - y = np.expand_dims(y, -1) - while y.ndim > x.ndim: - x = np.expand_dims(x, -1) - try: - x = x * np.ones(y.shape) - y = y * np.ones(x.shape) - except ValueError: - raise ValueError( - f"UnsortedInterp2d received incompatibly shaped arrays: {original_shapes}" - ) - elif isinstance(x, np.ndarray) and not isinstance(y, np.ndarray): - y = y * np.ones_like(x) - elif not isinstance(x, np.ndarray) and isinstance(y, np.ndarray): - x = x * np.ones_like(y) - return x, y + return result def round_up_to_power_of_two(x): diff --git a/bilby/gw/likelihood/base.py b/bilby/gw/likelihood/base.py index e0a09c1e..d04b28d8 100644 --- a/bilby/gw/likelihood/base.py +++ b/bilby/gw/likelihood/base.py @@ -7,7 +7,7 @@ from scipy.special import logsumexp from ...core.likelihood import Likelihood -from ...core.utils import logger, UnsortedInterp2d, create_time_series +from ...core.utils import logger, BoundedRectBivariateSpline, create_time_series from ...core.prior import Interped, Prior, Uniform, DeltaFunction from ..detector import InterferometerList, get_empty_interferometer, calibration from ..prior import BBHPriorDict, Cosmological @@ -752,7 +752,7 @@ def distance_marginalized_likelihood(self, d_inner_h, h_inner_h): d_inner_h_ref = np.real(d_inner_h_ref) return self._interp_dist_margd_loglikelihood( - d_inner_h_ref, h_inner_h_ref) + d_inner_h_ref, h_inner_h_ref, grid=False) def phase_marginalized_likelihood(self, d_inner_h, h_inner_h): d_inner_h = ln_i0(abs(d_inner_h)) @@ -891,9 +891,9 @@ def _setup_distance_marginalization(self, lookup_table=None): 
self._create_lookup_table() else: self._create_lookup_table() - self._interp_dist_margd_loglikelihood = UnsortedInterp2d( + self._interp_dist_margd_loglikelihood = BoundedRectBivariateSpline( self._d_inner_h_ref_array, self._optimal_snr_squared_ref_array, - self._dist_margd_loglikelihood_array, kind='cubic', fill_value=-np.inf) + self._dist_margd_loglikelihood_array.T, fill_value=-np.inf) @property def cached_lookup_table_filename(self): diff --git a/bilby/gw/likelihood/roq.py b/bilby/gw/likelihood/roq.py index 12d5212f..b0bac463 100644 --- a/bilby/gw/likelihood/roq.py +++ b/bilby/gw/likelihood/roq.py @@ -1110,11 +1110,11 @@ def calc_fhigh(freq, psd, scaling=20.): f_high: float The maximum frequency which must be considered """ - from scipy.integrate import simps + from scipy.integrate import simpson integrand1 = np.power(freq, -7. / 3) / psd - integral1 = simps(integrand1, freq) + integral1 = simpson(y=integrand1, x=freq) integrand3 = np.power(freq, 2. / 3.) / (psd * integral1) - f_3_bar = simps(integrand3, freq) + f_3_bar = simpson(y=integrand3, x=freq) f_high = scaling * f_3_bar**(1 / 3) diff --git a/examples/core_examples/logo/sample_logo.py b/examples/core_examples/logo/sample_logo.py index 6b5b7225..a9993d8e 100644 --- a/examples/core_examples/logo/sample_logo.py +++ b/examples/core_examples/logo/sample_logo.py @@ -18,7 +18,7 @@ def log_likelihood(self): img = 1 - io.imread("{}.png".format(letter), as_gray=True)[::-1, :] x = np.arange(img.shape[0]) y = np.arange(img.shape[1]) - interp = si.interpolate.interp2d(x, y, img.T) + interp = si.RectBivariateSpline(x, y, img, kx=1, ky=1) likelihood = Likelihood(interp) diff --git a/requirements.txt b/requirements.txt index 05c8c4fa..37bdb21f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,10 @@ bilby.cython>=0.3.0 -# remove pin after https://git.ligo.org/lscsoft/bilby/-/merge_requests/1368 -dynesty>=2.0.1,<2.1.4 +dynesty>=2.0.1 emcee corner numpy matplotlib -# remove pin after https://git.ligo.org/lscsoft/bilby/-/merge_requests/1368 +# see https://github.com/healpy/healpy/pull/953 scipy>=1.5,<1.14 pandas dill diff --git a/test/core/sampler/dynesty_test.py b/test/core/sampler/dynesty_test.py index d33cc2e2..5d1c534a 100644 --- a/test/core/sampler/dynesty_test.py +++ b/test/core/sampler/dynesty_test.py @@ -216,7 +216,7 @@ def setUp(self): self.sampler = cls( loglikelihood=lambda x: 1, prior_transform=lambda x: x, - npdim=4, + ndim=4, live_points=(np.zeros((1000, 4)), np.zeros((1000, 4)), np.zeros(1000)), update_interval=None, first_update=dict(), diff --git a/test/core/utils_test.py b/test/core/utils_test.py index d6b3e890..ed63916a 100644 --- a/test/core/utils_test.py +++ b/test/core/utils_test.py @@ -320,7 +320,7 @@ def setUp(self): self.xx = np.linspace(0, 1, 10) self.yy = np.linspace(0, 1, 10) self.zz = np.random.random((10, 10)) - self.interpolant = bilby.core.utils.UnsortedInterp2d(self.xx, self.yy, self.zz) + self.interpolant = bilby.core.utils.BoundedRectBivariateSpline(self.xx, self.yy, self.zz) def tearDown(self): pass From 379a6ed6159b6eb4ff2e174521b0592ce8bb90fc Mon Sep 17 00:00:00 2001 From: Rhiannon Udall Date: Fri, 12 Jul 2024 16:27:57 +0000 Subject: [PATCH 07/15] ENH: Add whitened TD strain as a property of the Interferometer --- bilby/gw/detector/interferometer.py | 88 +++++++++++++++++++++++-- bilby/gw/result.py | 24 ++----- test/gw/detector/interferometer_test.py | 87 +++++++++++++++++++++--- 3 files changed, 167 insertions(+), 32 deletions(-) diff --git a/bilby/gw/detector/interferometer.py 
b/bilby/gw/detector/interferometer.py index 662866e8..82c6f1c4 100644 --- a/bilby/gw/detector/interferometer.py +++ b/bilby/gw/detector/interferometer.py @@ -616,19 +616,95 @@ def matched_filter_snr(self, signal): power_spectral_density=self.power_spectral_density_array[self.strain_data.frequency_mask], duration=self.strain_data.duration) + def whiten_frequency_series(self, frequency_series : np.array) -> np.array: + """Whitens a frequency series with the noise properties of the detector + + .. math:: + \\tilde{a}_w(f) = \\tilde{a}(f) \\sqrt{\\frac{4}{T S_n(f)}} + + Such that + + .. math:: + Var(n) = \\frac{1}{N} \\sum_k=0^N n_W(f_k)n_W^*(f_k) = 2 + + Where the factor of two is due to the independent real and imaginary + components. + + Parameters + ========== + frequency_series : np.array + The frequency series, whitened by the ASD + """ + return frequency_series / (self.amplitude_spectral_density_array * np.sqrt(self.duration / 4)) + + def get_whitened_time_series_from_whitened_frequency_series( + self, + whitened_frequency_series : np.array + ) -> np.array: + """Gets the whitened time series from a whitened frequency series. + + This ifft's and also applies a windowing factor, + since when f_min and f_max are set bilby applies a mask to the series. + + Per 6.2a-b in https://arxiv.org/pdf/gr-qc/0509116 since our window + is just a band pass, + this coefficient is :math:`w/W` where + + .. math:: + + W = \\frac{1}{N} \\sum_{k=0}^N w^2[j] + + Since our window :math:`w` is simply 1 or 0, depending on the mask, we get + + .. math:: + + W = \\frac{1}{N} \\sum_{k=0}^N \\Theta(f_{max} - f_k)\\Theta(f_k - f_{min})} + + and accordingly the termwise window factor is + + .. math:: + w = \\sqrt{N W} = \\sqrt{\\sum_{k=0}^N \\Theta(f_{max} - f_k)\\Theta(f_k - f_{min})}} + + """ + frequency_window_factor = ( + np.sum(self.frequency_mask) + / len(self.frequency_mask) + ) + + whitened_time_series = ( + np.fft.irfft(whitened_frequency_series) + * np.sqrt(np.sum(self.frequency_mask)) / frequency_window_factor + ) + + return whitened_time_series + @property def whitened_frequency_domain_strain(self): - """ Calculates the whitened data by dividing the frequency domain data by - ((amplitude spectral density) * (duration / 4) ** 0.5). The resulting - data will have unit variance. + r"""Whitens the frequency domain data by dividing through by ASD, + with appropriate normalization. + + See `whiten_frequency_series()` for details. Returns ======= array_like: The whitened data """ - return self.strain_data.frequency_domain_strain / ( - self.amplitude_spectral_density_array * np.sqrt(self.duration / 4) - ) + return self.whiten_frequency_series(self.strain_data.frequency_domain_strain) + + @property + def whitened_time_domain_strain(self) -> np.array: + """Calculates the whitened time domain strain + by iffting the whitened frequency domain strain, + with the appropriate normalization. + + See `get_whitened_time_series_from_whitened_frequency_series()` for details + + Returns + ======= + array_like + The whitened data in the time domain + """ + return self.get_whitened_time_series_from_whitened_frequency_series(self.whitened_frequency_domain_strain) def save_data(self, outdir, label=None): """ Creates save files for interferometer data in plain text format. 
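The unit-variance convention established by the whitening helpers above is exactly what the accompanying tests verify, and it can be checked by hand. A minimal sketch, assuming simulated Gaussian noise coloured by the detector PSD (the choice of detector, duration, and sampling frequency is illustrative, not taken from the patch):

import numpy as np
import bilby

ifo = bilby.gw.detector.get_empty_interferometer("H1")
ifo.set_strain_data_from_power_spectral_density(
    sampling_frequency=4096, duration=64
)

# Real and imaginary parts of the whitened frequency-domain strain should
# each have unit variance within the frequency mask.
white_fd = ifo.whitened_frequency_domain_strain[ifo.frequency_mask]
print(np.std(white_fd.real), np.std(white_fd.imag))  # both ~1

# The whitened time-domain strain should have unit variance overall, after
# the band-pass window correction described in the docstring above.
print(np.std(ifo.whitened_time_domain_strain))  # ~1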
diff --git a/bilby/gw/result.py b/bilby/gw/result.py index 5d1b6e7e..cc9f97ed 100644 --- a/bilby/gw/result.py +++ b/bilby/gw/result.py @@ -377,10 +377,6 @@ def plot_interferometer_waveform_posterior( logger.debug("Downsampling frequency mask to {} values".format( len(frequency_idxs)) ) - frequency_window_factor = ( - np.sum(interferometer.frequency_mask) - / len(interferometer.frequency_mask) - ) plot_times = interferometer.time_array[time_idxs] plot_times -= interferometer.strain_data.start_time start_time -= interferometer.strain_data.start_time @@ -451,11 +447,7 @@ def plot_interferometer_waveform_posterior( fig.add_trace( go.Scatter( x=plot_times, - y=np.fft.irfft( - interferometer.whitened_frequency_domain_strain - * np.sqrt(np.sum(interferometer.frequency_mask)) - / frequency_window_factor - )[time_idxs], + y=interferometer.whitened_time_domain_strain[time_idxs], fill=None, mode='lines', line_color=DATA_COLOR, opacity=0.5, @@ -478,11 +470,7 @@ def plot_interferometer_waveform_posterior( interferometer.amplitude_spectral_density_array[frequency_idxs], color=DATA_COLOR, label='ASD') axs[1].plot( - plot_times, np.fft.irfft( - interferometer.whitened_frequency_domain_strain - * np.sqrt(np.sum(interferometer.frequency_mask)) - / frequency_window_factor - )[time_idxs], + plot_times, interferometer.whitened_time_domain_strain[time_idxs], color=DATA_COLOR, alpha=0.3) logger.debug('Plotted interferometer data.') @@ -493,10 +481,10 @@ def plot_interferometer_waveform_posterior( wf_pols = waveform_generator.frequency_domain_strain(params) fd_waveform = interferometer.get_detector_response(wf_pols, params) fd_waveforms.append(fd_waveform[frequency_idxs]) - td_waveform = infft( - fd_waveform * np.sqrt(2. / interferometer.sampling_frequency) / - interferometer.amplitude_spectral_density_array, - self.sampling_frequency)[time_idxs] + whitened_fd_waveform = interferometer.whiten_frequency_series(fd_waveform) + td_waveform = interferometer.get_whitened_time_series_from_whitened_frequency_series( + whitened_fd_waveform + )[time_idxs] td_waveforms.append(td_waveform) fd_waveforms = asd_from_freq_series( fd_waveforms, diff --git a/test/gw/detector/interferometer_test.py b/test/gw/detector/interferometer_test.py index 35804541..66e84760 100644 --- a/test/gw/detector/interferometer_test.py +++ b/test/gw/detector/interferometer_test.py @@ -560,21 +560,92 @@ def test_time_delay_vs_lal(self): class TestInterferometerWhitenedStrain(unittest.TestCase): def setUp(self): + self.duration = 64 + self.sampling_frequency = 4096 self.ifo = bilby.gw.detector.get_empty_interferometer('H1') self.ifo.set_strain_data_from_power_spectral_density( - sampling_frequency=4096, duration=64) + sampling_frequency=self.sampling_frequency, duration=self.duration) + self.waveform_generator = bilby.gw.waveform_generator.WaveformGenerator( + duration=self.duration, + sampling_frequency=self.sampling_frequency, + frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole, + waveform_arguments={ + "waveform_approximant": "IMRPhenomXP" + }) + + self.parameters = { + 'mass_1': 10, + 'mass_2': 10, + 'a_1': 0, + 'a_2': 0, + 'tilt_1': 0, + 'tilt_2': 0, + 'phi_12': 0, + 'phi_jl': 0, + 'theta_jn': 0, + 'luminosity_distance': 40, + 'phase': 0, + 'ra': 0, + 'dec': 0, + 'geocent_time': 62, + 'psi': 0 + } def tearDown(self): del self.ifo + del self.waveform_generator + del self.parameters + del self.duration + del self.sampling_frequency - def test_whitened_strain(self): - mask = self.ifo.frequency_mask - white = 
self.ifo.whitened_frequency_domain_strain[mask] - std_real = np.std(white.real) - std_imag = np.std(white.imag) + def _check_frequency_series_whiteness(self, frequency_series): + std_real = np.std(frequency_series.real) + std_imag = np.std(frequency_series.imag) self.assertAlmostEqual(std_real, 1, places=2) self.assertAlmostEqual(std_imag, 1, places=2) + def _check_time_series_whiteness(self, time_series): + std = np.std(time_series) + self.assertAlmostEqual(std, 1, places=2) -if __name__ == "__main__": - unittest.main() + def test_frequency_domain_whitened_strain(self): + mask = self.ifo.frequency_mask + white = self.ifo.whitened_frequency_domain_strain[mask] + self._check_frequency_series_whiteness(white) + + def test_time_domain_whitened_strain(self): + whitened_td = self.ifo.whitened_time_domain_strain + self._check_time_series_whiteness(whitened_td) + + def test_frequency_domain_noise_and_signal_whitening(self): + # Inject some (loud) signal + self.ifo.inject_signal(waveform_generator=self.waveform_generator, parameters=self.parameters) + # Make the template separately + waveform_polarizations = self.waveform_generator.frequency_domain_strain(parameters=self.parameters) + signal_ifo = self.ifo.get_detector_response( + waveform_polarizations=waveform_polarizations, + parameters=self.parameters + ) + # Whiten the template + whitened_signal_ifo = self.ifo.whiten_frequency_series(signal_ifo) + mask = self.ifo.frequency_mask + white = self.ifo.whitened_frequency_domain_strain[mask] - whitened_signal_ifo[mask] + self._check_frequency_series_whiteness(white) + + def test_time_domain_noise_and_signal_whitening(self): + # Inject some (loud) signal + self.ifo.inject_signal(waveform_generator=self.waveform_generator, parameters=self.parameters) + # Make the template separately + waveform_polarizations = self.waveform_generator.frequency_domain_strain(parameters=self.parameters) + signal_ifo = self.ifo.get_detector_response( + waveform_polarizations=waveform_polarizations, + parameters=self.parameters + ) + # Whiten the template in FD + whitened_signal_ifo_fd = self.ifo.whiten_frequency_series(signal_ifo) + # Get whitened template in TD + whitened_signal_ifo_td = self.ifo.get_whitened_time_series_from_whitened_frequency_series( + whitened_signal_ifo_fd + ) + whitened_td = self.ifo.whitened_time_domain_strain - whitened_signal_ifo_td + self._check_time_series_whiteness(whitened_td) From a90aaaa2458ef7a4f514200ac8a3495a20da5828 Mon Sep 17 00:00:00 2001 From: Soichiro Morisaki Date: Sun, 14 Jul 2024 13:40:37 +0000 Subject: [PATCH 08/15] Add informative error messages in ROQ likelihood --- bilby/gw/likelihood/roq.py | 66 ++++++++++++++++++++++++++++------ test/gw/likelihood_test.py | 74 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 130 insertions(+), 10 deletions(-) diff --git a/bilby/gw/likelihood/roq.py b/bilby/gw/likelihood/roq.py index b0bac463..fddfa92b 100644 --- a/bilby/gw/likelihood/roq.py +++ b/bilby/gw/likelihood/roq.py @@ -136,13 +136,28 @@ def __init__( linear_matrix['duration_s'][()])], dtype=[('flow', float), ('fhigh', float), ('seglen', float)] ) - elif is_hdf5_quadratic: - self.roq_params = np.array( - [(quadratic_matrix['minimum_frequency_hz'][()], - quadratic_matrix['maximum_frequency_hz'][()], - quadratic_matrix['duration_s'][()])], - dtype=[('flow', float), ('fhigh', float), ('seglen', float)] - ) + if is_hdf5_quadratic: + if self.roq_params is None: + self.roq_params = np.array( + [(quadratic_matrix['minimum_frequency_hz'][()], + 
quadratic_matrix['maximum_frequency_hz'][()], + quadratic_matrix['duration_s'][()])], + dtype=[('flow', float), ('fhigh', float), ('seglen', float)] + ) + else: + self.roq_params['flow'] = max( + self.roq_params['flow'], quadratic_matrix['minimum_frequency_hz'][()] + ) + self.roq_params['fhigh'] = min( + self.roq_params['fhigh'], quadratic_matrix['maximum_frequency_hz'][()] + ) + self.roq_params['seglen'] = min( + self.roq_params['seglen'], quadratic_matrix['duration_s'][()] + ) + if self.roq_params is not None: + for ifo in self.interferometers: + self.perform_roq_params_check(ifo) + self.weights = dict() self._set_weights(linear_matrix=linear_matrix, quadratic_matrix=quadratic_matrix) if is_hdf5_linear: @@ -158,9 +173,10 @@ def __init__( for basis_type in ['linear', 'quadratic']: number_of_bases = getattr(self, f'number_of_bases_{basis_type}') if number_of_bases > 1: - self._verify_prior_ranges_and_frequency_nodes(basis_type) + self._verify_numbers_of_prior_ranges_and_frequency_nodes(basis_type) else: self._check_frequency_nodes_exist_for_single_basis(basis_type) + self._verify_prior_ranges(basis_type) self._set_unique_frequency_nodes_and_inverse() # need to fill waveform_arguments here if single basis is used, as they will never be updated. @@ -171,7 +187,7 @@ def __init__( self._waveform_generator.waveform_arguments['linear_indices'] = linear_indices self._waveform_generator.waveform_arguments['quadratic_indices'] = quadratic_indices - def _verify_prior_ranges_and_frequency_nodes(self, basis_type): + def _verify_numbers_of_prior_ranges_and_frequency_nodes(self, basis_type): """ Check if self.weights contains lists of prior ranges and frequency nodes, and their sizes are equal to the number of bases. @@ -205,6 +221,35 @@ def _verify_prior_ranges_and_frequency_nodes(self, basis_type): raise ValueError( f'The number of arrays of frequency nodes does not match the number of {basis_type} bases') + def _verify_prior_ranges(self, basis_type): + """Check if the union of prior ranges is within the ROQ basis bounds. + + Parameters + ========== + basis_type: str + + """ + key = f'prior_range_{basis_type}' + if key not in self.weights: + return + prior_ranges = self.weights[key] + for param_name, prior_ranges_of_this_param in prior_ranges.items(): + prior_minimum = self.priors[param_name].minimum + basis_minimum = np.min(prior_ranges_of_this_param[:, 0]) + if prior_minimum < basis_minimum: + raise BilbyROQParamsRangeError( + f"Prior minimum of {param_name} {prior_minimum} less " + f"than ROQ basis bound {basis_minimum}" + ) + + prior_maximum = self.priors[param_name].maximum + basis_maximum = np.max(prior_ranges_of_this_param[:, 1]) + if prior_maximum > basis_maximum: + raise BilbyROQParamsRangeError( + f"Prior maximum of {param_name} {prior_maximum} greater " + f"than ROQ basis bound {basis_maximum}" + ) + def _check_frequency_nodes_exist_for_single_basis(self, basis_type): """ For a single-basis case, frequency nodes should be contained in self._waveform_generator.waveform_arguments or @@ -701,6 +746,8 @@ def _set_weights(self, linear_matrix, quadratic_matrix): roq_scale_factor = 1. 
prior_ranges[param_name] = matrix[key][param_name][()] * roq_scale_factor selected_idxs, selected_prior_ranges = self._select_prior_ranges(prior_ranges) + if len(selected_idxs) == 0: + raise BilbyROQParamsRangeError(f"There are no {basis_type} ROQ bases within the prior range.") self.weights[key] = selected_prior_ranges idxs_in_prior_range[basis_type] = selected_idxs else: @@ -725,7 +772,6 @@ def _set_weights(self, linear_matrix, quadratic_matrix): ifo_idxs = {} for ifo in self.interferometers: if self.roq_params is not None: - self.perform_roq_params_check(ifo) # Get scaled ROQ quantities roq_scaled_minimum_frequency = self.roq_params['flow'] * self.roq_scale_factor roq_scaled_maximum_frequency = self.roq_params['fhigh'] * self.roq_scale_factor diff --git a/test/gw/likelihood_test.py b/test/gw/likelihood_test.py index 0c157594..b12ffd59 100644 --- a/test/gw/likelihood_test.py +++ b/test/gw/likelihood_test.py @@ -656,6 +656,80 @@ def setUp(self): self.injection_parameters["geocent_time"] + 0.1 ) + @parameterized.expand( + [(_path_to_basis, 20., 2048., 16), + (_path_to_basis, 10., 1024., 16), + (_path_to_basis, 20., 1024., 32), + (_path_to_basis_mb, 20., 2048., 16)] + ) + def test_fails_with_frequency_duration_mismatch( + self, basis, minimum_frequency, maximum_frequency, duration + ): + """Test if likelihood fails as expected, when data frequency range is + not within the basis range or data duration does not match the basis + duration. The basis frequency range and duration are 20--1024Hz and + 16s""" + self.priors["chirp_mass"].minimum = 8 + self.priors["chirp_mass"].maximum = 9 + interferometers = bilby.gw.detector.InterferometerList(["H1"]) + interferometers.set_strain_data_from_power_spectral_densities( + sampling_frequency=2 * maximum_frequency, + duration=duration, + start_time=self.injection_parameters["geocent_time"] - duration + 1 + ) + for ifo in interferometers: + ifo.minimum_frequency = minimum_frequency + ifo.maximum_frequency = maximum_frequency + search_waveform_generator = bilby.gw.waveform_generator.WaveformGenerator( + duration=duration, + sampling_frequency=2 * maximum_frequency, + frequency_domain_source_model=bilby.gw.source.binary_black_hole_roq, + waveform_arguments=dict( + reference_frequency=self.reference_frequency, + waveform_approximant=self.waveform_approximant + ) + ) + with self.assertRaises(BilbyROQParamsRangeError): + bilby.gw.likelihood.ROQGravitationalWaveTransient( + interferometers=interferometers, + priors=self.priors, + waveform_generator=search_waveform_generator, + linear_matrix=basis, + quadratic_matrix=basis, + ) + + @parameterized.expand([(_path_to_basis, 7, 13), (_path_to_basis, 9, 15), (_path_to_basis, 16, 17)]) + def test_fails_with_prior_mismatch(self, basis, chirp_mass_min, chirp_mass_max): + """Test if likelihood fails as expected, when prior range is not within + the basis bounds. 
Basis chirp-mass range is 8Msun--14Msun.""" + self.priors["chirp_mass"].minimum = chirp_mass_min + self.priors["chirp_mass"].maximum = chirp_mass_max + interferometers = bilby.gw.detector.InterferometerList(["H1"]) + interferometers.set_strain_data_from_power_spectral_densities( + sampling_frequency=self.sampling_frequency, + duration=self.duration, + start_time=self.injection_parameters["geocent_time"] - self.duration + 1 + ) + for ifo in interferometers: + ifo.minimum_frequency = self.minimum_frequency + search_waveform_generator = bilby.gw.waveform_generator.WaveformGenerator( + duration=self.duration, + sampling_frequency=self.sampling_frequency, + frequency_domain_source_model=bilby.gw.source.binary_black_hole_roq, + waveform_arguments=dict( + reference_frequency=self.reference_frequency, + waveform_approximant=self.waveform_approximant + ) + ) + with self.assertRaises(BilbyROQParamsRangeError): + bilby.gw.likelihood.ROQGravitationalWaveTransient( + interferometers=interferometers, + priors=self.priors, + waveform_generator=search_waveform_generator, + linear_matrix=basis, + quadratic_matrix=basis, + ) + @parameterized.expand( product( [_path_to_basis, _path_to_basis_mb], From 13f4c77d8316dec431f25565cf65ef0d2c8197cf Mon Sep 17 00:00:00 2001 From: Rhiannon Udall Date: Mon, 22 Jul 2024 16:10:16 +0000 Subject: [PATCH 09/15] ENH: Add template-template inner product to Interferometer --- bilby/gw/detector/interferometer.py | 20 ++++++++++++++++++++ test/gw/detector/interferometer_test.py | 12 ++++++++++++ 2 files changed, 32 insertions(+) diff --git a/bilby/gw/detector/interferometer.py b/bilby/gw/detector/interferometer.py index 82c6f1c4..4f54b715 100644 --- a/bilby/gw/detector/interferometer.py +++ b/bilby/gw/detector/interferometer.py @@ -597,6 +597,26 @@ def inner_product(self, signal): power_spectral_density=self.power_spectral_density_array[self.strain_data.frequency_mask], duration=self.strain_data.duration) + def template_template_inner_product(self, signal_1, signal_2): + """A noise weighted inner product between two templates, using this ifo's PSD. 
+ + Parameters + ========== + signal_1 : array_like + An array containing the first signal + signal_2 : array_like + an array containing the second signal + + Returns + ======= + float: The noise weighted inner product of the two templates + """ + return gwutils.noise_weighted_inner_product( + aa=signal_1[self.strain_data.frequency_mask], + bb=signal_2[self.strain_data.frequency_mask], + power_spectral_density=self.power_spectral_density_array[self.strain_data.frequency_mask], + duration=self.strain_data.duration) + def matched_filter_snr(self, signal): """ diff --git a/test/gw/detector/interferometer_test.py b/test/gw/detector/interferometer_test.py index 66e84760..08a8f6a8 100644 --- a/test/gw/detector/interferometer_test.py +++ b/test/gw/detector/interferometer_test.py @@ -321,6 +321,18 @@ def test_optimal_snr_squared(self): self.assertTrue(np.array_equal(expected[2], actual[2])) self.assertEqual(expected[3], actual[3]) + def test_template_template_inner_product(self): + signal_1 = np.ones_like(self.ifo.power_spectral_density_array) + signal_2 = np.ones_like(self.ifo.power_spectral_density_array) * 2 + signal_1_optimal = self.ifo.optimal_snr_squared(signal=signal_1) + signal_1_optimal_by_template_template = self.ifo.template_template_inner_product( + signal_1=signal_1, + signal_2=signal_1 + ) + self.assertTrue(np.array_equal(signal_1_optimal, signal_1_optimal_by_template_template)) + signal_1_signal_2_inner_product = self.ifo.template_template_inner_product(signal_1=signal_1, signal_2=signal_2) + self.assertTrue(np.array_equal(signal_1_optimal * 2, signal_1_signal_2_inner_product)) + def test_repr(self): expected = ( "Interferometer(name='{}', power_spectral_density={}, minimum_frequency={}, " From bad30b7cfd732d16b62322e45db5bb5f9ebc4f3c Mon Sep 17 00:00:00 2001 From: Rhiannon Udall Date: Mon, 22 Jul 2024 16:12:00 +0000 Subject: [PATCH 10/15] Add identity conversion and generation functions --- bilby/gw/conversion.py | 56 +++++++++++++++++++++++++++++++++ test/gw/conversion_test.py | 64 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 120 insertions(+) diff --git a/bilby/gw/conversion.py b/bilby/gw/conversion.py index 85bdf115..fe1688f4 100644 --- a/bilby/gw/conversion.py +++ b/bilby/gw/conversion.py @@ -2556,3 +2556,59 @@ def fill_sample(args): likelihood.parameters.update(dict(sample).copy()) new_sample = likelihood.generate_posterior_sample_from_marginalized_likelihood() return tuple((new_sample[key] for key in marginalized_parameters)) + + +def identity_map_conversion(parameters): + """An identity map conversion function that makes no changes to the parameters, + but returns the correct signature expected by other conversion functions + (e.g. convert_to_lal_binary_black_hole_parameters)""" + return parameters, [] + + +def identity_map_generation(sample, likelihood=None, priors=None, npool=1): + """An identity map generation function that handles marginalizations, SNRs, etc. correctly, + but does not attempt e.g. conversions in mass or spins + + Parameters + ========== + sample: dict or pandas.DataFrame + Samples to fill in with extra parameters, this may be either an + injection or posterior samples. + likelihood: bilby.gw.likelihood.GravitationalWaveTransient, optional + GravitationalWaveTransient used for sampling, used for waveform and + likelihood.interferometers. + priors: dict, optional + Dictionary of prior objects, used to fill in non-sampled parameters. 
+ + Returns + ======= + + """ + output_sample = sample.copy() + + output_sample = fill_from_fixed_priors(output_sample, priors) + + if likelihood is not None: + compute_per_detector_log_likelihoods( + samples=output_sample, likelihood=likelihood, npool=npool) + + marginalized_parameters = getattr(likelihood, "_marginalized_parameters", list()) + if len(marginalized_parameters) > 0: + try: + generate_posterior_samples_from_marginalized_likelihood( + samples=output_sample, likelihood=likelihood, npool=npool) + except MarginalizedLikelihoodReconstructionError as e: + logger.warning( + "Marginalised parameter reconstruction failed with message " + "{}. Some parameters may not have the intended " + "interpretation.".format(e) + ) + + if ("ra" in output_sample.keys() and "dec" in output_sample.keys() and "psi" in output_sample.keys()): + compute_snrs(output_sample, likelihood, npool=npool) + else: + logger.info( + "Skipping SNR computation since samples have insufficient sky location information" + ) + + return output_sample diff --git a/test/gw/conversion_test.py b/test/gw/conversion_test.py index 8ce40e3b..f75dd5c2 100644 --- a/test/gw/conversion_test.py +++ b/test/gw/conversion_test.py @@ -171,6 +171,26 @@ def test_lambda_1_lambda_2_to_delta_lambda_tilde(self): ) self.assertTrue((self.delta_lambda_tilde - delta_lambda_tilde) < 1e-5) + def test_identity_conversion(self): + original_samples = dict( + mass_1=self.mass_1, + mass_2=self.mass_2, + mass_ratio=self.mass_ratio, + total_mass=self.total_mass, + chirp_mass=self.chirp_mass, + symmetric_mass_ratio=self.symmetric_mass_ratio, + cos_angle=self.cos_angle, + angle=self.angle, + lambda_1=self.lambda_1, + lambda_2=self.lambda_2, + lambda_tilde=self.lambda_tilde, + delta_lambda_tilde=self.delta_lambda_tilde + ) + identity_samples, blank_list = conversion.identity_map_conversion(original_samples) + assert blank_list == [] + for key, val in identity_samples.items(): + assert val == self.__dict__[key] + class TestConvertToLALParams(unittest.TestCase): def setUp(self): @@ -509,6 +529,50 @@ def test_generate_bbh_parameters_with_likelihood(self): for key in extra_expected: self.assertIn(key, converted) + def test_identity_generation_no_likelihood(self): + test_fixed_prior = bilby.core.prior.PriorDict({ + "test_param_a": bilby.core.prior.DeltaFunction(0, name="test_param_a"), + "test_param_b": bilby.core.prior.DeltaFunction(1, name="test_param_b") + } + ) + output_sample = conversion.identity_map_generation(self.parameters, priors=test_fixed_prior) + assert output_sample.pop("test_param_a") == 0 + assert output_sample.pop("test_param_b") == 1 + for key, val in self.parameters.items(): + assert output_sample.pop(key) == val + assert output_sample == {} + + def test_identity_generation_with_likelihood(self): + priors = bilby.gw.prior.BBHPriorDict() + priors["geocent_time"] = bilby.core.prior.Uniform(0.4, 0.6) + self.parameters["time_jitter"] = 0.0 + # Note we do *not* switch to azimuth/zenith, because the identity generation function + # is not intended to be capable of that conversion + ifos = bilby.gw.detector.InterferometerList(["H1"]) + ifos.set_strain_data_from_power_spectral_densities(duration=1, sampling_frequency=256) + wfg = bilby.gw.waveform_generator.WaveformGenerator( + frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole + ) + likelihood = bilby.gw.likelihood.GravitationalWaveTransient( + interferometers=ifos, + waveform_generator=wfg, + priors=priors, + phase_marginalization=True, + time_marginalization=True, + 
reference_frame="sky", + ) + output_sample = conversion.identity_map_generation(self.parameters, priors=priors, likelihood=likelihood) + extra_expected = [ + "phase", + "geocent_time", + "H1_optimal_snr", + "H1_matched_filter_snr", + ] + for key in extra_expected: + self.assertIn(key, output_sample) + for key, val in self.parameters.items(): + self.assertTrue(output_sample[key] == val) + class TestDistanceTransformations(unittest.TestCase): def setUp(self): From a20cd33b661ecff4c33b95aa8956f0994b5a286f Mon Sep 17 00:00:00 2001 From: Rhiannon Udall Date: Wed, 7 Aug 2024 13:58:15 +0000 Subject: [PATCH 11/15] Documentation: Fix Rendering of Sphinx Text for Whitening Functions --- bilby/gw/detector/interferometer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bilby/gw/detector/interferometer.py b/bilby/gw/detector/interferometer.py index 4f54b715..ac0c1488 100644 --- a/bilby/gw/detector/interferometer.py +++ b/bilby/gw/detector/interferometer.py @@ -645,7 +645,7 @@ def whiten_frequency_series(self, frequency_series : np.array) -> np.array: Such that .. math:: - Var(n) = \\frac{1}{N} \\sum_k=0^N n_W(f_k)n_W^*(f_k) = 2 + Var(n) = \\frac{1}{N} \\sum_{k=0}^N n_W(f_k)n_W^*(f_k) = 2 Where the factor of two is due to the independent real and imaginary components. @@ -678,12 +678,12 @@ def get_whitened_time_series_from_whitened_frequency_series( .. math:: - W = \\frac{1}{N} \\sum_{k=0}^N \\Theta(f_{max} - f_k)\\Theta(f_k - f_{min})} + W = \\frac{1}{N} \\sum_{k=0}^N \\Theta(f_{max} - f_k)\\Theta(f_k - f_{min}) and accordingly the termwise window factor is .. math:: - w = \\sqrt{N W} = \\sqrt{\\sum_{k=0}^N \\Theta(f_{max} - f_k)\\Theta(f_k - f_{min})}} + w = \\sqrt{N W} = \\sqrt{\\sum_{k=0}^N \\Theta(f_{max} - f_k)\\Theta(f_k - f_{min})} """ frequency_window_factor = ( From 45524b5d7800bff14f081040600d303298556346 Mon Sep 17 00:00:00 2001 From: Colm Talbot Date: Wed, 7 Aug 2024 14:40:33 +0000 Subject: [PATCH 12/15] MAINT: drop py39 support --- .gitlab-ci.yml | 28 -------------------- containers/v3-dockerfile-test-suite-python39 | 27 ------------------- containers/write_dockerfiles.py | 2 +- docs/installation.txt | 6 ++--- setup.py | 7 +++-- 5 files changed, 7 insertions(+), 63 deletions(-) delete mode 100644 containers/v3-dockerfile-test-suite-python39 diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a9043a80..647b71c8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -70,10 +70,6 @@ containers: ${script} --help; done -basic-3.9: - <<: *test-python - image: python:3.9 - basic-3.10: <<: *test-python image: python:3.10 @@ -89,10 +85,6 @@ basic-3.11: - *list-env - pytest test/test_samplers_import.py -v -import-samplers-3.9: - <<: *test-samplers-import - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39 - import-samplers-3.10: <<: *test-samplers-import image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 @@ -138,11 +130,6 @@ install: - pytest --cov=bilby --durations 10 -python-3.9: - <<: *unit-test - needs: ["basic-3.9"] - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39 - python-3.10: <<: *unit-test needs: ["basic-3.10", "precommits-py3.10"] @@ -172,11 +159,6 @@ python-3.11: - *list-env - pytest test/integration/sampler_run_test.py --durations 10 -v -python-3.9-samplers: - <<: *test-sampler - needs: ["basic-3.9"] - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39 - python-3.10-samplers: <<: *test-sampler needs: ["basic-3.10", "precommits-py3.10"] @@ -208,11 +190,6 @@ integration-tests-python-3.10: - *list-env - pytest test/gw/plot_test.py 
-plotting-python-3.9: - <<: *plotting - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39 - needs: ["basic-3.9"] - plotting-python-3.10: <<: *plotting image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 @@ -280,11 +257,6 @@ pages: - docker image tag v3-bilby-$PYVERSION containers.ligo.org/lscsoft/bilby/v2-bilby-$PYVERSION:latest - docker image push containers.ligo.org/lscsoft/bilby/v2-bilby-$PYVERSION:latest -build-python39-container: - <<: *build-container - variables: - PYVERSION: "python39" - build-python310-container: <<: *build-container variables: diff --git a/containers/v3-dockerfile-test-suite-python39 b/containers/v3-dockerfile-test-suite-python39 deleted file mode 100644 index 9c1d5a04..00000000 --- a/containers/v3-dockerfile-test-suite-python39 +++ /dev/null @@ -1,27 +0,0 @@ -# This dockerfile is written automatically and should not be modified by hand. - -FROM containers.ligo.org/docker/base:conda -LABEL name="bilby CI testing" \ -maintainer="Gregory Ashton , Colm Talbot " - -COPY env-template.yml env.yml -RUN echo " - python=3.9" >> env.yml -ENV conda_env python39 - -RUN mamba env create -f env.yml -n ${conda_env} -RUN echo "source activate ${conda_env}" > ~/.bashrc -ENV PATH /opt/conda/envs/${conda_env}/bin:$PATH -RUN /bin/bash -c "source activate ${conda_env}" -RUN mamba info -RUN python --version - -# Add the ROQ data to the image -RUN mkdir roq_basis \ - && cd roq_basis \ - && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_linear.npy \ - && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_quadratic.npy \ - && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_linear.npy \ - && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_quadratic.npy \ - && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/params.dat \ - && wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_addcal.hdf5 \ - && wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_multiband_addcal.hdf5 diff --git a/containers/write_dockerfiles.py b/containers/write_dockerfiles.py index f12c071e..1ca29f98 100644 --- a/containers/write_dockerfiles.py +++ b/containers/write_dockerfiles.py @@ -3,7 +3,7 @@ with open("dockerfile-template", "r") as ff: template = ff.read() -python_versions = [(3, 9), (3, 10), (3, 11)] +python_versions = [(3, 10), (3, 11)] today = date.today().strftime("%Y%m%d") for python_major_version, python_minor_version in python_versions: diff --git a/docs/installation.txt b/docs/installation.txt index adfbe199..378ee65f 100644 --- a/docs/installation.txt +++ b/docs/installation.txt @@ -10,7 +10,7 @@ Installation $ conda install -c conda-forge bilby - Supported python versions: 3.9-3.11. + Supported python versions: 3.10-3.11. .. tab:: Pip @@ -18,7 +18,7 @@ Installation $ pip install bilby - Supported python versions: 3.9-3.11. + Supported python versions: 3.10-3.11. This will install all requirements for running :code:`bilby` for general @@ -47,7 +47,7 @@ wave inference, please additionally run the following commands. Install bilby from source ------------------------- -:code:`bilby` is developed and tested with Python 3.9-3.11. In the +:code:`bilby` is developed and tested with Python 3.10-3.11. In the following, we assume you have a working python installation, `python pip `_, and `git `_. 
See :ref:`installing-python` for our diff --git a/setup.py b/setup.py index 353e92ee..5b619120 100644 --- a/setup.py +++ b/setup.py @@ -5,8 +5,8 @@ import os python_version = sys.version_info -if python_version < (3, 9): - sys.exit("Python < 3.9 is not supported, aborting setup") +if python_version < (3, 10): + sys.exit("Python < 3.10 is not supported, aborting setup") def get_long_description(): @@ -66,7 +66,7 @@ def readfile(filename): "bilby.gw.detector": ["noise_curves/*.txt", "detectors/*"], "bilby.gw.eos": ["eos_tables/*.dat"], }, - python_requires=">=3.9", + python_requires=">=3.10", install_requires=get_requirements(), extras_require={ "gw": get_requirements("gw"), @@ -104,7 +104,6 @@ def readfile(filename): ], }, classifiers=[ - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "License :: OSI Approved :: MIT License", From e37c308ac5d4efebba07f7f0bbc2fcf147225fa6 Mon Sep 17 00:00:00 2001 From: Colm Talbot Date: Thu, 8 Aug 2024 11:39:43 +0000 Subject: [PATCH 13/15] MAINT: Add py312 tests and bump default test version --- .gitlab-ci.yml | 68 +++++++++++++------ containers/v3-dockerfile-test-suite-python312 | 27 ++++++++ containers/write_dockerfiles.py | 2 +- docs/installation.txt | 6 +- 4 files changed, 79 insertions(+), 24 deletions(-) create mode 100644 containers/v3-dockerfile-test-suite-python312 diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 647b71c8..e702fa93 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -35,7 +35,7 @@ authors: # Test containers scripts are up to date containers: stage: initial - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 script: - cd containers - python write_dockerfiles.py #HACK @@ -43,7 +43,7 @@ containers: # write_dockerfiles.py and commit the changes. 
- git diff --exit-code - cp env-template.yml env.yml - - echo " - python=3.10" >> env.yml + - echo " - python=3.11" >> env.yml - mamba env create -f env.yml -n test --dry-run .test-python: &test-python @@ -78,6 +78,10 @@ basic-3.11: <<: *test-python image: python:3.11 +basic-3.12: + <<: *test-python + image: python:3.12 + .test-samplers-import: &test-samplers-import stage: initial script: @@ -93,6 +97,10 @@ import-samplers-3.11: <<: *test-samplers-import image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 +import-samplers-3.12: + <<: *test-samplers-import + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python312 + .precommits: &precommits stage: initial script: @@ -104,19 +112,19 @@ import-samplers-3.11: # Run precommits (flake8, spellcheck, isort, no merge conflicts, etc) - pre-commit run --all-files --verbose --show-diff-on-failure -precommits-py3.10: +precommits-py3.11: <<: *precommits - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 variables: - CACHE_DIR: ".pip310" - PYVERSION: "python310" + CACHE_DIR: ".pip311" + PYVERSION: "python311" install: stage: initial parallel: matrix: - EXTRA: [gw, mcmc, all] - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 script: - pip install .[$EXTRA] @@ -132,8 +140,13 @@ install: python-3.10: <<: *unit-test - needs: ["basic-3.10", "precommits-py3.10"] + needs: ["basic-3.10"] image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 + +python-3.11: + <<: *unit-test + needs: ["basic-3.11", "precommits-py3.11"] + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 after_script: - coverage html - coverage xml @@ -147,10 +160,10 @@ python-3.10: - htmlcov/ expire_in: 30 days -python-3.11: +python-3.12: <<: *unit-test - needs: ["basic-3.11"] - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 + needs: ["basic-3.12"] + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python312 .test-sampler: &test-sampler stage: test @@ -161,18 +174,23 @@ python-3.11: python-3.10-samplers: <<: *test-sampler - needs: ["basic-3.10", "precommits-py3.10"] + needs: ["basic-3.10"] image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 python-3.11-samplers: <<: *test-sampler - needs: ["basic-3.11"] + needs: ["basic-3.11", "precommits-py3.11"] image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 -integration-tests-python-3.10: +python-3.12-samplers: + <<: *test-sampler + needs: ["basic-3.12"] + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python312 + +integration-tests-python-3.11: stage: test - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 - needs: ["basic-3.10", "precommits-py3.10"] + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 + needs: ["basic-3.11", "precommits-py3.11"] only: - schedules script: @@ -193,18 +211,23 @@ integration-tests-python-3.10: plotting-python-3.10: <<: *plotting image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 - needs: ["basic-3.10", "precommits-py3.10"] + needs: ["basic-3.10"] plotting-python-3.11: <<: *plotting image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 - needs: ["basic-3.11"] + needs: ["basic-3.11", "precommits-py3.11"] + +plotting-python-3.12: + <<: *plotting + image: containers.ligo.org/lscsoft/bilby/v2-bilby-python312 + needs: ["basic-3.12"] # ------------------- Docs stage ------------------------------------------- docs: stage: docs - image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 + 
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python311 before_script: - python -m ipykernel install script: @@ -225,7 +248,7 @@ docs: pages: stage: deploy - needs: ["docs", "python-3.10"] + needs: ["docs", "python-3.11"] script: - mkdir public/ - mv htmlcov/ public/ @@ -267,6 +290,11 @@ build-python311-container: variables: PYVERSION: "python311" +build-python312-container: + <<: *build-container + variables: + PYVERSION: "python312" + pypi-release: stage: deploy image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310 diff --git a/containers/v3-dockerfile-test-suite-python312 b/containers/v3-dockerfile-test-suite-python312 new file mode 100644 index 00000000..60c0228d --- /dev/null +++ b/containers/v3-dockerfile-test-suite-python312 @@ -0,0 +1,27 @@ +# This dockerfile is written automatically and should not be modified by hand. + +FROM containers.ligo.org/docker/base:conda +LABEL name="bilby CI testing" \ +maintainer="Gregory Ashton , Colm Talbot " + +COPY env-template.yml env.yml +RUN echo " - python=3.12" >> env.yml +ENV conda_env python312 + +RUN mamba env create -f env.yml -n ${conda_env} +RUN echo "source activate ${conda_env}" > ~/.bashrc +ENV PATH /opt/conda/envs/${conda_env}/bin:$PATH +RUN /bin/bash -c "source activate ${conda_env}" +RUN mamba info +RUN python --version + +# Add the ROQ data to the image +RUN mkdir roq_basis \ + && cd roq_basis \ + && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_linear.npy \ + && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_quadratic.npy \ + && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_linear.npy \ + && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_quadratic.npy \ + && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/params.dat \ + && wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_addcal.hdf5 \ + && wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_multiband_addcal.hdf5 diff --git a/containers/write_dockerfiles.py b/containers/write_dockerfiles.py index 1ca29f98..064c5d0f 100644 --- a/containers/write_dockerfiles.py +++ b/containers/write_dockerfiles.py @@ -3,7 +3,7 @@ with open("dockerfile-template", "r") as ff: template = ff.read() -python_versions = [(3, 10), (3, 11)] +python_versions = [(3, 10), (3, 11), (3, 12)] today = date.today().strftime("%Y%m%d") for python_major_version, python_minor_version in python_versions: diff --git a/docs/installation.txt b/docs/installation.txt index 378ee65f..0583ce87 100644 --- a/docs/installation.txt +++ b/docs/installation.txt @@ -10,7 +10,7 @@ Installation $ conda install -c conda-forge bilby - Supported python versions: 3.10-3.11. + Supported python versions: 3.10-3.12. .. tab:: Pip @@ -18,7 +18,7 @@ Installation $ pip install bilby - Supported python versions: 3.10-3.11. + Supported python versions: 3.10-3.12. This will install all requirements for running :code:`bilby` for general @@ -47,7 +47,7 @@ wave inference, please additionally run the following commands. Install bilby from source ------------------------- -:code:`bilby` is developed and tested with Python 3.10-3.11. In the +:code:`bilby` is developed and tested with Python 3.10-3.12. In the following, we assume you have a working python installation, `python pip `_, and `git `_. 
See :ref:`installing-python` for our From 41cb703c5a0c283843095ab94750c5f9ee5ba491 Mon Sep 17 00:00:00 2001 From: Colm Talbot Date: Thu, 8 Aug 2024 12:40:53 +0000 Subject: [PATCH 14/15] FEAT: enable caching to be disabled in hyper.model.Model --- bilby/hyper/model.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/bilby/hyper/model.py b/bilby/hyper/model.py index 35927407..3e84fbb6 100644 --- a/bilby/hyper/model.py +++ b/bilby/hyper/model.py @@ -1,7 +1,7 @@ from ..core.utils import infer_args_from_function_except_n_args -class Model(object): +class Model: r""" Population model that combines a set of factorizable models. @@ -12,18 +12,24 @@ class Model(object): p(\theta | \Lambda) = \prod_{i} p_{i}(\theta | \Lambda) """ - def __init__(self, model_functions=None): + def __init__(self, model_functions=None, cache=True): """ Parameters ========== model_functions: list List of callables to compute the probability. - If this includes classes, the `__call__` method should return the - probability. + If this includes classes, the :code:`__call__`: method + should return the probability. The requires variables are chosen at run time based on either inspection or querying a :code:`variable_names` attribute. + cache: bool + Whether to cache the value returned by the model functions, + default=:code:`True`. The caching only looks at the parameters + not the data, so should be used with caution. The caching also + breaks :code:`jax` JIT compilation. """ self.models = model_functions + self.cache = cache self._cached_parameters = {model: None for model in self.models} self._cached_probability = {model: None for model in self.models} @@ -48,14 +54,18 @@ def prob(self, data, **kwargs): probability = 1.0 for ii, function in enumerate(self.models): function_parameters = self._get_function_parameters(function) - if self._cached_parameters[function] == function_parameters: + if ( + self.cache + and self._cached_parameters[function] == function_parameters + ): new_probability = self._cached_probability[function] else: new_probability = function( data, **self._get_function_parameters(function) ) - self._cached_parameters[function] = function_parameters - self._cached_probability[function] = new_probability + if self.cache: + self._cached_parameters[function] = function_parameters + self._cached_probability[function] = new_probability probability *= new_probability return probability From 9a34533d018c4f7e6270ab3b7bc7683472e44cf5 Mon Sep 17 00:00:00 2001 From: Matthew Pitkin Date: Thu, 22 Aug 2024 15:19:55 +0000 Subject: [PATCH 15/15] Resolve "patch to speed the MCMC" --- AUTHORS.md | 1 + bilby/core/sampler/emcee.py | 16 +++++++++------- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/AUTHORS.md b/AUTHORS.md index 8c227ed6..17b09070 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -16,6 +16,7 @@ Bruce Edelman Carl-Johan Haster Cecilio Garcia-Quiros Charlie Hoy +Chentao Yang Christopher Philip Luke Berry Christos Karathanasis Colm Talbot diff --git a/bilby/core/sampler/emcee.py b/bilby/core/sampler/emcee.py index 76e4dd1e..2c12ee35 100644 --- a/bilby/core/sampler/emcee.py +++ b/bilby/core/sampler/emcee.py @@ -1,7 +1,6 @@ import os import shutil from collections import namedtuple -from shutil import copyfile import numpy as np from packaging import version @@ -333,16 +332,19 @@ def sampler(self): def write_chains_to_file(self, sample): chain_file = self.checkpoint_info.chain_file temp_chain_file = chain_file + ".temp" - if os.path.isfile(chain_file): - 
copyfile(chain_file, temp_chain_file) if self.prerelease: points = np.hstack([sample.coords, sample.blobs]) else: points = np.hstack([sample[0], np.array(sample[3])]) - with open(temp_chain_file, "a") as ff: - for ii, point in enumerate(points): - ff.write(self.checkpoint_info.chain_template.format(ii, *point)) - shutil.move(temp_chain_file, chain_file) + data_to_write = "\n".join( + self.checkpoint_info.chain_template.format(ii, *point) + for ii, point in enumerate(points) + ) + with open(temp_chain_file, "w") as ff: + ff.write(data_to_write) + with open(temp_chain_file, "rb") as ftemp, open(chain_file, "ab") as fchain: + shutil.copyfileobj(ftemp, fchain) + os.remove(temp_chain_file) @property def _previous_iterations(self):
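This final patch speeds up emcee checkpointing by replacing a copy of the whole existing chain file plus one write per sample with a single in-memory join that is streamed onto the chain file once. The same pattern in isolation, as a minimal sketch with illustrative names rather than the bilby API:

import os
import shutil

def append_rows(chain_file, rows):
    # Format every row in memory first, so the chain file receives one
    # batched write instead of one write per sample.
    template = "\t".join(["{}"] * (1 + len(rows[0])))  # index plus columns
    data = "\n".join(
        template.format(ii, *row) for ii, row in enumerate(rows)
    ) + "\n"
    temp_file = chain_file + ".temp"
    with open(temp_file, "w") as ff:
        ff.write(data)
    # Stream the temporary file onto the end of the chain file in binary
    # mode, then remove it; the existing chain is never copied.
    with open(temp_file, "rb") as ftemp, open(chain_file, "ab") as fchain:
        shutil.copyfileobj(ftemp, fchain)
    os.remove(temp_file)

append_rows("chain.dat", [(0.1, 0.2), (0.3, 0.4)])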