From de3536e7eb1d3a8d1e59c306af49e636c09f12aa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ot=C3=A1vio=20Alves?=
Date: Thu, 4 Apr 2024 23:23:05 -0400
Subject: [PATCH] Added DESI Y1 BAO likelihood

---
 modules_desy6/desi_likelihood/desi_y1_bao.py  | 103 ++++++++++++++++++
 .../desi_likelihood/desi_y1_bao_ani.txt       |   7 ++
 .../desi_likelihood/desi_y1_bao_iso.txt       |   3 +
 .../desi_likelihood/example/params.ini        |  46 ++++++++
 .../desi_likelihood/example/values.ini        |  14 +++
 modules_desy6/desi_likelihood/module.yaml     |  57 ++++++++++
 6 files changed, 230 insertions(+)
 create mode 100644 modules_desy6/desi_likelihood/desi_y1_bao.py
 create mode 100644 modules_desy6/desi_likelihood/desi_y1_bao_ani.txt
 create mode 100644 modules_desy6/desi_likelihood/desi_y1_bao_iso.txt
 create mode 100644 modules_desy6/desi_likelihood/example/params.ini
 create mode 100644 modules_desy6/desi_likelihood/example/values.ini
 create mode 100644 modules_desy6/desi_likelihood/module.yaml

diff --git a/modules_desy6/desi_likelihood/desi_y1_bao.py b/modules_desy6/desi_likelihood/desi_y1_bao.py
new file mode 100644
index 00000000..e545d358
--- /dev/null
+++ b/modules_desy6/desi_likelihood/desi_y1_bao.py
@@ -0,0 +1,103 @@
+import os
+import numpy as np
+
+from cosmosis.datablock import option_section, names
+from cosmosis.gaussian_likelihood import GaussianLikelihood
+
+ROOT_DIR = os.path.split(os.path.abspath(__file__))[0]
+
+class DESIY1BAOLikelihood(GaussianLikelihood):
+
+    like_name = "desi_y1_bao"
+
+    def __init__(self, options):
+
+        iso_file = os.path.join(ROOT_DIR, "desi_y1_bao_iso.txt")
+        ani_file = os.path.join(ROOT_DIR, "desi_y1_bao_ani.txt")
+
+        iso_cols = ['z_eff', 'DVrd', 'DVrd_error']
+        ani_cols = ['z_eff', 'DMrd', 'DMrd_error', 'DHrd', 'DHrd_error', 'corr']
+
+        self.iso_data = {k: v for k, v in zip(iso_cols, np.loadtxt(iso_file, unpack=True))}
+        self.ani_data = {k: v for k, v in zip(ani_cols, np.loadtxt(ani_file, unpack=True))}
+
+        self.niso = len(self.iso_data['z_eff'])
+        self.nani = len(self.ani_data['z_eff'])
+
+        self.feedback = options.get_bool("feedback", default=False)
+
+        super().__init__(options)
+
+    def build_data(self):
+        zeff = np.concatenate([self.iso_data['z_eff'], self.ani_data['z_eff'], self.ani_data['z_eff']])
+        dist = np.concatenate([self.iso_data['DVrd'], self.ani_data['DMrd'], self.ani_data['DHrd']])
+
+        if self.feedback:
+            print('Data distances')
+            print('zeff DV/rd DM/rd DH/rd')
+            for z, v in zip(self.iso_data['z_eff'], self.iso_data['DVrd']):
+                print(f'{z:.2f} {v:.2f}')
+            for z, m, h in zip(self.ani_data['z_eff'], self.ani_data['DMrd'], self.ani_data['DHrd']):
+                print(f'{z:.2f} {m:.2f} {h:.2f}')
+
+        return zeff, dist
+
+    def build_covariance(self):
+        cov = np.diag(np.concatenate([self.iso_data['DVrd_error'], self.ani_data['DMrd_error'], self.ani_data['DHrd_error']]))**2
+
+        zipped = zip(self.ani_data['DMrd_error'], self.ani_data['DHrd_error'], self.ani_data['corr'])  # DM-DH correlation per anisotropic bin
+
+        for i, (DMrd_error, DHrd_error, corr) in enumerate(zipped, start=self.niso):
+            cov[i, i+self.nani] = DMrd_error * DHrd_error * corr
+            cov[i+self.nani, i] = cov[i, i+self.nani]
+
+        return cov
+
+    def build_inverse_covariance(self):
+        return np.linalg.inv(self.cov)
+
+    def extract_theory_points(self, block):
+
+        z = block[names.distances, 'z']
+
+        # Sound horizon at the drag epoch
+        rd = block[names.distances, "rs_zdrag"]
+        if self.feedback:
+            print(f'rs_zdrag = {rd}')
+
+        # Comoving angular diameter distance D_M(z)
+        DM_z = block[names.distances, 'd_m']  # in Mpc
+
+        # Hubble distance D_H(z) = 1/H(z), with H stored in 1/Mpc
+        DH_z = 1/block[names.distances, 'H']  # in Mpc
+
+        # Angle-averaged distance D_V(z) = [z D_M(z)^2 D_H(z)]^(1/3)
+        DV_z = (z * DM_z**2 * DH_z)**(1/3)  # in Mpc
+
+        # The distance arrays may be stored in decreasing-redshift (chronological) order;
+        # reverse them so z is increasing, as np.interp below requires
+        if z[1] < z[0]:
+            z = z[::-1]
+            DM_z = DM_z[::-1]
+            DH_z = DH_z[::-1]
+
+        # Interpolate the theory D_M and D_H to the effective redshifts of the data
+        z_eff = self.data_x[:self.niso+self.nani]
+
+        DMrd = np.interp(z_eff, z, DM_z)/rd
+        DHrd = np.interp(z_eff, z, DH_z)/rd
+        DVrd = (z_eff * DMrd**2 * DHrd)**(1/3)
+
+        if self.feedback:
+            print('Theory distances')
+            print('zeff DV/rd DM/rd DH/rd')
+            for i, (z, m, h, v) in enumerate(zip(z_eff, DMrd, DHrd, DVrd)):
+                if i < self.niso:
+                    print(f'{z:.2f} {v:.2f}')
+                else:
+                    print(f'{z:.2f} {m:.2f} {h:.2f}')
+
+        return np.concatenate([DVrd[:self.niso], DMrd[self.niso:], DHrd[self.niso:]])
+
+setup, execute, cleanup = DESIY1BAOLikelihood.build_module()
+
diff --git a/modules_desy6/desi_likelihood/desi_y1_bao_ani.txt b/modules_desy6/desi_likelihood/desi_y1_bao_ani.txt
new file mode 100644
index 00000000..af91bcf5
--- /dev/null
+++ b/modules_desy6/desi_likelihood/desi_y1_bao_ani.txt
@@ -0,0 +1,7 @@
+# zeff DM/rd error DH/rd error corr
+0.51 13.62 0.25 20.98 0.61 -0.445
+0.71 16.84 0.32 20.08 0.60 -0.420
+0.93 21.73 0.28 17.87 0.35 -0.389
+1.32 27.80 0.69 13.82 0.42 -0.444
+2.33 39.71 0.94 8.52 0.17 -0.477
+
diff --git a/modules_desy6/desi_likelihood/desi_y1_bao_iso.txt b/modules_desy6/desi_likelihood/desi_y1_bao_iso.txt
new file mode 100644
index 00000000..9211bf63
--- /dev/null
+++ b/modules_desy6/desi_likelihood/desi_y1_bao_iso.txt
@@ -0,0 +1,3 @@
+# zeff DV/rd error
+0.30 7.92 0.15
+1.49 26.09 0.67
diff --git a/modules_desy6/desi_likelihood/example/params.ini b/modules_desy6/desi_likelihood/example/params.ini
new file mode 100644
index 00000000..349f06bd
--- /dev/null
+++ b/modules_desy6/desi_likelihood/example/params.ini
@@ -0,0 +1,46 @@
+[runtime]
+sampler = polychord
+root = ${CSL_DIR}
+
+[DEFAULT]
+RUN_NAME = desi
+
+[pipeline]
+modules = consistency camb desi
+likelihoods = desi_y1_bao
+values = values.ini
+debug = F
+
+[test]
+save_dir = output
+fatal_errors = T
+
+[polychord]
+base_dir = output
+polychord_outfile_root = pc
+resume = F
+feedback = 3
+fast_fraction = 0.1
+live_points = 50
+num_repeats = 5
+tolerance = 0.01
+
+[output]
+filename = chain
+format = text
+
+[consistency]
+file = utility/consistency/consistency_interface.py
+
+[bbn_consistency]
+file = utility/bbn_consistency/bbn_consistency.py
+
+[camb]
+file = boltzmann/camb/camb.so
+mode = background
+feedback = 0
+
+[desi]
+file = modules_desy6/desi_likelihood/desi_y1_bao.py
+feedback = F
+
diff --git a/modules_desy6/desi_likelihood/example/values.ini b/modules_desy6/desi_likelihood/example/values.ini
new file mode 100644
index 00000000..0c4270e5
--- /dev/null
+++ b/modules_desy6/desi_likelihood/example/values.ini
@@ -0,0 +1,14 @@
+
+[cosmological_parameters]
+omega_m = 0.01 0.295 0.99
+omega_b = 0.01
+h0 = 1.0
+omnuh2 = 0.00064419153
+massive_nu = 1
+massless_nu = 2.044
+omega_k = 0.0
+w = -1.0
+wa = 0.0
+
+[distances]
+rs_zdrag = 10 101.8 1000
diff --git a/modules_desy6/desi_likelihood/module.yaml b/modules_desy6/desi_likelihood/module.yaml
new file mode 100644
index 00000000..aaa4f1a2
--- /dev/null
+++ b/modules_desy6/desi_likelihood/module.yaml
@@ -0,0 +1,57 @@
+name: desi_y1_bao
+version: "arXiv:2404.03002"
+purpose: Compute the likelihood of the expansion history using DESI Y1 BAO data
+url: "https://svn.sdss.org/public/data/eboss/DR16cosmo/tags/v1_0_0/likelihoods/"
+interface: desi_y1_bao.py
+attribution: ""
+rules: ""
+cite:
+    - "arXiv:2404.03000"
"arXiv:2404.03001" + - "arXiv:2404.03002" + +assumptions: + - "" + - "" + +explanation: > + "This module gives a likelihood of the comoving angular diameter + distance D_m and the Hubble parameter H(z). + It uses the sound horizon at last-scatter rs_zdrag. + + A correlated Gaussian likelihood is then returned." + +params: + feedback: + meaning: Whether to print extra output + type: bool + default: False + +inputs: + cosmological_parameters: + omega_m: + meaning: Baryon + cdm density fraction today + type: real + default: + h0: + meaning: Hubble parameter H0/(100 km/s/Mpc) + type: real + default: + distances: + z: + meaning: Redshifts of samples + type: real 1d + default: + d_a: + meaning: Angular diameter distance in Mpc + type: real 1d + default: + h: + meaning: Hubble parameter with in units of Mpc + type: real 1d + default: +outputs: + likelihoods: + desi_y1_bao_like: + meaning: Likelihood of supplied expansion history + type: real