From 68665660184eb33b6235ad0df5b9496d7546369d Mon Sep 17 00:00:00 2001 From: Francisco Jimenez Forteza <francisco.jimenez@condor1.atlas.local> Date: Thu, 6 May 2021 18:20:30 +0000 Subject: [PATCH] added the option of overwriting --- code_new/source_code/RD_Fits.ipynb | 33 ++- code_new/source_code/RD_Fits.py | 26 +- code_new/source_code/Run_Setup.ipynb | 167 +++++++++++++ code_new/source_code/rdown.py | 208 ++++++++++++++++ code_new/source_code/rdown_utilities.py | 318 ++++++++++++++++++++++++ code_new/source_code/read_data.py | 6 +- 6 files changed, 737 insertions(+), 21 deletions(-) create mode 100644 code_new/source_code/Run_Setup.ipynb create mode 100644 code_new/source_code/rdown.py create mode 100644 code_new/source_code/rdown_utilities.py diff --git a/code_new/source_code/RD_Fits.ipynb b/code_new/source_code/RD_Fits.ipynb index 0f91eb7..7b061a2 100644 --- a/code_new/source_code/RD_Fits.ipynb +++ b/code_new/source_code/RD_Fits.ipynb @@ -23,7 +23,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -65,7 +65,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -89,14 +89,14 @@ " parser.sections()\n", "except SystemExit: \n", " parser = ConfigParser()\n", - " parser.read('config_n0_to_1_mock.ini')\n", + " parser.read('./run_0_mock/config_n0_to_1_mock.ini')\n", " parser.sections()\n", " pass" ] }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -109,7 +109,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -119,7 +119,7 @@ "model: w-tau\n", "nmax: 0\n", "nm_mock: 1\n", - "tshift: 0.0\n", + "tshift: 0.2\n", "error: True\n", "error value: 0.002\n", "export: True\n", @@ -147,7 +147,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -173,7 +173,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -186,7 +186,20 @@ "samples_file = rdata.create_output_files(output_folder_1,pars,'post_samples')\n", "results_file = rdata.create_output_files(output_folder_1,pars,'sampler_results')\n", "sumary_data = rdata.create_output_files(output_folder_1,pars,'log_z')\n", - "best_data = rdata.create_output_files(output_folder_1,pars,'best_vals')" + "best_data = rdata.create_output_files(output_folder_1,pars,'best_vals')\n", + "\n", + "files = [corner_plot,corner_plot_extra,diagnosis_plot,fit_plot,samples_file,results_file,sumary_data,best_data]" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# Remove old files if overwrite = True\n", + "if overwrite:\n", + " rd_ut.rm_files(files)" ] }, { diff --git a/code_new/source_code/RD_Fits.py b/code_new/source_code/RD_Fits.py index 2caa3df..a8d13aa 100755 --- a/code_new/source_code/RD_Fits.py +++ b/code_new/source_code/RD_Fits.py @@ -8,7 +8,7 @@ """ -# In[39]: +# In[1]: #Import relevant modules, import data and all that @@ -37,14 +37,14 @@ import rdown_utilities as rd_ut import read_data as rdata -# In[40]: +# In[2]: ## Loading and running data tested with NR data ## Loading and running data tested with Mock data -# In[41]: +# In[3]: # Cell that calls the arguments 
from your 'config.ini' file. @@ -58,12 +58,12 @@ try: parser.sections() except SystemExit: parser = ConfigParser() - parser.read('config_n0_to_1_mock.ini') + parser.read('./run_0_mock/config_n0_to_1_mock.ini') parser.sections() pass -# In[42]: +# In[4]: # Load variables from config file @@ -73,7 +73,7 @@ except SystemExit: error_type, error_val, af, mf,tau_var_str,nm_mock)=rdata.read_config_file(parser) -# In[43]: +# In[5]: # Show configuration options @@ -92,7 +92,7 @@ print('nr code:',nr_code) print('fit noise:',fitnoise) -# In[44]: +# In[6]: # Create output directories @@ -115,7 +115,7 @@ if not os.path.exists(output_folder_1): print("Directory " , output_folder_1 , " Created ") -# In[45]: +# In[7]: # Define output files @@ -129,6 +129,16 @@ results_file = rdata.create_output_files(output_folder_1,pars,'sampler_results') sumary_data = rdata.create_output_files(output_folder_1,pars,'log_z') best_data = rdata.create_output_files(output_folder_1,pars,'best_vals') +files = [corner_plot,corner_plot_extra,diagnosis_plot,fit_plot,samples_file,results_file,sumary_data,best_data] + + +# In[8]: + + +# Remove old files if overwrite = True +if overwrite: + rd_ut.rm_files(files) + # In[46]: diff --git a/code_new/source_code/Run_Setup.ipynb b/code_new/source_code/Run_Setup.ipynb new file mode 100644 index 0000000..1d8a779 --- /dev/null +++ b/code_new/source_code/Run_Setup.ipynb @@ -0,0 +1,167 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 4, + "id": "square-toddler", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "b''" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\"\"\"\n", + "Created by Sumit Kumar on 2020-03-08 && modified by Xisco on 2021-04\n", + "Last modified:\n", + "\"\"\"\n", + "\n", + "import os, sys, numpy, glob, argparse\n", + "from datetime import date\n", + "import subprocess\n", + "from subprocess import call\n", + "import re\n", + "from configparser import ConfigParser\n", + "\n", + "today = date.today()\n", + "date = today.strftime(\"%Y%m%d\")\n", + "\n", + "runname='run_0_mock'\n", + "nmax = 0\n", + "config_file ='config_n0_to_1_mock.ini'\n", + "overwrite = True\n", + "\n", + "######\n", + "times = '(0 0.2 0.4 0.8 1.2 2 2.5 5 7.5 10 12 15 18 20)'\n", + "accounting_group = 'cbc.test.pe_ringdown'\n", + "cpus=8\n", + "nlive_points = 2000\n", + "req_memory=\"16GB\"\n", + "not_user='frjifo@aei.mpg.de'\n", + "pythonfile='/work/francisco.jimenez/sio/git/rdstackingproject/code_new/RD_Fits.py'\n", + "pythonscript='/work/francisco.jimenez/venv/bin/python'\n", + "#######################################################\n", + "\n", + "pwd = os.getcwd()\n", + "run_dir = '%s/%s'%(pwd,runname)\n", + "logs_dir = '%s/logs'%run_dir\n", + "\n", + "os.system('mkdir -p %s'%logs_dir)\n", + "os.system('cp %s %s/'%(config_file,run_dir))\n", + "\n", + "###########################################################################\n", + "# Creating Condor submit file\n", + "###########################################################################\n", + "filename1 = '%s/%s'%(run_dir,'condor_submit')\n", + "text_file1 = open(filename1 + \".sub\", \"w\")\n", + "text_file1.write(\"universe = vanilla\\n\")\n", + "text_file1.write(\"getenv = true\\n\")\n", + "text_file1.write(\"# run script -- make sure that condor has execute permission for this file (chmod a+x script.py)\\n\")\n", + "text_file1.write(\"executable = \"'%s/%s'%(run_dir,runname+'.sh \\n'))\n", + "text_file1.write(\"# file to dump stdout (this 
directory should exist)\\n\")\n", + "text_file1.write(\"output = %s/%s-$(Process).out\\n\"%(logs_dir,runname))\n", + "text_file1.write(\"# file to dump stderr\\n\")\n", + "text_file1.write(\"error = %s/%s-$(Process).err\\n\"%(logs_dir,runname))\n", + "text_file1.write(\"# condor logs\\n\")\n", + "text_file1.write(\"log = %s/%s-$(Process).log\\n\"%(logs_dir,runname))\n", + "text_file1.write(\"initialdir = %s \\n\"%run_dir)\n", + "text_file1.write(\"notify_user =\"+not_user+' \\n')\n", + "text_file1.write(\"notification = Complete\\n\")\n", + "text_file1.write('''arguments = \"-processid $(Process)\" \\n''')\n", + "text_file1.write(\"request_memory = \"+str(req_memory)+\"\\n\")\n", + "text_file1.write(\"request_cpus = \"+str(cpus)+\"\\n\") \n", + "text_file1.write(\"on_exit_remove = (ExitBySignal == False) || ((ExitBySignal == True) && (ExitSignal != 11))\\n\")\n", + "text_file1.write(\"accounting_group = %s\\n\"%accounting_group)\n", + "text_file1.write(\"queue 1\\n\")\n", + "text_file1.write(\"\\n\")\n", + "text_file1.close()\n", + "\n", + "###########################################################\n", + "# Creating python executable file\n", + "############################################################\n", + "filename2 = run_dir+'/'+runname+'.sh'\n", + "text_file2 = open(filename2, \"w\") \n", + "text_file2.write(\"#! /bin/bash \\n\")\n", + "text_file2.write(\"\\n\")\n", + "text_file2.write(\"times=\"+times+\"\\n\")\n", + "text_file2.write(\"config_file=\"+config_file+\"\\n\")\n", + "text_file2.write(\"pythonfile=\"+pythonfile+\"\\n\")\n", + "text_file2.write(\"pythonscript=\"+pythonscript+\"\\n\")\n", + "text_file2.write(\"\\n\")\n", + "text_file2.write(\"for i in ${times[@]}; do\\n\")\n", + "text_file2.write(\" awk -v a=\\\"$i\\\" '/^tshift/ && $3 != \\\"supplied\\\" { $3=a } { print }' $config_file > tmp && mv tmp $config_file\\n\")\n", + "text_file2.write(\" $pythonscript $pythonfile -c $config_file \\n\")\n", + "text_file2.write(\"done\\n\")\n", + "text_file2.write(\"awk -v a=\\\"0\\\" '/^tshift/ && $3 != \\\"supplied\\\" { $3=a } { print }' $config_file > tmp && mv tmp $config_file \\n\")\n", + "text_file2.write(\"\\n\")\n", + "text_file2.close()\n", + "os.system('chmod u+x %s'%filename2)\n", + "\n", + "os.system('cp '+runname+'.sh %s/'%run_dir)\n", + "os.system('chmod u+x ./'+runname+'.sh')\n", + "os.system('cd '+run_dir)\n", + "\n", + "###########################################################\n", + "# Checking the configuration file and adding some important replacements\n", + "############################################################\n", + "\n", + "filename3 = '%s/%s'%(run_dir,config_file)\n", + "# change the number of cores\n", + "bashCommand = \"awk -v a=\"+str(cpus)+\" '/^nb_cores/ && $3 != \\\"supplied\\\" { $3=a } { print }' \"+str(config_file)+\" > tmp && mv tmp \"+str(config_file);\n", + "subprocess.call(bashCommand,shell=True)\n", + "\n", + "filename3 = '%s/%s'%(run_dir,config_file)\n", + "# change the number of nmax\n", + "bashCommand = \"awk -v a=\"+str(nmax)+\" '/^nmax/ && $3 != \\\"supplied\\\" { $3=a } { print }' \"+str(config_file)+\" > tmp && mv tmp \"+str(config_file);\n", + "subprocess.call(bashCommand,shell=True)\n", + "\n", + "filename3 = '%s/%s'%(run_dir,config_file)\n", + "# change the number of nmax\n", + "bashCommand = \"awk -v a=\"+str(nlive_points)+\" '/^npoints/ && $3 != \\\"supplied\\\" { $3=a } { print }' \"+str(config_file)+\" > tmp && mv tmp \"+str(config_file);\n", + "subprocess.call(bashCommand,shell=True)\n", + "\n", + "# change the 
overwrite parameter\n", + "bashCommand = \"awk -v a=\"+str(overwrite)+\" '/^overwrite/ && $3 != \\\"supplied\\\" { $3=a } { print }' \"+str(config_file)+\" > tmp && mv tmp \"+str(config_file);\n", + "subprocess.call(bashCommand,shell=True)\n", + " \n", + "\n", + "###########################################################\n", + "# Submit the job\n", + "############################################################\n", + "filename4 = '%s/%s'%(run_dir,'condor_submit.sub')\n", + "bashCommand = ['condor_submit', str(filename4)]\n", + "process = subprocess.Popen(bashCommand, stdout=subprocess.PIPE,stderr=subprocess.PIPE);\n", + "output,error = process.communicate()\n", + "error" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/code_new/source_code/rdown.py b/code_new/source_code/rdown.py new file mode 100644 index 0000000..1357ccb --- /dev/null +++ b/code_new/source_code/rdown.py @@ -0,0 +1,208 @@ +# Copyright (C) 2021 Xisco Jimenez Forteza +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 3 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + + +# +# ============================================================================= +# +# Preamble +# +# ============================================================================= +# +# Module to generate RD waveforms. 
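+#
+# Example usage (an illustrative sketch only; the remnant values and time grid
+# below are placeholders, not outputs of this module):
+#
+#   import numpy as np
+#   from rdown import Ringdown_Spectrum
+#
+#   rd = Ringdown_Spectrum(mf=0.95, af=0.69, l=2, m=2, n=0,
+#                          time=np.arange(0, 80, 0.1), fixed=True)
+#   # with fixed=True the constructor fills rd.w and rd.tau with the GR values
+#   # for (mf, af); theta packs [omega, tau, amplitude, phase] per overtone
+#   theta = np.concatenate((rd.w, rd.tau, [1.0], [0.0]))
+#   h22 = rd.rd_model_wtau(theta)   # complex ringdown waveform sampled on rd.time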
+
+import numpy as np
+import qnm
+import os
+
+f_fpars= [[2.95845, -2.58697, 0.0533469], [2.12539, -1.78054, 0.0865503], [1.74755, -1.44776, 0.123666], [1.78287, -1.53203, 0.129475], [2.04028, -1.83224, 0.112497]]
+q_fpars=[[0.584077, 1.52053, -0.480658], [0.00561441, 0.630715, -0.432664], [-0.197965, 0.515956, -0.369706], [-0.275097, 0.455691, -0.331543], [-0.287596, 0.398514, -0.309799]]
+
+
+c=2.99792458*10**8;G=6.67259*10**(-11);MS=1.9885*10**30;
+
+class Ringdown_Spectrum:
+    """RDown model generator"""
+    def __init__(self,mf,af,l,m,n=4,s=-2,time=[],fixed=False,qnm_model='berti'):
+        self.mf = mf
+        self.af = af
+        self.l = l
+        self.m = m
+        self.n = n
+        self.time = time
+        self.grav_220 = [qnm.modes_cache(s=s,l=self.l,m=self.m,n=i) for i in range (0,self.n+1)]
+        self.dim = self.n+1
+        self.fixed = fixed
+        self.qnm_model = qnm_model
+        self.dict_omega = {'berti': self.QNM_Berti , 'qnm': self.QNM_spectrum}
+        self.dic = {'w-tau':self.rd_model_wtau , 'w-q': self.rd_model_wq, 'w-tau-fixed':self.rd_model_wtau_fixed,'w-tau-fixed-m-af': self.rd_model_wtau_m_af}
+
+        if len(self.time)==0:
+            self.time = np.arange(0,100,0.1)
+
+        if self.fixed:
+            omegas_new=np.asarray([self.grav_220[i](a=self.af)[0] for i in range (0,self.dim)])
+            self.w = (np.real(omegas_new))/self.mf
+            self.tau=-1/(np.imag(omegas_new))*self.mf
+
+
+    def QNM_spectrum(self):
+        """ It computes the RD frequencies and damping times in NR units.
+        """
+        omegas_new=np.asarray([self.grav_220[i](a=self.af)[0] for i in range (0,self.n+1)])
+        w_m_a = (np.real(omegas_new))/self.mf
+        tau_m_a=-1/(np.imag(omegas_new))*self.mf
+
+        return (w_m_a, tau_m_a)
+
+    def QNM_Berti(self,rdowndata):
+        """ It computes the RD frequencies and damping times in NR units.
+        """
+        position=np.argmax(rdowndata[0,0] >= (self.af))
+        #w_m_a=f1+f2*(1-af)**f3
+        w_m_a=[None]*(self.n+1)
+        tau_ma_a=[None]*(self.n+1)
+
+        for i in range(self.n+1):
+            qnm=rdowndata[i,1:3,position]
+            w_m_a[i] = qnm[0]/self.mf
+            tau_ma_a[i] = -1/(qnm[1])*self.mf
+
+        return w_m_a, tau_ma_a
+
+
+    def w_fpars_Berti(self,n):
+        return f_fpars[n]
+
+    def tau_qpars_Berti(self,n):
+        return q_fpars[n]
+
+    def mass_from_wtau(self,n,w,tau):
+        f1,f2,f3 = self.w_fpars_Berti(n)
+        q1,q2,q3 = self.tau_qpars_Berti(n)
+        res=(f1 + f2*(2**(-1/q3)*((-2*q1 + w*tau)/q2)**(1/q3))**f3)/w
+        return res
+
+    def spin_from_wtau(self,n,w,tau):
+        f1,f2,f3 = self.w_fpars_Berti(n)
+        q1,q2,q3 = self.tau_qpars_Berti(n)
+        res=1 - 2**(-1/q3)*((-2*q1 + w*tau)/q2)**(1/q3)
+        return res
+
+    def mass_from_wtau_loop(self,w,tau,l,m):
+        res=[None]*self.dim
+        for n in range (0,self.dim):
+            f1,f2,f3 = self.w_fpars_Berti(n)
+            q1,q2,q3 = self.tau_qpars_Berti(n)
+            res[n]=(f1 + f2*(2**(-1/q3)*((-2*q1 + w[n]*tau[n])/q2)**(1/q3))**f3)/w[n]
+        return res
+
+    def spin_from_wtau_loop(self,w,tau,l,m):
+        res=[None]*self.dim
+        for n in range (0,self.dim):
+            f1,f2,f3 = self.w_fpars_Berti(n)
+            q1,q2,q3 = self.tau_qpars_Berti(n)
+            res[n]= 1 - 2**(-1/q3)*((-2*q1 + w[n]*tau[n])/q2)**(1/q3)
+        return res
+
+
+    def rd_model_wtau(self,theta):
+        """RD model parametrized with the damping time tau.
+        """
+        assert int(len(theta)/4) == self.dim, 'Please recheck your n and parameters'
+
+        wvars = theta[ : (self.dim)]
+        tvars = theta[(self.dim) : 2*(self.dim)]
+        xvars = theta[2*(self.dim) : 3*(self.dim)]
+        yvars = theta[3*(self.dim) : ]
+
+        ansatz = 0
+        for i in range (0,self.dim):
+            ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-self.time/tvars[i]) * (np.cos(wvars[i]*self.time)-1j*np.sin(wvars[i]*self.time))
+        # -1j to agree with SXS convention
+        return ansatz
+
+    def rd_model_wtau_m_af(self,theta):
+        """RD model parametrized with the damping time tau and with the QNM spectrum fixed to GR. The QNM spectrum is given from the mass and spin.
+        """
+        xvars = theta[ : (self.dim)]
+        yvars = theta[(self.dim) : 2*(self.dim)]
+        mass_vars = theta[-2]
+        spin_vars = theta[-1]
+
+        w_m_a , tau_m_a = self.dict_omega[self.qnm_model](mass_vars,spin_vars,2,2)
+
+        ansatz = 0
+        for i in range (0,self.dim):
+            ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-self.time/tau_m_a[i]) * (np.cos(w_m_a[i]*self.time)-1j*np.sin(w_m_a[i]*self.time))
+        # -1j to agree with SXS convention
+        return ansatz
+
+    def rd_model_wtau_fixed(self,theta):
+        """RD model parametrized with the damping time tau and with the QNM spectrum fixed to GR.
+        """
+        xvars = theta[ : (self.dim)]
+        yvars = theta[(self.dim) : 2*(self.dim)]
+
+        ansatz = 0
+        for i in range (0,self.dim):
+            ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-self.time/self.tau[i]) * (np.cos(self.w[i]*self.time)-1j*np.sin(self.w[i]*self.time))
+        # -1j to agree with SXS convention
+        return ansatz
+
+    def rd_model_wq(self,theta):
+        """RD model parametrized with the quality factor q.
+        """
+        assert int(len(theta)/4) == self.dim, 'Please recheck your n and parameters'
+
+        wvars = theta[ : (self.dim)]
+        qvars = theta[(self.dim) : 2*(self.dim)]
+        xvars = theta[2*(self.dim) : 3*(self.dim)]
+        yvars = theta[3*(self.dim) : ]
+
+        ansatz = 0
+        for i in range (0,self.dim):
+            ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-self.time*np.pi*wvars[i]/qvars[i])*(np.cos(wvars[i]*self.time)-1j*np.sin(wvars[i]*self.time))
+        # -1j to agree with SXS convention
+        return ansatz
+
+    def rd_model_wq_fixed(self,theta):
+        """RD model parametrized with the damping time tau and with the QNM spectrum fixed to GR.
+        """
+        xvars = theta[ : (self.dim)]
+        yvars = theta[(self.dim) : 2*(self.dim)]
+
+        ansatz = 0
+        for i in range (0,self.dim):
+            ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-self.time/self.tau[i]) * (np.cos(self.w[i]*self.time)-1j*np.sin(self.w[i]*self.time))
+        # -1j to agree with SXS convention
+        return ansatz
+
+
+    def rd_model_wq_m_a(self,theta):
+        """RD model parametrized with the damping time tau and with the QNM spectrum fixed to GR. The QNM spectrum is given from the mass and spin.
+ """ + xvars = theta[ : (self.dim)] + yvars = theta[(self.dim) : 2*(self.dim)] + mass_vars = theta[-2] + spin_vars = theta[-1] + + w_m_a , tau_m_a = QNM_spectrum() + + ansatz = 0 + for i in range (0,dim): + ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-timesrd_final_tsh/tau_m_a[i]) * (np.cos(w_m_a[i]*timesrd_final_tsh)-1j*np.sin(w_m_a[i]*timesrd_final_tsh)) + # -1j to agree with SXS convention + return ansatz \ No newline at end of file diff --git a/code_new/source_code/rdown_utilities.py b/code_new/source_code/rdown_utilities.py new file mode 100644 index 0000000..c4a5b95 --- /dev/null +++ b/code_new/source_code/rdown_utilities.py @@ -0,0 +1,318 @@ +# Copyright (C) 2021 Xisco Jimenez Forteza +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 3 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + + +# +# ============================================================================= +# +# Preamble +# +# ============================================================================= +# +# Module to run PE on RD data +import numpy as np +from dynesty.utils import resample_equal +from dynesty import utils as dyfunc +import os +import csv +import pandas as pd +import pickle + +def posterior_samples(sampler): + """ + Returns posterior samples from nested samples and weights + given by dynsety sampler + """ + + dynesty_samples = sampler.results['samples'] + wt = np.exp(sampler.results['logwt'] - + sampler.results['logz'][-1]) + # Make sure that sum of weights equal to 1 + weights = wt/np.sum(wt) + posterior_dynesty = dyfunc.resample_equal(dynesty_samples, weights) + return posterior_dynesty + +def FFT_FreqBins(times): + Len = len(times) + DeltaT = times[-1]- times[0] + dt = DeltaT/(Len-1) + dnu = 1/(Len*dt) + maxfreq = 1/(2*dt) + add = dnu/4 + + p = np.arange(0.0,maxfreq+add,dnu) + m = np.arange(p[-1]-(2*maxfreq)+dnu,-dnu/2+add,dnu) + res=np.concatenate((p,m)) + + return res + +def hFromPsi4FFI(tpsi4,f0): + + timecheck1=tpsi4[-2,0]-tpsi4[-1,0] + timecheck2=tpsi4[1,0]-tpsi4[0,0] + + if np.abs(timecheck1-timecheck2)>=0.0001: + print("The data might not be equally sampled!!") + + times,data= tpsi4[:,0],tpsi4[:,1] + + freqs = FT_FreqBins(xaxis.real).real + position = np.argmax(freqs >= f0) + freqs[:position]=f0*np.ones(len(freqs[:position])) + freqs=2*np.pi*freqs + + fdata=fft(data) + len(myTable)*ifft(- fdata/floor**2); + np.stack((times,data)).T + + +def twopoint_autocovariance(t,n): + """ It computes the two-point autocovariance function. + """ + dt=t[1]-t[0] + res = np.zeros(len(n)) + taus = np.zeros(len(n)) + for tau in range(0,int(len(n)/2)): + ntau=np.roll(n, tau) + taus[tau] = t[tau] + res[tau]=np.sum(n*ntau).real + return (taus[:int(len(n)/2)],res[:int(len(n)/2)]) + +def save_object(obj, filename): + with open(filename, 'wb') as output: # Overwrites any existing file. 
+ pickle.dump(obj, output, pickle.HIGHEST_PROTOCOL) + + +def EasyMatchT(t,h1,h2,tmin,tmax): + """ It computes the time-domain match for (h1|h2) complex waveforms. + """ + pos = np.argmax(t >= (tmin)); + + h1red=h1[pos:]; + h2red=h2[pos:]; + + norm1=np.sum(np.abs(h1red)**2) + norm2=np.sum(np.abs(h2red)**2) + + myTable=h1red*np.conjugate(h2red) + res=((np.sum(myTable)/np.sqrt(norm1*norm2))).real + + return res + +def EasySNRT(t,h1,h2,tmin,tmax): + """ It computes the time-domain snr for (h1|h2) complex waveforms. + """ + pos = np.argmax(t >= (tmin)); + + h1red=h1[pos:]; + h2red=h2[pos:]; + + myTable=h1red*np.conjugate(h2red) + res=2*np.sqrt((np.sum(myTable)).real) + + return res + +def FindTmaximum(y): + """ It determines the maximum absolute value of the complex waveform. + """ + absval = np.sqrt(y[:,1]*y[:,1]+y[:,2]*y[:,2]) + vmax=np.max(absval) + index = np.argmax(absval == vmax) + timemax=y[index,0] + + return timemax + +def export_logz_files(output_file,pars): + sim_num, nmax, tshift, evidence, evidence_error = pars + + """ + Generate the logz.csv files you want to export the data to. + file_type must be one of this options: [corner_plot,corner_plot_extra,diagnosis,fit,post_samples,sampler_results,log_z] + """ + + summary_titles=['n','id','t_shift','dlogz','dlogz_err'] + if os.path.exists(output_file): + outvalues = np.array([[nmax, sim_num, tshift, evidence,evidence_error]]) + else: + outvalues = np.array([summary_titles,[nmax, sim_num, tshift, evidence,evidence_error]]) + + with open(output_file, 'a') as file: + writer = csv.writer(file) + if (outvalues.shape)[0]>1 : + writer.writerows(outvalues) + else: + writer.writerow(outvalues[0]) + + return + +def export_bestvals_files(best_data_file,postsamps,pars): + + tshift, lenpriors, labels = pars + + sigma_vars_m = np.empty(lenpriors) + sigma_vars_p = np.empty(lenpriors) + sigma_vars = np.empty(lenpriors) + sigma_vars_ml = np.empty(lenpriors) + for i in range(lenpriors): + amps_aux = postsamps[:,i] + sigma_vars_m[i] = np.quantile(amps_aux, 0.05) + sigma_vars[i] = np.quantile(amps_aux, 0.5) + sigma_vars_ml[i] = postsamps[-1,i] + sigma_vars_p[i] = np.quantile(amps_aux, 0.95) + + sigma_vars_all = [sigma_vars,sigma_vars_ml,sigma_vars_m,sigma_vars_p] + sigma_vars_all=np.stack([sigma_vars,sigma_vars_ml,sigma_vars_m,sigma_vars_p], axis=0) + + key =['max val','max val ml','lower bound','higher bound'] + dfslist = [pd.DataFrame(np.concatenate(([tshift],sigma_vars_all[i])).reshape((-1,lenpriors+1)), columns=np.concatenate((['tshift'],labels)), index = [key[i]]) for i in range(4)] + df2 = pd.concat(dfslist) + if os.path.exists(best_data_file): + df2.to_csv(best_data_file, mode='a', header=False,index = True) + else: + df2.to_csv(best_data_file, index = True) + + + +def define_labels(dim,model,fitnoise): + wstr = r'$\omega_' + + if model == 'w-tau': + taustr = r'$\tau_' + elif model == 'w-q': + taustr = r'$q_' + elif model == 'w-tau-fixed': + taustr = r'$dumb_var}' + elif model == 'w-tau-fixed-m-af': + taustr = r'$\tau_' + + ampstr = r'$A_' + phasestr = r'$\phi_' + + w_lab = [None] * dim + tau_lab = [None] * dim + amp_lab = [None] * dim + pha_lab = [None] * dim + mass_lab = ['mass'] + spin_lab = ['spin'] + + for i in range(dim): + w_lab[i] = wstr+str(i)+'$' + tau_lab[i] = taustr+str(i)+'$' + amp_lab[i] = ampstr+str(i)+'$' + pha_lab[i] = phasestr+str(i)+'$' + + + labels = np.concatenate((w_lab,tau_lab,amp_lab,pha_lab)) + + if model=='w-tau-fixed': + labels = np.concatenate((amp_lab,pha_lab)) + + if model=='w-tau-fixed-m-af': + pha_lab[i] = 
phasestr+str(i)+'$'
+
+        labels = np.concatenate((amp_lab,pha_lab,mass_lab,spin_lab))
+
+    if fitnoise:
+        noise_lab = ['noise']
+        labels = np.concatenate((labels,noise_lab))
+
+    return labels
+
+def get_truths(model,pars,fitnoise):
+    w, tau, mf, af , npamps = pars
+    if model == 'w-q':
+        tau_val = np.pi*w*tau
+        truths = np.concatenate((w,tau_val,npamps))
+    elif model == 'w-tau':
+        tau_val = tau
+        truths = np.concatenate((w,tau_val,npamps))
+    elif model == 'w-tau-fixed':
+        truths = npamps
+    elif model == 'w-tau-fixed-m-af':
+        truths = np.concatenate((npamps,[mf],[af]))
+
+    if fitnoise:
+        truths = np.concatenate((truths,[1]))
+
+    return truths
+
+def get_best_amps(pars,parser=None,nr_code=None):
+    nmax,model,samps_tr,half_points = pars
+
+    if model=='w-tau-fixed':
+        rg = (nmax+1)
+    elif model=='w-tau-fixed':
+        rg = (nmax+1)+2
+    else:
+        rg = (nmax+1)*2
+
+    if model=='w-tau-fixed-a-mf':
+        npamps = np.empty((nmax+1))
+        for i in range(0,(nmax+1)):
+            amps_aux = samps_tr[i+rg][half_points:-1]
+            npamps[i] = np.quantile(amps_aux, 0.5)
+    else :
+        npamps = np.empty((nmax+1)*2)
+        for i in range(0,(nmax+1)*2):
+            amps_aux = samps_tr[i][half_points:-1]
+            npamps[i] = np.quantile(amps_aux, 0.5)
+
+    if nr_code == 'Mock-data':
+        nm_mock = parser.get('rd-mock-parameters','nm_mock')
+        nm_mock = int(nm_mock)
+        amp_mock=np.empty(nm_mock+1)
+        ph_mock=np.empty(nm_mock+1)
+        for i in range(nm_mock+1):
+            amp_mockp = parser.get('rd-mock-parameters','amp'+str(i))
+            amp_mock[i] = float(amp_mockp)
+            ph_mockp=parser.get('rd-mock-parameters','phase'+str(i))
+            ph_mock[i] = float(ph_mockp)
+
+        npamps = np.concatenate((amp_mock,ph_mock))
+    return npamps
+
+def convert_m_af_2_w_tau_post(res,fitnoise=False):
+
+    samps = res.samples
+
+    if fitnoise:
+        fmass_spin=(samps.T)[-3:-1].T
+    else:
+        fmass_spin=(samps.T)[-2:].T
+    #fmass_spin=new_samples[-2:]
+    fmass_spin_dist=[None]*len(fmass_spin)
+    weight=np.exp(res.logwt - res.logz[-1])
+    for i in range(len(fmass_spin)):
+        fmass_spin_dist[i]=np.concatenate(dict_omega[qnm_model](fmass_spin[i,0],fmass_spin[i,1],2,2))
+
+    fmass_spin_dist_v2=np.asarray(fmass_spin_dist)
+    new_samples = dyfunc.resample_equal(fmass_spin_dist_v2, weight)
+
+    return new_samples
+
+def save_object(obj, filename):
+    with open(filename, 'wb') as output:  # Overwrites any existing file.
+ pickle.dump(obj, output, pickle.HIGHEST_PROTOCOL) + +def rm_files(files): + """ rm all old files """ + for i in files: + if os.path.exists(i): + os.remove(i) + \ No newline at end of file diff --git a/code_new/source_code/read_data.py b/code_new/source_code/read_data.py index e6e5cad..87185de 100644 --- a/code_new/source_code/read_data.py +++ b/code_new/source_code/read_data.py @@ -226,11 +226,11 @@ def create_output_files(output_folder,pars,file_type): """ if file_type=='corner_plot': - outfile = output_folder+'/Dynesty_'+str(sim_num)+'_'+model+'_nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'corner_plot.png' + outfile = output_folder+'/Dynesty_'+str(sim_num)+'_'+model+'_nmax_'+str(nmax)+'_tshift_'+str(tshift)+'_'+str(npoints)+'corner_plot.png' elif file_type=='corner_plot_extra': - outfile = output_folder+'/Dynesty_'+str(sim_num)+'_'+model+'_nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'corner_plot_extra.png' + outfile = output_folder+'/Dynesty_'+str(sim_num)+'_'+model+'_nmax_'+str(nmax)+'_tshift_'+str(tshift)+'_'+str(npoints)+'corner_plot_extra.png' elif file_type=='diagnosis': - outfile = output_folder+'/Dynesty_diagnosis'+str(sim_num)+'_'+model+'_nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'.png' + outfile = output_folder+'/Dynesty_diagnosis'+str(sim_num)+'_'+model+'_nmax_'+str(nmax)+'_tshift_'+str(tshift)+'_'+str(npoints)+'.png' elif file_type=='fit': outfile = output_folder+'/Fit_results_'+str(sim_num)+'tshift_'+str(tshift)+'_'+model+'_nmax_'+str(nmax)+'.png' elif file_type=='post_samples': -- GitLab
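A minimal sketch of how the overwrite option introduced by this patch is meant to be used (illustrative only; `overwrite` and `files` are the variables already defined in RD_Fits.py above, and `rd_ut` is the alias used there for rdown_utilities):

    import rdown_utilities as rd_ut

    # `files` lists the outputs built with rdata.create_output_files(...):
    # corner plots, diagnosis/fit plots, posterior samples, log-evidence summary
    # and best-fit values for the current run.
    if overwrite:
        rd_ut.rm_files(files)   # removes stale outputs; paths that do not exist yet are skipped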