Commit f5ff79af authored by Rayne Liu

ptemcee run files

parent 53f22fb9
+# -*- coding: utf-8 -*-
 #!/usr/bin/env python
 # coding: utf-8
 '''
 This script calculates the RDGW150914 constraints with ptemcee - specifically, the n=1 case, either varying or not varying the fundamental frequency. It produces the chain plot, the corner plot, the parameter constraints, and the data plotted with the 1-sigma band. Since we are working specifically with the n=1 case, we also overlay the chains of alpha_0 and alpha_1 in the corner plot, in order to demonstrate that the two are indistinguishable from each other.
@@ -31,17 +32,17 @@ from scipy.optimize import minimize
 #tshift: time shift after the strain peak
 #vary_fund: whether you vary the fundamental frequency. Works in the model_dv function.
-rootpath="/Users/RayneLiu"
+rootpath="/work/rayne.liu" #"/Users/RayneLiu"
 nmax=1
 tshift=19
-vary_fund = False
+vary_fund = True
 #sampler parameters
-npoints=5001
+npoints=101
 nwalkers = 32
 ntemps=12
 ndim = int(4*(nmax+1))
-burnin = 1000 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
+burnin = 10 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
 #This is trivial but often forgotten: this cannot be more than npoints! Usually 1/5~1/4 of npoints is what I observe.
 numbins = 21 #corner plot parameter - how many bins you want
 datacolor = '#105670' #'#4fa3a7'
......
#!/usr/bin/env python
# coding: utf-8
'''
This script calculates the RDGW150914 constraints with ptemcee - specifically, the n=1 case, either varying or not varying the fundamental frequency. It produces the chain plot, the corner plot, the parameter constraints, and the data plotted with the 1-sigma band. Since we are working specifically with the n=1 case, we also overlay the chains of alpha_0 and alpha_1 in the corner plot, in order to demonstrate that the two are indistinguishable from each other.
--- R^a_{yne} L^i_u, 08/09/2020
'''
import numpy as np
import corner
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib import rc
#plt.rcParams['font.family'] = 'DejaVu Sans'
#rc('text', usetex=True)
plt.rcParams.update({'font.size': 16.5})
import ptemcee
import math
import h5py
import inspect
import pandas as pd
import json
import qnm
import random
from scipy.optimize import minimize
#Remember to change the following global variables
#rootpath: root path to nr data
#npoints: number of points you are using for your sampling
#nmax: tone index --> nmax = 0 if fitting the fundamental tone
#tshift: time shift after the strain peak
#vary_fund: whether you vary the fundamental frequency. Works in the model_dv function.
rootpath="/work/rayne.liu" #"/Users/RayneLiu"
nmax=1
tshift=3.8
vary_fund = True
#sampler parameters
npoints=1000000
nwalkers = 42
ntemps=12
ndim = int(4*(nmax+1))
burnin = 200000 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
#This is trivial but often forgotten: this cannot be more than npoints! Usually 1/5~1/4 of npoints is what I observe.
numbins = 42 #corner plot parameter - how many bins you want
datacolor = '#105670' #'#4fa3a7'
pkcolor = '#f2c977' #'#ffb45f'
mediancolor = '#f7695c' #'#9b2814'
#Import data and necessary functions
#TimeOfMaximum
def FindTmaximum(y):
    #Determines the time at which the complex waveform reaches its maximum absolute value
    absval = y[:,1]*y[:,1] + y[:,2]*y[:,2]
    index = np.argmax(absval)
    timemax = y[index,0]
    return timemax
#This loads the 22 mode data
gw = {}
gw["SXS:BBH:0305"] = h5py.File(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/rhOverM_Asymptotic_GeometricUnits_CoM.h5", 'r')
gw_sxs_bbh_0305 = gw["SXS:BBH:0305"]["Extrapolated_N2.dir"]["Y_l2_m2.dat"]
# Remember to download metadata.json from the simulation with number: 0305. Download Lev6/metadata.json
# This postprocesses the metadata file to find the final mass and final spin
metadata = {}
with open(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/metadata.json") as file:
    metadata["SXS:BBH:0305"] = json.load(file)
af = metadata["SXS:BBH:0305"]['remnant_dimensionless_spin'][-1]
mf = metadata["SXS:BBH:0305"]['remnant_mass']
#times --> x axis of your data
times = gw_sxs_bbh_0305[:,0]
tmax=FindTmaximum(gw_sxs_bbh_0305)
t0=tmax +tshift
#Select the data from t0 onwards
position = np.argmax(times >= (t0))
gw_sxs_bbh_0305rd=gw_sxs_bbh_0305[position:-1]
timesrd=gw_sxs_bbh_0305[position:-1][:,0]
# Depending on nmax, load the first nmax+1 frequencies and damping times from the qnm package
omegas = [qnm.modes_cache(s=-2,l=2,m=2,n=i)(a=af)[0] for i in range (0,nmax+1)]
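#For reference, a minimal standalone check of what the line above loads (the spin 0.69 is
#illustrative, not the af read from the metadata; qnm may need qnm.download_data() on first use):
#    mode = qnm.modes_cache(s=-2, l=2, m=2, n=0)
#    omega, A, C = mode(a=0.69)  #complex frequency, separation constant, mixing coefficients
#    print(omega)                #roughly 0.532 - 0.081j in units of 1/M for this spin
#The script rescales to the remnant below: w = Re(omega)/mf, tau = -mf/Im(omega).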
#Fitting
#RD model for nmax tones. Amplitudes are in the (xn*Exp[i yn]) form, which is used here.
def model_dv(theta):
    #Your nmax might not align with the dim of theta. Better check it here.
    assert int(len(theta)/4) == nmax + 1, 'Please recheck your n and parameters'
    w = np.real(omegas)/mf
    tau = -mf/np.imag(omegas)
    dim = int(len(theta)/4)
    avars = theta[ : (nmax+1)]
    bvars = theta[(nmax+1) : 2*(nmax+1)]
    xvars = theta[2*(nmax+1) : 3*(nmax+1)]
    yvars = theta[3*(nmax+1) : ]
    if vary_fund == False:
        #These slices are views for array input, so this pins a_0 and b_0 to zero in place
        avars[0] = 0
        bvars[0] = 0
    ansatz = 0
    for i in range(dim):
        ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-(timesrd-timesrd[0])/(tau[i]*(1+bvars[i]))) * (np.cos((1+avars[i])*w[i]*timesrd)-1j*np.sin((1+avars[i])*w[i]*timesrd))
    # -1j to agree with the SXS convention
    return ansatz
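#In equation form, the ansatz assembled above is
#    h(t) = \sum_{n=0}^{nmax} x_n e^{i y_n} \exp[-(t - t_0) / ((1+\beta_n) \tau_n)] e^{-i (1+\alpha_n) \omega_n t},
#with \omega_n = Re(\tilde\omega_n)/M_f and \tau_n = -M_f/Im(\tilde\omega_n) from the qnm package,
#and \alpha_0 = \beta_0 = 0 when vary_fund == False. Note that, as coded, the decay uses the
#shifted time t - t_0 (t_0 = timesrd[0]) while the oscillatory phase uses the unshifted timesrd.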
# Logprior distribution. It defines the allowed ranges over which the variables can vary.
#It works for the (xn*Exp[i yn]) version.
def log_prior(theta):
    #Warning: we are specifically working with nmax=1, so the individual priors on the parameters are manually adjusted. This does not apply to other nmax's.
    a_0 = theta[0]
    a_1 = theta[1]
    b_0 = theta[2]
    b_1 = theta[3]
    x_0 = theta[4]
    x_1 = theta[5]
    y_0 = theta[6]
    y_1 = theta[7]
    if all([nmax == 1, 0 <= tshift <= 5, vary_fund == True, 0 <= x_0 <= 2.0, 0 <= y_0 <= 2*np.pi, -0.4 <= a_0 <= 0.4, -1.0 <= b_0 <= 1.6, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.6, -1.0 <= b_1 <= 2.0]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == True, 0 <= x_0 <= 1.5, 0 <= y_0 <= 2*np.pi, -1.0 <= a_0 <= 3.0, -1.0 <= b_0 <= 2.0, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    #PAY EXTRA ATTENTION TO THESE TWO CASES, SINCE WE SHIFTED y_0. THE CORNER PLOT LABELS NEED TO BE TREATED WITH CARE.
    elif all([nmax == 1, 0 <= tshift <= 5, vary_fund == False, 0 <= x_0 <= 2.0, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.6, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.2, -1.0 <= b_1 <= 2.8]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == False, 0 <= x_0 <= 0.6, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.2, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    return -np.inf
# LogLikelihood function. It is just a Gaussian loglikelihood based on the sum of the squared residuals.
def log_likelihood(theta):
    modelev = model_dv(theta)
    return -np.sum((gw_sxs_bbh_0305rd[:,1] - modelev.real)**2 + (gw_sxs_bbh_0305rd[:,2] - modelev.imag)**2)
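#Written out, and up to the dropped normalization constant, this is a fixed-variance Gaussian
#log-likelihood with 2\sigma^2 = 1:
#    ln L(\theta) = -\sum_t [ (Re h_t - Re m_t(\theta))^2 + (Im h_t - Im m_t(\theta))^2 ],
#where h_t is the NR strain sample and m_t(\theta) is the model_dv ansatz.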
# Logposterior distribution for the residuals case.
# The evidence is just a normalization factor
def log_probability(theta):
    lp = log_prior(theta)
    if not np.isfinite(lp):
        return -np.inf
    return lp + log_likelihood(theta)
#Fit with ptemcee
vary_param = float(vary_fund)
pos = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), 4.28313743e-01, random.uniform(2.5, 2.6) + (1-vary_param) * np.pi]])
for i in range(1, nmax+1):
    pos_aux = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), random.uniform(0.3,0.4), random.uniform(2.01, 2.02) + (1-vary_param) * np.pi]])
    pos = np.concatenate((pos, pos_aux), axis = 0)
#Flatten into the (a_0, ..., b_0, ..., x_0, ..., y_0, ...) parameter order, then broadcast to all temperatures and walkers with a small jitter
pos = pos.T.flatten()
pos = pos + 1e-5 * np.random.randn(ntemps, nwalkers, ndim)
sampler = ptemcee.Sampler(nwalkers, ndim, log_likelihood, log_prior, ntemps=ntemps)
sampler.run_mcmc(pos, npoints)
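#Before trusting a fixed burnin, it can help to quantify convergence instead of only eyeballing
#the chain plot. A minimal numpy-only sketch (this helper is not part of ptemcee; it assumes the
#cold chain lives in sampler.chain[0] with shape (nwalkers, nsteps, ndim), as used below):
def integrated_autocorr_time(x, c=5.0):
    #Crude integrated autocorrelation time of a 1-D chain via FFT, with Sokal's windowing
    x = np.asarray(x, dtype=float)
    x = x - x.mean()
    n = len(x)
    f = np.fft.rfft(x, n=2*n)
    acf = np.fft.irfft(f * np.conjugate(f))[:n].real
    acf /= acf[0]
    taus = 2.0 * np.cumsum(acf) - 1.0
    window = np.argmax(np.arange(n) >= c * taus) or (n - 1)
    return taus[window]
#Example (commented out so the run is unchanged): average over walkers, then require a burnin
#of a few tau per parameter, which is a common rule of thumb:
#    for i in range(ndim):
#        print(i, integrated_autocorr_time(sampler.chain[0].mean(axis=0)[:, i]))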
#Define labels and start plotting
paramlabels_a = [r'$\alpha_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_b = [r'$\beta_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_x = [r'$x_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_y = [r'$y_'+str(i)+'$' for i in range(nmax+1)] if vary_fund == True else [r'$y_'+str(i)+r'-\pi$' for i in range(nmax+1)]
paramlabels = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
#Need to delete alpha_0 and beta_0 from the labels for the corner plot when the fundamental mode is fixed
paramlabels_corner = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
if vary_fund == False:
    del paramlabels_corner[0]
    del paramlabels_corner[1] #after the first deletion, index 1 is beta_0
#Chain plot
fig, axes = plt.subplots(4*(nmax+1), 1, sharex=True, figsize=(12, 9*(nmax+1)))
for i in range(4*(nmax+1)):
    axes[i].plot(sampler.chain[0,:, :, i].T, color="k", alpha=0.4)
    axes[i].yaxis.set_major_locator(MaxNLocator(5))
    axes[i].set_ylabel(paramlabels[i])
axes[-1].set_xlabel('Iterations')
#plt.show()
fig.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_chain.pdf', format = 'pdf')
#Burn samples, calculate peak likelihood value (not necessarily so in atlas) and make corner plot
samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
#samples for corner plot
samples_corn = samples if vary_fund == True else np.delete(samples, np.s_[0,2], 1)
#Values with peak likelihood
lglk = np.array([log_likelihood(samples[i]) for i in range(len(samples))])
pk = samples[np.argmax(lglk)]
pk_corn = pk if vary_fund == True else np.delete(pk, [0,2])
#Now calculate the median (50th-percentile) values
median = np.median(samples_corn, axis=0)
figcorn = corner.corner(samples_corn, bins = numbins, hist_bin_factor = 5, color = datacolor, truths=pk_corn, truth_color = pkcolor, plot_contours = True, labels = paramlabels_corner, quantiles=(0.05, 0.16, 0.5, 0.84, 0.95), levels=[1-np.exp(-0.5), 1-np.exp(-1.64 ** 2/2)], show_titles=True)
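#The levels passed above are the 2-D masses enclosed at 1 and 1.64 sigma: for a two-dimensional
#Gaussian the probability within radius r*sigma is 1 - exp(-r^2/2). The quantiles
#(0.05, 0.16, 0.5, 0.84, 0.95) mark the 90% and 68% intervals and the median on the 1-D histograms.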
#Extract the axes in order to add more important line plots
naxes = len(pk_corn)
axes = np.array(figcorn.axes).reshape((naxes, naxes))
# Loop over the diagonal
for i in range(naxes):
    ax = axes[i, i]
    ax.axvline(median[i], color=mediancolor)
# Loop over the histograms
for yi in range(naxes):
    for xi in range(yi):
        ax = axes[yi, xi]
        ax.axvline(median[xi], color=mediancolor)
        ax.axhline(median[yi], color=mediancolor)
        ax.plot(median[xi], median[yi], color = mediancolor, marker = 's')
figcorn.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_corner.pdf', format = 'pdf')
#Now plot alpha_0 on top of alpha_1 - overlaid, not stacked; only valid for vary_fund == True
if vary_fund == True:
    samplea0 = samples.T[0]
    samplea1 = samples.T[1]
    fighist1 = plt.figure(figsize = (8, 6))
    n0, bins0, patches0 = plt.hist(samplea0, bins = numbins * 6, alpha = 0.5, label = r'$\alpha_0$')
    n1, bins1, patches1 = plt.hist(samplea1, bins = numbins * 10, alpha = 0.5, label = r'$\alpha_1$')
    plt.legend()
    fighist1.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histtop.pdf', format = 'pdf')
    #This version is stacked
    fighist2 = plt.figure(figsize = (8, 6))
    sn01, sbins01, spatches01 = plt.hist([samplea0, samplea1], numbins * 10, rwidth = 1, stacked = True, alpha = 0.5, label = [r'$\alpha_0$', r'$\alpha_1$'])
    plt.legend()
    fighist2.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histstack.pdf', format = 'pdf')
#Now plot the NR data against the mcmc fit data, together with the 1-sigma varying error data
onesig_bounds = np.array([np.percentile(samples[:, i], [16, 84]) for i in range(len(samples[0]))]).T
modelfitpk = model_dv(pk)
figband = plt.figure(figsize = (12, 9))
#Plot the draws that fall within the 1-sigma percentile bounds
for j in range(len(samples)):
    sample = samples[j]
    if np.all(onesig_bounds[0] <= sample) and np.all(sample <= onesig_bounds[1]):
        plt.plot(timesrd, model_dv(sample).real, "#79CAF2", alpha=0.3)
plt.plot(timesrd, gw_sxs_bbh_0305rd[:,1], "k", alpha=0.7, lw=2, label=r'NR_re')
plt.plot(timesrd, modelfitpk.real, "r", alpha=0.7, lw=2, label=r'FitMCmax_re')
plt.title(r'Comparison of the MC fit data and the $1-\sigma$ error band')
plt.legend()
plt.xlim(timesrd[0], timesrd[0]+80)
plt.xlabel("t")
plt.ylabel("h")
figband.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_band.pdf', format = 'pdf')
"""
"""
#!/usr/bin/env python
# coding: utf-8
'''
This script calculates the RDGW150914 constraints with ptemcee - specifically, the n=1 case, either varying or not varying the fundamental frequency. It produces the chain plot, the corner plot, the parameter constraints, and the data plotted with the 1-sigma band. Since we are working specifically with the n=1 case, we also overlay the chains of alpha_0 and alpha_1 in the corner plot, in order to demonstrate that the two are indistinguishable from each other.
--- R^a_{yne} L^i_u, 08/09/2020
'''
import numpy as np
import corner
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib import rc
#plt.rcParams['font.family'] = 'DejaVu Sans'
#rc('text', usetex=True)
plt.rcParams.update({'font.size': 16.5})
import ptemcee
import math
import h5py
import inspect
import pandas as pd
import json
import qnm
import random
from scipy.optimize import minimize
#Remember to change the following global variables
#rootpath: root path to nr data
#npoints: number of points you are using for your sampling
#nmax: tone index --> nmax = 0 if fitting the fundamental tone
#tshift: time shift after the strain peak
#vary_fund: whether you vary the fundamental frequency. Works in the model_dv function.
rootpath="/work/rayne.liu" #"/Users/RayneLiu"
nmax=1
tshift=19
vary_fund = True
#sampler parameters
npoints=1000000
nwalkers = 42
ntemps=12
ndim = int(4*(nmax+1))
burnin = 200000 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
#This is trivial but often forgotten: this cannot be more than npoints! Usually 1/5~1/4 of npoints is what I observe.
numbins = 42 #corner plot parameter - how many bins you want
datacolor = '#105670' #'#4fa3a7'
pkcolor = '#f2c977' #'#ffb45f'
mediancolor = '#f7695c' #'#9b2814'
#Import data and necessary functions
#TimeOfMaximum
def FindTmaximum(y):
    #Determines the time at which the complex waveform reaches its maximum absolute value
    absval = y[:,1]*y[:,1] + y[:,2]*y[:,2]
    index = np.argmax(absval)
    timemax = y[index,0]
    return timemax
#This loads the 22 mode data
gw = {}
gw["SXS:BBH:0305"] = h5py.File(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/rhOverM_Asymptotic_GeometricUnits_CoM.h5", 'r')
gw_sxs_bbh_0305 = gw["SXS:BBH:0305"]["Extrapolated_N2.dir"]["Y_l2_m2.dat"]
# Remember to download metadata.json from the simulation with number: 0305. Download Lev6/metadata.json
# This postprocesses the metadata file to find the final mass and final spin
metadata = {}
with open(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/metadata.json") as file:
    metadata["SXS:BBH:0305"] = json.load(file)
af = metadata["SXS:BBH:0305"]['remnant_dimensionless_spin'][-1]
mf = metadata["SXS:BBH:0305"]['remnant_mass']
#times --> x axis of your data
times = gw_sxs_bbh_0305[:,0]
tmax=FindTmaximum(gw_sxs_bbh_0305)
t0=tmax +tshift
#Select the data from t0 onwards
position = np.argmax(times >= (t0))
gw_sxs_bbh_0305rd=gw_sxs_bbh_0305[position:-1]
timesrd=gw_sxs_bbh_0305[position:-1][:,0]
# Depending on nmax, load the first nmax+1 frequencies and damping times from the qnm package
omegas = [qnm.modes_cache(s=-2,l=2,m=2,n=i)(a=af)[0] for i in range (0,nmax+1)]
#Fitting
#RD model for nmax tones. Amplitudes are in the (xn*Exp[i yn]) form, which is used here.
def model_dv(theta):
    #Your nmax might not align with the dim of theta. Better check it here.
    assert int(len(theta)/4) == nmax + 1, 'Please recheck your n and parameters'
    w = np.real(omegas)/mf
    tau = -mf/np.imag(omegas)
    dim = int(len(theta)/4)
    avars = theta[ : (nmax+1)]
    bvars = theta[(nmax+1) : 2*(nmax+1)]
    xvars = theta[2*(nmax+1) : 3*(nmax+1)]
    yvars = theta[3*(nmax+1) : ]
    if vary_fund == False:
        #These slices are views for array input, so this pins a_0 and b_0 to zero in place
        avars[0] = 0
        bvars[0] = 0
    ansatz = 0
    for i in range(dim):
        ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-(timesrd-timesrd[0])/(tau[i]*(1+bvars[i]))) * (np.cos((1+avars[i])*w[i]*timesrd)-1j*np.sin((1+avars[i])*w[i]*timesrd))
    # -1j to agree with the SXS convention
    return ansatz
# Logprior distribution. It defines the allowed ranges over which the variables can vary.
#It works for the (xn*Exp[i yn]) version.
def log_prior(theta):
    #Warning: we are specifically working with nmax=1, so the individual priors on the parameters are manually adjusted. This does not apply to other nmax's.
    a_0 = theta[0]
    a_1 = theta[1]
    b_0 = theta[2]
    b_1 = theta[3]
    x_0 = theta[4]
    x_1 = theta[5]
    y_0 = theta[6]
    y_1 = theta[7]
    if all([nmax == 1, 0 <= tshift <= 5, vary_fund == True, 0 <= x_0 <= 2.0, 0 <= y_0 <= 2*np.pi, -0.4 <= a_0 <= 0.4, -1.0 <= b_0 <= 1.6, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.6, -1.0 <= b_1 <= 2.0]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == True, 0 <= x_0 <= 1.5, 0 <= y_0 <= 2*np.pi, -1.0 <= a_0 <= 3.0, -1.0 <= b_0 <= 2.0, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    #PAY EXTRA ATTENTION TO THESE TWO CASES, SINCE WE SHIFTED y_0. THE CORNER PLOT LABELS NEED TO BE TREATED WITH CARE.
    elif all([nmax == 1, 0 <= tshift <= 5, vary_fund == False, 0 <= x_0 <= 2.0, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.6, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.2, -1.0 <= b_1 <= 2.8]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == False, 0 <= x_0 <= 0.6, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.2, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    return -np.inf
# LogLikelihood function. It is just a Gaussian loglikelihood based on the sum of the squared residuals.
def log_likelihood(theta):
    modelev = model_dv(theta)
    return -np.sum((gw_sxs_bbh_0305rd[:,1] - modelev.real)**2 + (gw_sxs_bbh_0305rd[:,2] - modelev.imag)**2)
# Logposterior distribution for the residuals case.
# The evidence is just a normalization factor
def log_probability(theta):
    lp = log_prior(theta)
    if not np.isfinite(lp):
        return -np.inf
    return lp + log_likelihood(theta)
#Fit with ptemcee
vary_param = float(vary_fund)
pos = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), 4.28313743e-01, random.uniform(2.5, 2.6) + (1-vary_param) * np.pi]])
for i in range(1, nmax+1):
    pos_aux = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), random.uniform(0.3,0.4), random.uniform(2.01, 2.02) + (1-vary_param) * np.pi]])
    pos = np.concatenate((pos, pos_aux), axis = 0)
#Flatten into the (a_0, ..., b_0, ..., x_0, ..., y_0, ...) parameter order, then broadcast to all temperatures and walkers with a small jitter
pos = pos.T.flatten()
pos = pos + 1e-5 * np.random.randn(ntemps, nwalkers, ndim)
sampler = ptemcee.Sampler(nwalkers, ndim, log_likelihood, log_prior, ntemps=ntemps)
sampler.run_mcmc(pos, npoints)
#Define labels and start plotting
paramlabels_a = [r'$\alpha_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_b = [r'$\beta_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_x = [r'$x_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_y = [r'$y_'+str(i)+'$' for i in range(nmax+1)] if vary_fund == True else [r'$y_'+str(i)+r'-\pi$' for i in range(nmax+1)]
paramlabels = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
#Need to delete alpha_0 and beta_0 from the labels for the corner plot when the fundamental mode is fixed
paramlabels_corner = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
if vary_fund == False:
    del paramlabels_corner[0]
    del paramlabels_corner[1] #after the first deletion, index 1 is beta_0
#Chain plot
fig, axes = plt.subplots(4*(nmax+1), 1, sharex=True, figsize=(12, 9*(nmax+1)))
for i in range(4*(nmax+1)):
    axes[i].plot(sampler.chain[0,:, :, i].T, color="k", alpha=0.4)
    axes[i].yaxis.set_major_locator(MaxNLocator(5))
    axes[i].set_ylabel(paramlabels[i])
axes[-1].set_xlabel('Iterations')
#plt.show()
fig.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_chain.pdf', format = 'pdf')
#Burn samples, calculate peak likelihood value (not necessarily so in atlas) and make corner plot
samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
#samples for corner plot
samples_corn = samples if vary_fund == True else np.delete(samples, np.s_[0,2], 1)
#Values with peak likelihood
lglk = np.array([log_likelihood(samples[i]) for i in range(len(samples))])
pk = samples[np.argmax(lglk)]
pk_corn = pk if vary_fund == True else np.delete(pk, [0,2])
#Now calculate the median (50th-percentile) values
median = np.median(samples_corn, axis=0)
figcorn = corner.corner(samples_corn, bins = numbins, hist_bin_factor = 5, color = datacolor, truths=pk_corn, truth_color = pkcolor, plot_contours = True, labels = paramlabels_corner, quantiles=(0.05, 0.16, 0.5, 0.84, 0.95), levels=[1-np.exp(-0.5), 1-np.exp(-1.64 ** 2/2)], show_titles=True)
#Extract the axes in order to add more important line plots
naxes = len(pk_corn)
axes = np.array(figcorn.axes).reshape((naxes, naxes))
# Loop over the diagonal
for i in range(naxes):
    ax = axes[i, i]
    ax.axvline(median[i], color=mediancolor)
# Loop over the histograms
for yi in range(naxes):
    for xi in range(yi):
        ax = axes[yi, xi]
        ax.axvline(median[xi], color=mediancolor)
        ax.axhline(median[yi], color=mediancolor)
        ax.plot(median[xi], median[yi], color = mediancolor, marker = 's')
figcorn.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_corner.pdf', format = 'pdf')
#Now plot alpha_0 on top of alpha_1 - overlaid, not stacked; only valid for vary_fund == True
if vary_fund == True:
    samplea0 = samples.T[0]
    samplea1 = samples.T[1]
    fighist1 = plt.figure(figsize = (8, 6))
    n0, bins0, patches0 = plt.hist(samplea0, bins = numbins * 6, alpha = 0.5, label = r'$\alpha_0$')
    n1, bins1, patches1 = plt.hist(samplea1, bins = numbins * 10, alpha = 0.5, label = r'$\alpha_1$')
    plt.legend()
    fighist1.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histtop.pdf', format = 'pdf')
    #This version is stacked
    fighist2 = plt.figure(figsize = (8, 6))
    sn01, sbins01, spatches01 = plt.hist([samplea0, samplea1], numbins * 10, rwidth = 1, stacked = True, alpha = 0.5, label = [r'$\alpha_0$', r'$\alpha_1$'])
    plt.legend()
    fighist2.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histstack.pdf', format = 'pdf')
#Now plot the NR data against the mcmc fit data, together with the 1-sigma varying error data
onesig_bounds = np.array([np.percentile(samples[:, i], [16, 84]) for i in range(len(samples[0]))]).T
modelfitpk = model_dv(pk)
figband = plt.figure(figsize = (12, 9))
#Plot the draws that fall within the 1-sigma percentile bounds
for j in range(len(samples)):
    sample = samples[j]
    if np.all(onesig_bounds[0] <= sample) and np.all(sample <= onesig_bounds[1]):
        plt.plot(timesrd, model_dv(sample).real, "#79CAF2", alpha=0.3)
plt.plot(timesrd, gw_sxs_bbh_0305rd[:,1], "k", alpha=0.7, lw=2, label=r'NR_re')
plt.plot(timesrd, modelfitpk.real, "r", alpha=0.7, lw=2, label=r'FitMCmax_re')
plt.title(r'Comparison of the MC fit data and the $1-\sigma$ error band')
plt.legend()
plt.xlim(timesrd[0], timesrd[0]+80)
plt.xlabel("t")
plt.ylabel("h")
figband.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_band.pdf', format = 'pdf')
"""
"""
#!/usr/bin/env python
# coding: utf-8
'''
This script calculates the RDGW150914 constraints with ptemcee - specifically, the n=1 case, either varying or not varying the fundamental frequency. It produces the chain plot, the corner plot, the parameter constraints, and the data plotted with the 1-sigma band. Since we are working specifically with the n=1 case, we also overlay the chains of alpha_0 and alpha_1 in the corner plot, in order to demonstrate that the two are indistinguishable from each other.
--- R^a_{yne} L^i_u, 08/09/2020
'''
import numpy as np
import corner
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib import rc
#plt.rcParams['font.family'] = 'DejaVu Sans'
#rc('text', usetex=True)
plt.rcParams.update({'font.size': 16.5})
import ptemcee
import math
import h5py
import inspect
import pandas as pd
import json
import qnm
import random
from scipy.optimize import minimize
#Remember to change the following global variables
#rootpath: root path to nr data
#npoints: number of points you are using for your sampling
#nmax: tone index --> nmax = 0 if fitting the fundamental tone
#tshift: time shift after the strain peak
#vary_fund: whether you vary the fundamental frequency. Works in the model_dv function.
rootpath="/work/rayne.liu" #"/Users/RayneLiu"
nmax=1
tshift=0
vary_fund = False
#sampler parameters
npoints=1000000
nwalkers = 42
ntemps=12
ndim = int(4*(nmax+1))
burnin = 200000 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
#This is trivial but often forgotten: this cannot be more than npoints! Usually 1/5~1/4 of npoints is what I observe.
numbins = 42 #corner plot parameter - how many bins you want
datacolor = '#105670' #'#4fa3a7'
pkcolor = '#f2c977' #'#ffb45f'
mediancolor = '#f7695c' #'#9b2814'
#Import data and necessary functions
#TimeOfMaximum
def FindTmaximum(y):
    #Determines the time at which the complex waveform reaches its maximum absolute value
    absval = y[:,1]*y[:,1] + y[:,2]*y[:,2]
    index = np.argmax(absval)
    timemax = y[index,0]
    return timemax
#This loads the 22 mode data
gw = {}
gw["SXS:BBH:0305"] = h5py.File(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/rhOverM_Asymptotic_GeometricUnits_CoM.h5", 'r')
gw_sxs_bbh_0305 = gw["SXS:BBH:0305"]["Extrapolated_N2.dir"]["Y_l2_m2.dat"]
# Remember to download metadata.json from the simulation with number: 0305. Download Lev6/metadata.json
# This postprocesses the metadata file to find the final mass and final spin
metadata = {}
with open(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/metadata.json") as file:
    metadata["SXS:BBH:0305"] = json.load(file)
af = metadata["SXS:BBH:0305"]['remnant_dimensionless_spin'][-1]
mf = metadata["SXS:BBH:0305"]['remnant_mass']
#times --> x axis of your data
times = gw_sxs_bbh_0305[:,0]
tmax=FindTmaximum(gw_sxs_bbh_0305)
t0=tmax +tshift
#Select the data from t0 onwards
position = np.argmax(times >= (t0))
gw_sxs_bbh_0305rd=gw_sxs_bbh_0305[position:-1]
timesrd=gw_sxs_bbh_0305[position:-1][:,0]
# Depending on nmax, load the first nmax+1 frequencies and damping times from the qnm package
omegas = [qnm.modes_cache(s=-2,l=2,m=2,n=i)(a=af)[0] for i in range (0,nmax+1)]
#Fitting
#RD model for nmax tones. Amplitudes are in the (xn*Exp[i yn]) form, which is used here.
def model_dv(theta):
    #Your nmax might not align with the dim of theta. Better check it here.
    assert int(len(theta)/4) == nmax + 1, 'Please recheck your n and parameters'
    w = np.real(omegas)/mf
    tau = -mf/np.imag(omegas)
    dim = int(len(theta)/4)
    avars = theta[ : (nmax+1)]
    bvars = theta[(nmax+1) : 2*(nmax+1)]
    xvars = theta[2*(nmax+1) : 3*(nmax+1)]
    yvars = theta[3*(nmax+1) : ]
    if vary_fund == False:
        #These slices are views for array input, so this pins a_0 and b_0 to zero in place
        avars[0] = 0
        bvars[0] = 0
    ansatz = 0
    for i in range(dim):
        ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-(timesrd-timesrd[0])/(tau[i]*(1+bvars[i]))) * (np.cos((1+avars[i])*w[i]*timesrd)-1j*np.sin((1+avars[i])*w[i]*timesrd))
    # -1j to agree with the SXS convention
    return ansatz
# Logprior distribution. It defines the allowed ranges over which the variables can vary.
#It works for the (xn*Exp[i yn]) version.
def log_prior(theta):
    #Warning: we are specifically working with nmax=1, so the individual priors on the parameters are manually adjusted. This does not apply to other nmax's.
    a_0 = theta[0]
    a_1 = theta[1]
    b_0 = theta[2]
    b_1 = theta[3]
    x_0 = theta[4]
    x_1 = theta[5]
    y_0 = theta[6]
    y_1 = theta[7]
    if all([nmax == 1, 0 <= tshift <= 5, vary_fund == True, 0 <= x_0 <= 2.0, 0 <= y_0 <= 2*np.pi, -0.4 <= a_0 <= 0.4, -1.0 <= b_0 <= 1.6, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.6, -1.0 <= b_1 <= 2.0]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == True, 0 <= x_0 <= 1.5, 0 <= y_0 <= 2*np.pi, -1.0 <= a_0 <= 3.0, -1.0 <= b_0 <= 2.0, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    #PAY EXTRA ATTENTION TO THESE TWO CASES, SINCE WE SHIFTED y_0. THE CORNER PLOT LABELS NEED TO BE TREATED WITH CARE.
    elif all([nmax == 1, 0 <= tshift <= 5, vary_fund == False, 0 <= x_0 <= 2.0, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.6, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.2, -1.0 <= b_1 <= 2.8]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == False, 0 <= x_0 <= 0.6, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.2, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    return -np.inf
# LogLikelihood function. It is just a Gaussian loglikelihood based on the sum of the squared residuals.
def log_likelihood(theta):
    modelev = model_dv(theta)
    return -np.sum((gw_sxs_bbh_0305rd[:,1] - modelev.real)**2 + (gw_sxs_bbh_0305rd[:,2] - modelev.imag)**2)
# Logposterior distribution for the residuals case.
# The evidence is just a normalization factor
def log_probability(theta):
    lp = log_prior(theta)
    if not np.isfinite(lp):
        return -np.inf
    return lp + log_likelihood(theta)
#Fit with ptemcee
vary_param = float(vary_fund)
pos = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), 4.28313743e-01, random.uniform(2.5, 2.6) + (1-vary_param) * np.pi]])
for i in range(1, nmax+1):
    pos_aux = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), random.uniform(0.3,0.4), random.uniform(2.01, 2.02) + (1-vary_param) * np.pi]])
    pos = np.concatenate((pos, pos_aux), axis = 0)
#Flatten into the (a_0, ..., b_0, ..., x_0, ..., y_0, ...) parameter order, then broadcast to all temperatures and walkers with a small jitter
pos = pos.T.flatten()
pos = pos + 1e-5 * np.random.randn(ntemps, nwalkers, ndim)
sampler = ptemcee.Sampler(nwalkers, ndim, log_likelihood, log_prior, ntemps=ntemps)
sampler.run_mcmc(pos, npoints)
#Define labels and start plotting
paramlabels_a = [r'$\alpha_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_b = [r'$\beta_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_x = [r'$x_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_y = [r'$y_'+str(i)+'$' for i in range(nmax+1)] if vary_fund == True else [r'$y_'+str(i)+r'-\pi$' for i in range(nmax+1)]
paramlabels = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
#Need to delete alpha_0 and beta_0 from the labels for the corner plot when the fundamental mode is fixed
paramlabels_corner = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
if vary_fund == False:
    del paramlabels_corner[0]
    del paramlabels_corner[1] #after the first deletion, index 1 is beta_0
#Chain plot
fig, axes = plt.subplots(4*(nmax+1), 1, sharex=True, figsize=(12, 9*(nmax+1)))
for i in range(4*(nmax+1)):
    axes[i].plot(sampler.chain[0,:, :, i].T, color="k", alpha=0.4)
    axes[i].yaxis.set_major_locator(MaxNLocator(5))
    axes[i].set_ylabel(paramlabels[i])
axes[-1].set_xlabel('Iterations')
#plt.show()
fig.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_chain.pdf', format = 'pdf')
#Burn samples, calculate peak likelihood value (not necessarily so in atlas) and make corner plot
samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
#samples for corner plot
samples_corn = samples if vary_fund == True else np.delete(samples, np.s_[0,2], 1)
#Values with peak likelihood
lglk = np.array([log_likelihood(samples[i]) for i in range(len(samples))])
pk = samples[np.argmax(lglk)]
pk_corn = pk if vary_fund == True else np.delete(pk, [0,2])
#Now calculate the median (50th-percentile) values
median = np.median(samples_corn, axis=0)
figcorn = corner.corner(samples_corn, bins = numbins, hist_bin_factor = 5, color = datacolor, truths=pk_corn, truth_color = pkcolor, plot_contours = True, labels = paramlabels_corner, quantiles=(0.05, 0.16, 0.5, 0.84, 0.95), levels=[1-np.exp(-0.5), 1-np.exp(-1.64 ** 2/2)], show_titles=True)
#Extract the axes in order to add more important line plots
naxes = len(pk_corn)
axes = np.array(figcorn.axes).reshape((naxes, naxes))
# Loop over the diagonal
for i in range(naxes):
    ax = axes[i, i]
    ax.axvline(median[i], color=mediancolor)
# Loop over the histograms
for yi in range(naxes):
    for xi in range(yi):
        ax = axes[yi, xi]
        ax.axvline(median[xi], color=mediancolor)
        ax.axhline(median[yi], color=mediancolor)
        ax.plot(median[xi], median[yi], color = mediancolor, marker = 's')
figcorn.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_corner.pdf', format = 'pdf')
#Now plot alpha_0 on top of alpha_1 - overlaid, not stacked; only valid for vary_fund == True
if vary_fund == True:
    samplea0 = samples.T[0]
    samplea1 = samples.T[1]
    fighist1 = plt.figure(figsize = (8, 6))
    n0, bins0, patches0 = plt.hist(samplea0, bins = numbins * 6, alpha = 0.5, label = r'$\alpha_0$')
    n1, bins1, patches1 = plt.hist(samplea1, bins = numbins * 10, alpha = 0.5, label = r'$\alpha_1$')
    plt.legend()
    fighist1.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histtop.pdf', format = 'pdf')
    #This version is stacked
    fighist2 = plt.figure(figsize = (8, 6))
    sn01, sbins01, spatches01 = plt.hist([samplea0, samplea1], numbins * 10, rwidth = 1, stacked = True, alpha = 0.5, label = [r'$\alpha_0$', r'$\alpha_1$'])
    plt.legend()
    fighist2.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histstack.pdf', format = 'pdf')
#Now plot the NR data against the mcmc fit data, together with the 1-sigma varying error data
onesig_bounds = np.array([np.percentile(samples[:, i], [16, 84]) for i in range(len(samples[0]))]).T
modelfitpk = model_dv(pk)
figband = plt.figure(figsize = (12, 9))
#Plot the draws that fall within the 1-sigma percentile bounds
for j in range(len(samples)):
    sample = samples[j]
    if np.all(onesig_bounds[0] <= sample) and np.all(sample <= onesig_bounds[1]):
        plt.plot(timesrd, model_dv(sample).real, "#79CAF2", alpha=0.3)
plt.plot(timesrd, gw_sxs_bbh_0305rd[:,1], "k", alpha=0.7, lw=2, label=r'NR_re')
plt.plot(timesrd, modelfitpk.real, "r", alpha=0.7, lw=2, label=r'FitMCmax_re')
plt.title(r'Comparison of the MC fit data and the $1-\sigma$ error band')
plt.legend()
plt.xlim(timesrd[0], timesrd[0]+80)
plt.xlabel("t")
plt.ylabel("h")
figband.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_band.pdf', format = 'pdf')
"""
"""
#!/usr/bin/env python
# coding: utf-8
'''
This script calculates the RDGW150914 constraints with ptemcee - specifically, the n=1 case, either varying or not varying the fundamental frequency. It produces the chain plot, the corner plot, the parameter constraints, and the data plotted with the 1-sigma band. Since we are working specifically with the n=1 case, we also overlay the chains of alpha_0 and alpha_1 in the corner plot, in order to demonstrate that the two are indistinguishable from each other.
--- R^a_{yne} L^i_u, 08/09/2020
'''
import numpy as np
import corner
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib import rc
#plt.rcParams['font.family'] = 'DejaVu Sans'
#rc('text', usetex=True)
plt.rcParams.update({'font.size': 16.5})
import ptemcee
import math
import h5py
import inspect
import pandas as pd
import json
import qnm
import random
from scipy.optimize import minimize
#Remember to change the following global variables
#rootpath: root path to nr data
#npoints: number of points you are using for your sampling
#nmax: tone index --> nmax = 0 if fitting the fundamental tone
#tshift: time shift after the strain peak
#vary_fund: whether you vary the fundamental frequency. Works in the model_dv function.
rootpath="/work/rayne.liu" #"/Users/RayneLiu"
nmax=1
tshift=19
vary_fund = False
#sampler parameters
npoints=1000000
nwalkers = 42
ntemps=12
ndim = int(4*(nmax+1))
burnin = 200000 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
#This is trivial but often forgotten: this cannot be more than npoints! Usually 1/5~1/4 of npoints is what I observe.
numbins = 42 #corner plot parameter - how many bins you want
datacolor = '#105670' #'#4fa3a7'
pkcolor = '#f2c977' #'#ffb45f'
mediancolor = '#f7695c' #'#9b2814'
#Import data and necessary functions
#TimeOfMaximum
def FindTmaximum(y):
    #Determines the time at which the complex waveform reaches its maximum absolute value
    absval = y[:,1]*y[:,1] + y[:,2]*y[:,2]
    index = np.argmax(absval)
    timemax = y[index,0]
    return timemax
#This loads the 22 mode data
gw = {}
gw["SXS:BBH:0305"] = h5py.File(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/rhOverM_Asymptotic_GeometricUnits_CoM.h5", 'r')
gw_sxs_bbh_0305 = gw["SXS:BBH:0305"]["Extrapolated_N2.dir"]["Y_l2_m2.dat"]
# Remember to download metadata.json from the simulation with number: 0305. Download Lev6/metadata.json
# This postprocesses the metadata file to find the final mass and final spin
metadata = {}
with open(rootpath+"/git/rdstackingproject/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/metadata.json") as file:
    metadata["SXS:BBH:0305"] = json.load(file)
af = metadata["SXS:BBH:0305"]['remnant_dimensionless_spin'][-1]
mf = metadata["SXS:BBH:0305"]['remnant_mass']
#times --> x axis of your data
times = gw_sxs_bbh_0305[:,0]
tmax=FindTmaximum(gw_sxs_bbh_0305)
t0=tmax +tshift
#Select the data from t0 onwards
position = np.argmax(times >= (t0))
gw_sxs_bbh_0305rd=gw_sxs_bbh_0305[position:-1]
timesrd=gw_sxs_bbh_0305[position:-1][:,0]
# Depending on nmax, load the first nmax+1 frequencies and damping times from the qnm package
omegas = [qnm.modes_cache(s=-2,l=2,m=2,n=i)(a=af)[0] for i in range (0,nmax+1)]
#Fitting
#RD model for nmax tones. Amplitudes are in the (xn*Exp[i yn]) form, which is used here.
def model_dv(theta):
    #Your nmax might not align with the dim of theta. Better check it here.
    assert int(len(theta)/4) == nmax + 1, 'Please recheck your n and parameters'
    w = np.real(omegas)/mf
    tau = -mf/np.imag(omegas)
    dim = int(len(theta)/4)
    avars = theta[ : (nmax+1)]
    bvars = theta[(nmax+1) : 2*(nmax+1)]
    xvars = theta[2*(nmax+1) : 3*(nmax+1)]
    yvars = theta[3*(nmax+1) : ]
    if vary_fund == False:
        #These slices are views for array input, so this pins a_0 and b_0 to zero in place
        avars[0] = 0
        bvars[0] = 0
    ansatz = 0
    for i in range(dim):
        ansatz += (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-(timesrd-timesrd[0])/(tau[i]*(1+bvars[i]))) * (np.cos((1+avars[i])*w[i]*timesrd)-1j*np.sin((1+avars[i])*w[i]*timesrd))
    # -1j to agree with the SXS convention
    return ansatz
# Logprior distribution. It defines the allowed ranges over which the variables can vary.
#It works for the (xn*Exp[i yn]) version.
def log_prior(theta):
    #Warning: we are specifically working with nmax=1, so the individual priors on the parameters are manually adjusted. This does not apply to other nmax's.
    a_0 = theta[0]
    a_1 = theta[1]
    b_0 = theta[2]
    b_1 = theta[3]
    x_0 = theta[4]
    x_1 = theta[5]
    y_0 = theta[6]
    y_1 = theta[7]
    if all([nmax == 1, 0 <= tshift <= 5, vary_fund == True, 0 <= x_0 <= 2.0, 0 <= y_0 <= 2*np.pi, -0.4 <= a_0 <= 0.4, -1.0 <= b_0 <= 1.6, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.6, -1.0 <= b_1 <= 2.0]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == True, 0 <= x_0 <= 1.5, 0 <= y_0 <= 2*np.pi, -1.0 <= a_0 <= 3.0, -1.0 <= b_0 <= 2.0, 0 <= x_1 <= 1.8, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    #PAY EXTRA ATTENTION TO THESE TWO CASES, SINCE WE SHIFTED y_0. THE CORNER PLOT LABELS NEED TO BE TREATED WITH CARE.
    elif all([nmax == 1, 0 <= tshift <= 5, vary_fund == False, 0 <= x_0 <= 2.0, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.6, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 1.2, -1.0 <= b_1 <= 2.8]):
        return 0.0
    elif all([nmax == 1, tshift == 19, vary_fund == False, 0 <= x_0 <= 0.6, 0 <= y_0-np.pi <= 2*np.pi, -1.0 <= a_0 <= 1.0, -1.0 <= b_0 <= 1.0, 0 <= x_1 <= 1.2, 0 <= y_1 <= 2*np.pi, -1.0 <= a_1 <= 3.0, -1.0 <= b_1 <= 2.0]):
        return 0.0
    return -np.inf
# LogLikelihood function. It is just a Gaussian loglikelihood based on the sum of the squared residuals.
def log_likelihood(theta):
    modelev = model_dv(theta)
    return -np.sum((gw_sxs_bbh_0305rd[:,1] - modelev.real)**2 + (gw_sxs_bbh_0305rd[:,2] - modelev.imag)**2)
# Logposterior distribution for the residuals case.
# The evidence is just a normalization factor
def log_probability(theta):
    lp = log_prior(theta)
    if not np.isfinite(lp):
        return -np.inf
    return lp + log_likelihood(theta)
#Fit with ptemcee
vary_param = float(vary_fund)
pos = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), 4.28313743e-01, random.uniform(2.5, 2.6) + (1-vary_param) * np.pi]])
for i in range(1, nmax+1):
    pos_aux = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), random.uniform(0.3,0.4), random.uniform(2.01, 2.02) + (1-vary_param) * np.pi]])
    pos = np.concatenate((pos, pos_aux), axis = 0)
#Flatten into the (a_0, ..., b_0, ..., x_0, ..., y_0, ...) parameter order, then broadcast to all temperatures and walkers with a small jitter
pos = pos.T.flatten()
pos = pos + 1e-5 * np.random.randn(ntemps, nwalkers, ndim)
sampler = ptemcee.Sampler(nwalkers, ndim, log_likelihood, log_prior, ntemps=ntemps)
sampler.run_mcmc(pos, npoints)
#Define labels and start plotting
paramlabels_a = [r'$\alpha_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_b = [r'$\beta_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_x = [r'$x_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_y = [r'$y_'+str(i)+'$' for i in range(nmax+1)] if vary_fund == True else [r'$y_'+str(i)+r'-\pi$' for i in range(nmax+1)]
paramlabels = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
#Need to delete alpha_0 and beta_0 from the labels for the corner plot when the fundamental mode is fixed
paramlabels_corner = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
if vary_fund == False:
    del paramlabels_corner[0]
    del paramlabels_corner[1] #after the first deletion, index 1 is beta_0
#Chain plot
fig, axes = plt.subplots(4*(nmax+1), 1, sharex=True, figsize=(12, 9*(nmax+1)))
for i in range(4*(nmax+1)):
    axes[i].plot(sampler.chain[0,:, :, i].T, color="k", alpha=0.4)
    axes[i].yaxis.set_major_locator(MaxNLocator(5))
    axes[i].set_ylabel(paramlabels[i])
axes[-1].set_xlabel('Iterations')
#plt.show()
fig.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_chain.pdf', format = 'pdf')
#Burn samples, calculate peak likelihood value (not necessarily so in atlas) and make corner plot
samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
#samples for corner plot
samples_corn = samples if vary_fund == True else np.delete(samples, np.s_[0,2], 1)
#Values with peak likelihood
lglk = np.array([log_likelihood(samples[i]) for i in range(len(samples))])
pk = samples[np.argmax(lglk)]
pk_corn = pk if vary_fund == True else np.delete(pk, [0,2])
#Now calculate the median (50th-percentile) values
median = np.median(samples_corn, axis=0)
figcorn = corner.corner(samples_corn, bins = numbins, hist_bin_factor = 5, color = datacolor, truths=pk_corn, truth_color = pkcolor, plot_contours = True, labels = paramlabels_corner, quantiles=(0.05, 0.16, 0.5, 0.84, 0.95), levels=[1-np.exp(-0.5), 1-np.exp(-1.64 ** 2/2)], show_titles=True)
#Extract the axes in order to add more important line plots
naxes = len(pk_corn)
axes = np.array(figcorn.axes).reshape((naxes, naxes))
# Loop over the diagonal
for i in range(naxes):
    ax = axes[i, i]
    ax.axvline(median[i], color=mediancolor)
# Loop over the histograms
for yi in range(naxes):
    for xi in range(yi):
        ax = axes[yi, xi]
        ax.axvline(median[xi], color=mediancolor)
        ax.axhline(median[yi], color=mediancolor)
        ax.plot(median[xi], median[yi], color = mediancolor, marker = 's')
figcorn.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_corner.pdf', format = 'pdf')
#Now plot alpha_0 on top of alpha_1 - overlaid, not stacked; only valid for vary_fund == True
if vary_fund == True:
    samplea0 = samples.T[0]
    samplea1 = samples.T[1]
    fighist1 = plt.figure(figsize = (8, 6))
    n0, bins0, patches0 = plt.hist(samplea0, bins = numbins * 6, alpha = 0.5, label = r'$\alpha_0$')
    n1, bins1, patches1 = plt.hist(samplea1, bins = numbins * 10, alpha = 0.5, label = r'$\alpha_1$')
    plt.legend()
    fighist1.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histtop.pdf', format = 'pdf')
    #This version is stacked
    fighist2 = plt.figure(figsize = (8, 6))
    sn01, sbins01, spatches01 = plt.hist([samplea0, samplea1], numbins * 10, rwidth = 1, stacked = True, alpha = 0.5, label = [r'$\alpha_0$', r'$\alpha_1$'])
    plt.legend()
    fighist2.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_histstack.pdf', format = 'pdf')
#Now plot the NR data against the mcmc fit data, together with the 1-sigma varying error data
onesig_bounds = np.array([np.percentile(samples[:, i], [16, 84]) for i in range(len(samples[0]))]).T
modelfitpk = model_dv(pk)
figband = plt.figure(figsize = (12, 9))
#Plot the draws that fall within the 1-sigma percentile bounds
for j in range(len(samples)):
    sample = samples[j]
    if np.all(onesig_bounds[0] <= sample) and np.all(sample <= onesig_bounds[1]):
        plt.plot(timesrd, model_dv(sample).real, "#79CAF2", alpha=0.3)
plt.plot(timesrd, gw_sxs_bbh_0305rd[:,1], "k", alpha=0.7, lw=2, label=r'NR_re')
plt.plot(timesrd, modelfitpk.real, "r", alpha=0.7, lw=2, label=r'FitMCmax_re')
plt.title(r'Comparison of the MC fit data and the $1-\sigma$ error band')
plt.legend()
plt.xlim(timesrd[0], timesrd[0]+80)
plt.xlabel("t")
plt.ylabel("h")
figband.savefig(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='+str(nmax)+'_tshift='+str(tshift)+'_'+str(npoints)+'pt_band.pdf', format = 'pdf')
"""
"""
universe = vanilla
getenv = true
# run script -- make sure that condor has execute permission for this file (chmod a+x script.py)
executable = RDGW150914_ptemcee.py
# file to dump stdout (this directory should exist)
output = RDGW150914_ptemcee.out
# file to dump stderr
error = RDGW150914_ptemcee.err
# condor logs
log = RDGW150914_ptemcee.log
initialdir = .
notify_user = rl746@cornell.edu
notification = Complete
arguments = "-processid $(Process)"
request_memory = 16GB
request_cpus = 1
on_exit_remove = (ExitBySignal == False) || ((ExitBySignal == True) && (ExitSignal != 11))
accounting_group = aei.dev.test_dynesty
queue 1
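# The five submit descriptions in this directory differ only in the script index. As a sketch,
# a single description with a queue list can replace them on reasonably recent HTCondor versions
# (the idx variable is hypothetical, and the unnumbered first script would need a consistent
# name such as RDGW150914_ptemcee0.py):
#
# universe = vanilla
# getenv = true
# executable = RDGW150914_ptemcee$(idx).py
# output = RDGW150914_ptemcee$(idx).out
# error = RDGW150914_ptemcee$(idx).err
# log = RDGW150914_ptemcee$(idx).log
# initialdir = .
# notify_user = rl746@cornell.edu
# notification = Complete
# arguments = "-processid $(Process)"
# request_memory = 16GB
# request_cpus = 1
# on_exit_remove = (ExitBySignal == False) || ((ExitBySignal == True) && (ExitSignal != 11))
# accounting_group = aei.dev.test_dynesty
# queue idx in (0 1 2 3 4)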
universe = vanilla
getenv = true
# run script -- make sure that condor has execute permission for this file (chmod a+x script.py)
executable = RDGW150914_ptemcee1.py
# file to dump stdout (this directory should exist)
output = RDGW150914_ptemcee1.out
# file to dump stderr
error = RDGW150914_ptemcee1.err
# condor logs
log = RDGW150914_ptemcee1.log
initialdir = .
notify_user = rl746@cornell.edu
notification = Complete
arguments = "-processid $(Process)"
request_memory = 16GB
request_cpus = 1
on_exit_remove = (ExitBySignal == False) || ((ExitBySignal == True) && (ExitSignal != 11))
accounting_group = aei.dev.test_dynesty
queue 1
universe = vanilla
getenv = true
# run script -- make sure that condor has execute permission for this file (chmod a+x script.py)
executable = RDGW150914_ptemcee2.py
# file to dump stdout (this directory should exist)
output = RDGW150914_ptemcee2.out
# file to dump stderr
error = RDGW150914_ptemcee2.err
# condor logs
log = RDGW150914_ptemcee2.log
initialdir = .
notify_user = rl746@cornell.edu
notification = Complete
arguments = "-processid $(Process)"
request_memory = 16GB
request_cpus = 1
on_exit_remove = (ExitBySignal == False) || ((ExitBySignal == True) && (ExitSignal != 11))
accounting_group = aei.dev.test_dynesty
queue 1
universe = vanilla
getenv = true
# run script -- make sure that condor has execute permission for this file (chmod a+x script.py)
executable = RDGW150914_ptemcee3.py
# file to dump stdout (this directory should exist)
output = RDGW150914_ptemcee3.out
# file to dump stderr
error = RDGW150914_ptemcee3.err
# condor logs
log = RDGW150914_ptemcee3.log
initialdir = .
notify_user = rl746@cornell.edu
notification = Complete
arguments = "-processid $(Process)"
request_memory = 16GB
request_cpus = 1
on_exit_remove = (ExitBySignal == False) || ((ExitBySignal == True) && (ExitSignal != 11))
accounting_group = aei.dev.test_dynesty
queue 1
universe = vanilla
getenv = true
# run script -- make sure that condor has execute permission for this file (chmod a+x script.py)
executable = RDGW150914_ptemcee4.py
# file to dump stdout (this directory should exist)
output = RDGW150914_ptemcee4.out
# file to dump stderr
error = RDGW150914_ptemcee4.err
# condor logs
log = RDGW150914_ptemcee4.log
initialdir = .
notify_user = rl746@cornell.edu
notification = Complete
arguments = "-processid $(Process)"
request_memory = 16GB
request_cpus = 1
on_exit_remove = (ExitBySignal == False) || ((ExitBySignal == True) && (ExitSignal != 11))
accounting_group = aei.dev.test_dynesty
queue 1