Skip to content
Snippets Groups Projects
Commit fe566338 authored by Rayne Liu's avatar Rayne Liu
Browse files

Included the nmax=0 scenario

parent 804f04df
No related branches found
No related tags found
No related merge requests found
......@@ -7,6 +7,10 @@ warnings.simplefilter("ignore") #Used to suppress the RuntimeWarnings. But if yo
This script calculates the RDGW150914 constraints with ptemcee - specifically, the n=1 case varying the fundamental frequencies, tshift = 0. It produces the chain plot, corner plot, parameter constraints, and data plotted with the 1-sigma band. Since we are working specifically for the n=1 case, we also compare alpha_0 and alpha_1 in a histogram, in order to demonstrate that the two are indistinguishable from each other.
--- R^a_{yne} L^i_u, 08/09/2020
Adapted to include the nmax = 0 case.
---R^a_{yne} L^i_u, 08/20/2020, don't know what to do now
'''
import numpy as np
......@@ -20,14 +24,12 @@ plt.rcParams.update({'font.size': 16.5})
import ptemcee
from pycbc.pool import choose_pool
import math
import h5py
import inspect
import pandas as pd
import json
import qnm
import random
from scipy.optimize import minimize
#Remember to change the following global variables
#rootpath: root path to nr data
......@@ -36,17 +38,18 @@ from scipy.optimize import minimize
#tshift: time shift after the strain peak
#vary_fund: whether you vary the fundamental frequency. Works in the model_dv function.
rootpath= "/work/rayne.liu"#"/Users/RayneLiu"
nmax=1
rootpath= "/Users/RayneLiu"#"/work/rayne.liu"
nmax=0
tshift=0
vary_fund = True
#sampler parameters
npoints=19000
npoints=2400
nwalkers = 1200
ntemps=137
ndim = int(4*(nmax+1))
burnin = 9000 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
ntemps=16
dim = nmax+1
ndim = 4*dim
burnin = 1200 #How many points do you burn before doing the corner plot. You need to watch the convergence of the chain plot a bit.
#This is trivial but often forgotten: this cannot be more than npoints! Usually 1/5~1/4 npoints is what I observe.
numbins = 42 #corner plot parameter - how many bins you want
datacolor = '#105670' #'#4fa3a7'
......@@ -94,8 +97,9 @@ gw_sxs_bbh_0305rd=gw_sxs_bbh_0305[position:-1]
timesrd=gw_sxs_bbh_0305[position:-1][:,0]
# Depending on nmax, you load nmax number of freqs. and damping times from the qnm package
omegas = [qnm.modes_cache(s=-2,l=2,m=2,n=i)(a=af)[0] for i in range (0,nmax+1)]
omegas = [qnm.modes_cache(s=-2,l=2,m=2,n=i)(a=af)[0] for i in range (0,dim)]
w = (np.real(omegas))/mf
tau=-1/(np.imag(omegas))*mf
......@@ -104,15 +108,12 @@ omegas = [qnm.modes_cache(s=-2,l=2,m=2,n=i)(a=af)[0] for i in range (0,nmax+1)]
def model_dv(theta):
#x0, y0= theta
#Your nmax might not align with the dim of theta. Better check it here.
assert int(len(theta)/4) == nmax + 1, 'Please recheck your n and parameters'
w = (np.real(omegas))/mf
tau=-1/(np.imag(omegas))*mf
dim =int(len(theta)/4)
assert int(len(theta)/4) == dim, 'Please recheck your n and parameters'
avars = theta[ : (nmax+1)]
bvars = theta[(nmax+1) : 2*(nmax+1)]
xvars = theta[2*(nmax+1) : 3*(nmax+1)]
yvars = theta[3*(nmax+1) : ]
avars = theta[ : (dim)]
bvars = theta[(dim) : 2*(dim)]
xvars = theta[2*(dim) : 3*(dim)]
yvars = theta[3*(dim) : ]
if vary_fund == False:
avars[0]=0
......@@ -131,24 +132,24 @@ def model_dv(theta):
#It works for the (xn*Exp[iyn]) version.
def log_prior(theta):
#Warning: we are specifically working with nmax=1 so here the individual priors for the parameters are manually adjusted. This does not apply to all other nmax's.
a_0 = theta[0]
a_1 = theta[1]
b_0 = theta[2]
b_1 = theta[3]
x_0 = theta[4]
x_1 = theta[5]
y_0 = theta[6]
y_1 = theta[7]
if all([nmax == 1, 0 <= tshift <= 5, vary_fund == True, -1.2 <= a_0 <= 1.2, -1.5 <= a_1 <= 1.5, -4.0 <= b_0 <= 9.0, -4.0 <= b_1 <= 21.0, 0 <= x_0 <= 1.6, 0 <= x_1 <= 1.4, 0 <= y_0 <= 2*np.pi, 0 <= y_1 <= 2*np.pi]):
avars = theta[ : (dim)]
bvars = theta[(dim) : 2*(dim)]
xvars = theta[2*(dim) : 3*(dim)]
yvars = theta[3*(dim) : ]
if nmax == 0:
if all([0 <= tshift <= 5, vary_fund == True, -0.45 <= avars[0] <= 0.05, -0.95 <= bvars[0] <= 0.95, 0 <= xvars[0] <= 2.4, -np.pi <= yvars[0] <= np.pi]):
return 0.0
elif all([nmax == 1, tshift == 19, vary_fund == True, -10.0 <= a_0 <= 10.0, -10.0 <= a_1 <= 10.0, -20.0 <= b_0 <= 30.0, -25.0 <= b_1 <= 30.0, 0 <= x_0 <= 0.6, 0 <= x_1 <= 0.9, 0 <= y_0 <= 2*np.pi, 0 <= y_1 <= 2*np.pi]):
elif nmax == 1:
if all([0 <= tshift <= 5, vary_fund == True, -1.2 <= avars[0] <= 1.2, -1.5 <= avars[1] <= 1.5, -4.0 <= bvars[0] <= 9.0, -4.0 <= bvars[1] <= 21.0, 0 <= xvars[0] <= 1.6, 0 <= xvars[1] <= 1.4, 0 <= yvars[0] <= 2*np.pi, 0 <= yvars[1] <= 2*np.pi]):
return 0.0
elif all([tshift == 19, vary_fund == True, -10.0 <= avars[0] <= 10.0, -10.0 <= avars[1] <= 10.0, -20.0 <= bvars[0] <= 30.0, -25.0 <= bvars[1] <= 30.0, 0 <= xvars[0] <= 0.6, 0 <= xvars[1] <= 0.9, 0 <= yvars[0] <= 2*np.pi, 0 <= yvars[1] <= 2*np.pi]):
return 0.0
#PAY EXTRA ATTENTION TO THESE TWO CASES, SINCE WE SHIFTED y's. THE CORNER PLOTS LABELS NEED TO BE TREATED WITH CARE.
elif all([nmax == 1, 0 <= tshift <= 5, vary_fund == False, -10.0 <= a_0 <= 10.0, -1.5 <= a_1 <= 1.5, -9.0 <= b_0 <= 9.0, -6.0 <= b_1 <= 20.0, 0 <= x_0 <= 2.4, 0 <= x_1 <= 2.5, 0 <= y_0-np.pi <= 2*np.pi, 0 <= y_1-np.pi <= 2*np.pi,]):
elif all([0 <= tshift <= 5, vary_fund == False, -10.0 <= avars[0] <= 10.0, -1.5 <= avars[1] <= 1.5, -9.0 <= bvars[0] <= 9.0, -6.0 <= bvars[1] <= 20.0, 0 <= xvars[0] <= 2.4, 0 <= xvars[1] <= 2.5, 0 <= yvars[0]-np.pi <= 2*np.pi, 0 <= yvars[1]-np.pi <= 2*np.pi,]):
return 0.0
elif all([nmax == 1, tshift == 19, vary_fund == False, -10.0 <= a_0 <= 10.0, -8.0 <= a_1 <= 8.0, -9.0 <= b_0 <= 9.0, -10.0 <= b_1 <= 42.0, 0 <= x_0 <= 0.6, 0 <= x_1 <= 0.7, 0 <= y_0-np.pi <= 2*np.pi, 0 <= y_1-np.pi <= 2*np.pi]):
elif all([tshift == 19, vary_fund == False, -10.0 <= avars[0] <= 10.0, -8.0 <= avars[1] <= 8.0, -9.0 <= bvars[0] <= 9.0, -10.0 <= bvars[1] <= 42.0, 0 <= xvars[0] <= 0.6, 0 <= xvars[1] <= 0.7, 0 <= yvars[0]-np.pi <= 2*np.pi, 0 <= yvars[1]-np.pi <= 2*np.pi]):
return 0.0
return -np.inf
......@@ -167,24 +168,21 @@ def log_likelihood(theta):
# The evidence is just a normalization factor
def log_probability(theta):
    """Log of the (unnormalized) posterior probability for the sampler.

    Returns log_prior(theta) + log_likelihood(theta); the evidence is just a
    normalization factor and is omitted. Parameters outside the prior support
    short-circuit to -inf so the (expensive) likelihood is never evaluated.

    theta : flat parameter vector (amplitude/phase/frequency-deviation
            parameters as sliced by log_prior/model_dv).
    """
    lp = log_prior(theta)
    # NOTE(review): debug prints on every sampler call — noisy and slow for
    # nwalkers*ntemps evaluations per step; presumably left in for debugging.
    print('lp:')
    print(lp)
    if not np.isfinite(lp):
        # Outside the prior support: reject without computing the likelihood.
        return -np.inf
    return lp + log_likelihood(theta)
#Fit with ptemcee
#Set the number of cores of your processors
pool = choose_pool(24)
pool.size = 24
vary_param = float(vary_fund)
np.random.seed(42)
pos = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), 4.28313743e-01, random.uniform(2.5, 2.6) + (1-vary_param) * np.pi]])
for i in range (1,nmax+1):
pos_aux = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), random.uniform(0.3,0.4), random.uniform(2.01, 2.02) + (1-vary_param) * np.pi]])
pos = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), 4.28313743e-01, random.uniform(0.5, 0.6) + (1-vary_param) * np.pi]])
for i in range (1,dim):
pos_aux = np.array([[random.uniform(-0.1,0.1), random.uniform(-0.1,0.1), random.uniform(0.3,0.4), random.uniform(0.01, 0.02) + (1-vary_param) * np.pi]])
pos = np.concatenate((pos, pos_aux), axis = 0)
pos = pos.T.flatten()
......@@ -198,10 +196,10 @@ sampler.run_mcmc(pos,npoints)
#Define labels and start plotting
paramlabels_a = [r'$\alpha_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_b = [r'$\beta_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_x = [r'$x_'+str(i)+'$' for i in range (nmax+1)]
paramlabels_y = [r'$y_'+str(i)+'$' for i in range (nmax+1)] if vary_fund == True else ['$y_'+str(i)+'-\pi$' for i in range (nmax+1)]
paramlabels_a = [r'$\alpha_'+str(i)+'$' for i in range (dim)]
paramlabels_b = [r'$\beta_'+str(i)+'$' for i in range (dim)]
paramlabels_x = [r'$x_'+str(i)+'$' for i in range (dim)]
paramlabels_y = [r'$y_'+str(i)+'$' for i in range (dim)] if vary_fund == True else ['$y_'+str(i)+'-\pi$' for i in range (dim)]
paramlabels = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
#Need to delete alpha_0 and alpha_1 for the corner plot
paramlabels_corner = paramlabels_a + paramlabels_b + paramlabels_x + paramlabels_y
......@@ -212,8 +210,8 @@ if vary_fund == False:
#Chain plot
fig, axes = plt.subplots(4*(nmax+1), 1, sharex=True, figsize=(12, 9*(nmax+1)))
for i in range(4*(nmax+1)):
fig, axes = plt.subplots(ndim, 1, sharex=True, figsize=(12, 9*(dim)))
for i in range(ndim):
axes[i].plot(sampler.chain[0,:, :, i].T, color="k", alpha=0.4, rasterized=True)
axes[i].yaxis.set_major_locator(MaxNLocator(5))
axes[i].set_ylabel(paramlabels[i])
......@@ -236,10 +234,8 @@ pk = samples[np.argmax(lglk)]
pk_corn = pk if vary_fund == True else np.delete(pk, [0,2])
#y_0 range needs some massaging to make the plot. But in order to make the whole picture consistent, better change the range of y_1 too.
if vary_fund == False:
samples_corn.T[-1] = samples_corn.T[-1] - np.pi
samples_corn.T[-2] = samples_corn.T[-2] - np.pi #This indeed changes samples_corn itself
pk[-1] -= np.pi
pk[-2] -= np.pi
samples_corn.T[-dim:] -= np.pi #This indeed changes samples_corn itself
pk[-dim:] -= np.pi
#print('pkFalse:')
#print(pk)
......@@ -298,7 +294,7 @@ df4.to_hdf(rootpath+'/git/rdstackingproject/plotsmc/vary'+str(vary_fund)+'nmax='
#Now plot alpha_0 on top of alpha_1 - only on top, not stacked, and only valid for vary_fund == True
if vary_fund == True:
if nmax == 1 and vary_fund == True:
samplea0 = samplesT[0]
samplea1 = samplesT[1]
fighist1 = plt.figure(figsize = (8, 6))
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment