RDGW150914.ipynb
    In [76]:
    import numpy as np
    import corner
    %matplotlib inline
    import matplotlib.pyplot as plt
    from matplotlib.ticker import MaxNLocator
    import emcee
    import math
    import h5py
    import inspect
    import pandas as pd
    import json
    import qnm
    import random
    import ptemcee
    from scipy.optimize import minimize
    In [77]:
    rootpath="/Users/xisco"
    npoints=20000      # number of MCMC steps per walker
    nmax=0             # number of overtones included beyond the fundamental n=0 mode
    tshift=10          # fit starts this long after the strain peak (in units of M)
    In [78]:
    def FindTmaximum(y):
        # return the time at which the (2,2) strain amplitude |h|^2 = h_+^2 + h_x^2 peaks
        absval = y[:,1]*y[:,1]+y[:,2]*y[:,2]
        index = np.argmax(absval)
        timemax = y[index,0]
        return timemax
    In [79]:
    gw = {}
    gw["SXS:BBH:0305"] = h5py.File(rootpath+"/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/rhOverM_Asymptotic_GeometricUnits_CoM.h5", 'r')
    gw_sxs_bbh_0305 = gw["SXS:BBH:0305"]["Extrapolated_N2.dir"]["Y_l2_m2.dat"]
    In [80]:
    metadata = {}
    with open(rootpath+"/SXS/BBH_SKS_d14.3_q1.22_sA_0_0_0.330_sB_0_0_-0.440/Lev6/metadata.json") as file:
        metadata["SXS:BBH:0305"] = json.load(file)
    
    af = metadata["SXS:BBH:0305"]['remnant_dimensionless_spin'][-1]   # z-component of the remnant spin
    mf = metadata["SXS:BBH:0305"]['remnant_mass']
    In [82]:
    times = gw_sxs_bbh_0305[:,0]
    tmax=FindTmaximum(gw_sxs_bbh_0305)
    t0=tmax +tshift
    In [83]:
    # keep only the ringdown portion, starting tshift after the peak
    position = np.argmax(times >= t0)
    gw_sxs_bbh_0305rd = gw_sxs_bbh_0305[position:-1]
    timesrd = gw_sxs_bbh_0305rd[:,0]
    In [84]:
    def EradUIB2017(eta,chi1,chi2):
        # phenomenological fit for the radiated energy (as a fraction of the initial total
        # mass) in terms of the symmetric mass ratio eta and the aligned spin components
        m1=0.5*(1+(1-4*eta)**0.5)
        m2=0.5*(1-(1-4*eta)**0.5)
        S= (m1**2*chi1 + m2**2*chi2)/(m1*m1 + m2*m2)
        
        erad=(((1-(2*(2)**0.5)/3)*eta+0.5609904135313374*eta**2-0.84667563764404*eta**3+3.145145224278187*eta**4)*(1+S**3 *(-0.6320191645391563+ 4.952698546796005*eta-10.023747993978121*eta**2)+S**2*(-0.17762802148331427+ 2.176667900182948*eta**2)+S*(-0.13084389181783257- 1.1387311580238488*eta+5.49074464410971*eta**2)))/(1+S*(-0.9919475346968611+ 0.367620218664352*eta+4.274567337924067*eta**2))-0.01978238971523653*S*(1-4.91667749015812*eta)*(1-4*eta)**0.5 *eta *(chi1-chi2)-0.09803730445895877*(1-4*eta)**0.5*(1-3.2283713377939134*eta)*eta**2 *(chi1-chi2)+0.01118530335431078*eta**3 *(chi1-chi2)**2
        return  erad
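    The fit above is not used further below; as an illustrative consistency check one could compare 1 - E_rad with the catalogue remnant mass. A minimal sketch (the mass ratio and spins here are read off the name of the SXS run directory and are assumptions, not values taken from the metadata file):

    q = 1.22                                  # assumed mass ratio, from the directory name
    eta = q/(1.0+q)**2                        # symmetric mass ratio
    erad = EradUIB2017(eta, 0.330, -0.440)    # assumed aligned spin components
    print(erad, 1.0-erad, mf)                 # 1 - E_rad should be close to the remnant mass mf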
    In [85]:
    # complex (l,m,n)=(2,2,n) QNM frequencies of the remnant, for n = 0..nmax
    omegas = []
    for i in range (0,nmax+1):
        grav_220 = qnm.modes_cache(s=-2,l=2,m=2,n=i)
        omega = grav_220(a=af)[0]
        omegas.append(omega)
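    For reference, the n = 0 frequency and damping time rescaled by the remnant mass, which appear to be the source of the numbers hard-coded in model_dv_af below:

    w0 = np.real(omegas[0])/mf      # real ringdown frequency, in units of 1/M
    tau0 = -mf/np.imag(omegas[0])   # damping time, in units of M
    print(w0, tau0)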
    In [86]:
    # deprecated: single-mode version of the ringdown model
    def model(theta):
        x0, y0, a0, b0 = theta
        #x0, y0= theta
        w = (np.real(omegas))[0]/mf
        tau=-1/(np.imag(omegas))[0]*mf
        
    
        # -1j to agree with SXS convention
        #return (x0+y0*1j)*(np.exp(-(timesrd-timesrd[0])/(tau*(1+b0))))*(np.cos((1+a0)*w*timesrd)-1j*np.sin((1+a0)*w*timesrd))
    
        return (x0+1j*y0)*np.exp(-(timesrd-timesrd[0])/tau)*(np.cos(w*timesrd)-1j*np.sin(w*timesrd))
    In [87]:
    def model_dv(theta):
        # QNM frequencies and damping times in units of the remnant mass
        w = np.real(omegas)/mf
        tau = -mf/np.imag(omegas)
        dim = int(len(theta)/4)

        # unpack (x_i, y_i, a_i, b_i) for each mode
        xvars = []
        yvars = []
        avars = []
        bvars = []
        for i in range (0,dim):
            xvars.append(theta[4*i])
            yvars.append(theta[4*i+1])
            avars.append(theta[4*i+2])
            bvars.append(theta[4*i+3])

        # the fundamental mode is kept at its unperturbed frequency and damping time
        avars[0] = 0
        bvars[0] = 0

        # -1j to agree with SXS convention
        ansatz = 0
        for i in range (0,dim):
            ansatz = ansatz + (xvars[i]+1j*yvars[i])*np.exp(-(timesrd-timesrd[0])/(tau[i]*(1+bvars[i])))\
            *(np.cos((1+avars[i])*w[i]*timesrd)-1j*np.sin((1+avars[i])*w[i]*timesrd))
        return ansatz
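    In equation form, model_dv implements the damped-sinusoid ansatz

    $$h(t) = \sum_{n=0}^{n_{\rm max}} (x_n + i y_n)\, e^{-(t - t_0)/[\tau_n (1+b_n)]}\,\big[\cos\!\big((1+a_n)\,\omega_n t\big) - i \sin\!\big((1+a_n)\,\omega_n t\big)\big],$$

    with $\omega_n = {\rm Re}(\tilde\omega_n)/M_f$, $\tau_n = -M_f/{\rm Im}(\tilde\omega_n)$, $t_0$ the start time of the fit, and the fundamental-mode deviations $a_0$, $b_0$ pinned to zero.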
    In [88]:
    def model_dv_af(theta):
        w = np.real(omegas)/mf
        tau = -mf/np.imag(omegas)
        dim = int(len(theta)/4)
        # hard-coded n=0 frequency and damping time; these override the qnm values above
        tau = [1/0.0851903]
        w = [0.555733]

        # unpack (x_i, y_i, a_i, b_i) for each mode
        xvars = []
        yvars = []
        avars = []
        bvars = []
        for i in range (0,dim):
            xvars.append(theta[4*i])
            yvars.append(theta[4*i+1])
            avars.append(theta[4*i+2])
            bvars.append(theta[4*i+3])

        # the fundamental mode is kept at its unperturbed frequency and damping time
        avars[0] = 0
        bvars[0] = 0

        # amplitude x_i and phase y_i per mode; -1j to agree with SXS convention
        ansatz = 0
        for i in range (0,dim):
            ansatz = ansatz + (xvars[i]*np.exp(1j*yvars[i]))*np.exp(-(timesrd-timesrd[0])/(tau[i]*(1+bvars[i])))\
            *(np.cos((1+avars[i])*w[i]*timesrd)-1j*np.sin((1+avars[i])*w[i]*timesrd))
        return ansatz
    In [89]:
    def log_likelihood(theta):
        # unnormalised Gaussian log-likelihood: minus the sum of squared residuals
        #modelev = model_dv(theta)
        #modelev = model(theta)
        modelev = model_dv_af(theta)
        return -np.sum((gw_sxs_bbh_0305rd[:,1] - modelev.real)**2 + (gw_sxs_bbh_0305rd[:,2] - modelev.imag)**2)
    In [90]:
    def log_likelihood_match(theta):
        # match-based likelihood: minus the mismatch 1 - <d,h>/sqrt(<h,h><d,d>)

        # model and data
        modelev = model_dv(theta)
        data = gw_sxs_bbh_0305rd[:,1] - 1j*gw_sxs_bbh_0305rd[:,2]

        # norms
        norm1 = np.sum(modelev*np.conj(modelev)).real
        norm2 = np.sum(data*np.conj(data)).real

        # overlap and mismatch
        overlap = np.sum(data*np.conj(modelev)).real
        return -(1 - overlap/np.sqrt(norm1*norm2))
    In [91]:
    def log_prior(theta):
        x_s = theta[0::4]
        y_s = theta[1::4]
        a_s = theta[2::4]
        b_s = theta[3::4]
        if all(-10 <= t <= 10 for t in x_s) and all(-10 <= t <= 10 for t in y_s) and all(-0.4 <= t <= 0.4 for t in a_s) and all(-0.4 <= t <= 0.4 for t in b_s):
            return 0.0
        return -np.inf
    In [92]:
    def log_probability(theta):
        lp = log_prior(theta)
        if not np.isfinite(lp):
            return -np.inf
        return lp + log_likelihood(theta)
    In [93]:
    def log_probability_match(theta):
        lp = log_prior(theta)
        if not np.isfinite(lp):
            return -np.inf
        return lp + log_likelihood_match(theta)

    Maximum likelihood fitting

    In [94]:
    np.random.seed(42)
    nll = lambda *args: -log_likelihood(*args)
    initial = np.array([4,1,0,0])
    soln = minimize(nll, initial)
    x0_ml, y0_ml, a0_ml, b0_ml = soln.x
    print("Maximum likelihood estimates:")
    print("x0 = {0:.3f}".format(x0_ml))
    print("y0 = {0:.3f}".format(y0_ml))
    print("a0 = {0:.3f}".format(a0_ml))
    print("b0 = {0:.3f}".format(b0_ml))
    Out [94]:
    Maximum likelihood estimates:
    x0 = 0.357
    y0 = -1.245
    a0 = 0.000
    b0 = 0.000
    
    In [98]:
    plt.plot(timesrd, gw_sxs_bbh_0305rd[:,1], "r", alpha=0.3, lw=3, label="NR_re")
    modelfit = model_dv_af([x0_ml,y0_ml,a0_ml,b0_ml])
    plt.plot(timesrd, modelfit.real,"b", alpha=0.3, lw=3, label="Fit_re")
    #plt.plot(x0, np.dot(np.vander(x0, 2), w), "--k", label="LS")
    plt.legend(fontsize=14)
    plt.xlim(timesrd[0], timesrd[0]+80)
    plt.xlabel("t")
    plt.ylabel("h");
    Out [98]: (figure: real part of the NR waveform and the fitted model)
    In [99]:
    plt.plot(timesrd, gw_sxs_bbh_0305rd[:,2], "r", alpha=0.3, lw=3, label="NR_im")
    modelfit = model_dv_af([x0_ml,y0_ml,a0_ml,b0_ml])
    plt.plot(timesrd, modelfit.imag,"b", alpha=0.3, lw=3, label="Fit_im")
    #plt.plot(x0, np.dot(np.vander(x0, 2), w), "--k", label="LS")
    plt.legend(fontsize=14)
    plt.xlim(timesrd[0], timesrd[0]+80)
    plt.xlabel("t")
    plt.ylabel("h");
    Out [99]: (figure: imaginary part of the NR waveform and the fitted model)

    MCMC fitting

    In [113]:
    def log_prior(theta):
        x_s = theta[0::4]
        y_s = theta[1::4]
        a_s = theta[2::4]
        b_s = theta[3::4]
        if all(0 <= t <= 10 for t in x_s) and all(0 <= t <= 2*np.pi for t in y_s) and all(-0.4 <= t <= 0.4 for t in a_s) and all(-0.4 <= t <= 0.4 for t in b_s):
            return 0.0
        return -np.inf
    In [114]:
    nwalkers = 32
    ndim = int(4*(nmax+1))
    
    pos = [random.uniform(0.01,10) ,random.uniform(0,2*np.pi) ,random.uniform(-0.1,0.1) ,random.uniform(-0.1,0.1)]
    for i in range (1,nmax+1):
           pos_aux = [random.uniform(0.01,10) ,random.uniform(0,2*np.pi) ,random.uniform(-0.1,0.1) ,random.uniform(-0.1,0.1)]
           pos = pos + pos_aux
    
    pos = pos + 1e-3 * np.random.randn(nwalkers, ndim)
    In [115]:
    sampler = emcee.EnsembleSampler(nwalkers, ndim, log_probability)
    sampler.run_mcmc(pos,npoints,progress=True);
    Out [115]:
    100%|██████████| 20000/20000 [01:06<00:00, 298.64it/s]
    
    In [118]:
    plt.clf()
    fig, axes = plt.subplots(4, 1, sharex=True, figsize=(8, 9))
    axes[0].plot(sampler.chain[:, :, 0].T, color="k", alpha=0.4)
    axes[0].yaxis.set_major_locator(MaxNLocator(5))
    axes[0].set_ylabel("$x0$")
    axes[1].plot(sampler.chain[:, :, 1].T, color="k", alpha=0.4)
    axes[1].yaxis.set_major_locator(MaxNLocator(5))
    axes[1].set_ylabel("$y0$")
    axes[2].plot(sampler.chain[:, :, 2].T, color="k", alpha=0.4)
    axes[2].yaxis.set_major_locator(MaxNLocator(5))
    axes[2].set_ylabel("$a0$")
    axes[3].plot(sampler.chain[:, :, 3].T, color="k", alpha=0.4)
    axes[3].yaxis.set_major_locator(MaxNLocator(5))
    axes[3].set_ylabel("$b0$")
    plt.show()
    Out [118]: (figure: chain traces for $x0$, $y0$, $a0$, $b0$)
    In [119]:
    label = ['x0','y0','a0','b0']
    for i in range (1,nmax+1):
        label2 = ['x' + str(i),'y' + str(i),'a' + str(i),'b' + str(i)]
        label = label + label2
    In [122]:
    burnin = 500
    flat_samples = sampler.get_chain(discard=burnin, thin=15, flat=True)
    # note: the median below is taken over the full flat chain, including burn-in
    median=np.median(sampler.flatchain, axis=0)
    print(median)
    
    fig = corner.corner(
        flat_samples, labels=label, truths=median,quantiles=(0.1, 0.9)
    );
    Out [122]:
    [ 0.33810786  5.02060618 -0.00915384 -0.01628947]
    
    In [123]:
    sampler.acceptance_fraction
    Out [123]:
    array([0.469  , 0.47145, 0.45355, 0.4765 , 0.46735, 0.46935, 0.4647 ,
           0.4694 , 0.48365, 0.47425, 0.4823 , 0.48   , 0.472  , 0.4814 ,
           0.47185, 0.47435, 0.45325, 0.4705 , 0.0864 , 0.47365, 0.48135,
           0.45385, 0.47545, 0.2995 , 0.476  , 0.473  , 0.47965, 0.47605,
           0.46365, 0.47625, 0.4936 , 0.478  ])
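    The acceptance fractions above sit mostly around 0.47, with a couple of low outliers. A minimal convergence check on top of this (assuming emcee >= 3, whose sampler exposes get_autocorr_time):

    print("mean acceptance fraction:", np.mean(sampler.acceptance_fraction))
    try:
        # integrated autocorrelation time per parameter; raises if the chain is too short
        print("autocorrelation times:", sampler.get_autocorr_time(discard=burnin))
    except emcee.autocorr.AutocorrError as err:
        print(err)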
    In [112]:
    plt.clf()
    fig, axes = plt.subplots(4, 1, sharex=True, figsize=(8, 9))
    axes[0].plot(sampler.chain[:, :, 0].T, color="k", alpha=0.4)
    axes[0].yaxis.set_major_locator(MaxNLocator(5))
    axes[0].set_ylabel("$x0$")
    axes[1].plot(sampler.chain[:, :, 1].T, color="k", alpha=0.4)
    axes[1].yaxis.set_major_locator(MaxNLocator(5))
    axes[1].set_ylabel("$y0$")
    axes[2].plot(sampler.chain[:, :, 2].T, color="k", alpha=0.4)
    axes[2].yaxis.set_major_locator(MaxNLocator(5))
    axes[2].set_ylabel("$a0$")
    axes[3].plot(sampler.chain[:, :, 3].T, color="k", alpha=0.4)
    axes[3].yaxis.set_major_locator(MaxNLocator(5))
    axes[3].set_ylabel("$b0$")
    plt.show()
    Out [112]: (figure: chain traces for $x0$, $y0$, $a0$, $b0$)
    In [140]:
    fig.savefig(rootpath+'/git/rdstackingproject/plots/fit_chi2_'+str(nmax)+'.pdf')
    In [138]:
    burnin = 500
    samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
    sampler.chain.shape
    Out [138]:
    ---------------------------------------------------------------------------
    IndexError                                Traceback (most recent call last)
    <ipython-input-138-0f683973fc4f> in <module>
          1 burnin = 500
    ----> 2 samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
          3 sampler.chain.shape
    
    IndexError: too many indices for array
    
    sampler.flatchain?
    fig = corner.corner(samples, labels=["$x0$", "$y0$",  "$a0$", "$b0$"],truths=median,quantiles=(0.1, 0.9))
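    The IndexError above arises because emcee's EnsembleSampler stores its chain with shape (nwalkers, nsteps, ndim); the leading temperature index only exists for the ptemcee sampler used further down. A minimal sketch of the intended flattening for this sampler:

    samples = sampler.chain[:, burnin:, :].reshape((-1, ndim))
    # equivalently, with the emcee >= 3 interface:
    # samples = sampler.get_chain(discard=burnin, flat=True)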
    In [716]:
    #sampler = emcee.EnsembleSampler(nwalkers, ndim, log_probability_match, args=(timesrd, gw_sxs_bbh_0305rd))
    #sampler.run_mcmc(pos,npoints, progress=True);
    Out [716]:
    100%|██████████| 100000/100000 [14:00<00:00, 118.99it/s]
    
    In [205]:
    plt.clf()
    fig, axes = plt.subplots(4, 1, sharex=True, figsize=(8, 9))
    axes[0].plot(sampler.chain[0,:, :, 0].T, color="k", alpha=0.4)
    axes[0].yaxis.set_major_locator(MaxNLocator(5))
    axes[0].set_ylabel("$x0$")
    axes[1].plot(sampler.chain[0,:, :, 1].T, color="k", alpha=0.4)
    axes[1].yaxis.set_major_locator(MaxNLocator(5))
    axes[1].set_ylabel("$y0$")
    axes[2].plot(sampler.chain[0,:, :, 2].T, color="k", alpha=0.4)
    axes[2].yaxis.set_major_locator(MaxNLocator(5))
    axes[2].set_ylabel("$a0$")
    axes[3].plot(sampler.chain[0,:, :, 3].T, color="k", alpha=0.4)
    axes[3].yaxis.set_major_locator(MaxNLocator(5))
    axes[3].set_ylabel("$b0$")
    plt.show()
    Out [205]: (figure: chain traces for $x0$, $y0$, $a0$, $b0$)
    In [170]:
    nwalkers = 32
    ntemps=10
    ndim = int(4*(nmax+1))
    
    pos = [random.uniform(-10,10) ,random.uniform(-10,10) ,random.uniform(-1,1) ,random.uniform(-1,1)]
    for i in range (1,nmax+1):
           pos_aux = [random.uniform(-4,4) ,random.uniform(-4,4) ,random.uniform(-.1,.1) ,random.uniform(-.1,.1)]
           pos = pos + pos_aux
    
    pos = pos + 1e-5 * np.random.randn(ntemps,nwalkers,ndim)
    In [171]:
    sampler = ptemcee.Sampler(nwalkers, 4,log_likelihood,log_prior, ntemps=10)
    sampler.run_mcmc(pos,10000);
    Out [171]:
    ---------------------------------------------------------------------------
    ValueError                                Traceback (most recent call last)
    <ipython-input-171-e3f877d33c5c> in <module>
          1 sampler = ptemcee.Sampler(nwalkers, 4,log_likelihood,log_prior, ntemps=10)
    ----> 2 sampler.run_mcmc(pos,10000);
    
    ~/Library/Python/3.7/lib/python/site-packages/ptemcee/sampler.py in run_mcmc(self, *args, **kwargs)
        275 
        276         """
    --> 277         for x in self.sample(*args, **kwargs):
        278             pass
        279         return x
    
    ~/Library/Python/3.7/lib/python/site-packages/ptemcee/sampler.py in sample(self, p0, iterations, thin, storechain, adapt, swap_ratios)
        353 
        354         if (logpost == -np.inf).any():
    --> 355             raise ValueError('Attempting to start with samples outside posterior support.')
        356 
        357         # Expand the chain in advance of the iterations
    
    ValueError: Attempting to start with samples outside posterior support.
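
    This error is expected: the walkers were drawn from [-10, 10] x [-10, 10] x [-1, 1] x [-1, 1], while the log_prior defined in In [113] restricts x0 to [0, 10], y0 to [0, 2*pi] and a0, b0 to [-0.4, 0.4], so many starting points have zero prior probability. A quick pre-flight check before calling run_mcmc (illustrative sketch):

    # count starting points with zero prior probability
    n_bad = sum(1 for temp in pos for walker in temp if not np.isfinite(log_prior(walker)))
    print(n_bad, "of", ntemps*nwalkers, "starting positions lie outside the prior support")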
    
    In [157]:
    sampler.chain.shape
    Out [157]:
    (10, 32, 10000, 4)
    In [161]:
    burnin = 1000
    samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
    fig = corner.corner(samples, labels=["$x0$", "$y0$",  "$a0$", "$b0$"],truths=median,quantiles=(0.1, 0.9))
    Out [161]: (figure: corner plot of $x0$, $y0$, $a0$, $b0$)
    In [156]:
    plt.clf()
    fig, axes = plt.subplots(4, 1, sharex=True, figsize=(8, 9))
    axes[0].plot(sampler.chain[0,:, :, 0].T, color="k", alpha=0.4)
    axes[0].yaxis.set_major_locator(MaxNLocator(5))
    axes[0].set_ylabel("$x0$")
    axes[1].plot(sampler.chain[0,:, :, 1].T, color="k", alpha=0.4)
    axes[1].yaxis.set_major_locator(MaxNLocator(5))
    axes[1].set_ylabel("$y0$")
    axes[2].plot(sampler.chain[0,:, :, 2].T, color="k", alpha=0.4)
    axes[2].yaxis.set_major_locator(MaxNLocator(5))
    axes[2].set_ylabel("$a0$")
    axes[3].plot(sampler.chain[0,:, :, 3].T, color="k", alpha=0.4)
    axes[3].yaxis.set_major_locator(MaxNLocator(5))
    axes[3].set_ylabel("$b0$")
    plt.show()
    Out [156]: (figure: chain traces for $x0$, $y0$, $a0$, $b0$)
    In [183]:
    def log_prior(theta):
        x_s = theta[0::4]
        y_s = theta[1::4]
        a_s = theta[2::4]
        b_s = theta[3::4]
        if all(0.1 <= t <= 10 for t in x_s) and all(0 <= t <= 2*np.pi for t in y_s) and all(-1 <= t <= 1 for t in a_s) and all(-1 <= t <= 1 for t in b_s):
            return 0.0
        return -np.inf
    In [195]:
    def log_likelihood(theta):
        #modelev = model_dv(theta)
        #modelev = model(theta)
        modelev = model_dv_af(theta)
        
        return  -np.sum((gw_sxs_bbh_0305rd[:,1] - (modelev.real))**2+(gw_sxs_bbh_0305rd[:,2] - (modelev.imag))**2)
    In [196]:
    def model_dv_af(theta):
        # redefinition: the fractional deviations a_n, b_n are now free for all modes,
        # and the QNM values from the qnm package are used directly
        w = np.real(omegas)/mf
        tau = -mf/np.imag(omegas)
        dim = int(len(theta)/4)

        # unpack (x_i, y_i, a_i, b_i) for each mode
        xvars = []
        yvars = []
        avars = []
        bvars = []
        for i in range (0,dim):
            xvars.append(theta[4*i])
            yvars.append(theta[4*i+1])
            avars.append(theta[4*i+2])
            bvars.append(theta[4*i+3])

        # amplitude x_i and phase y_i per mode; -1j to agree with SXS convention
        ansatz = 0
        for i in range (0,dim):
            ansatz = ansatz + xvars[i]*np.exp(1j*yvars[i])*np.exp(-(timesrd-timesrd[0])/(tau[i]*(1+bvars[i])))\
            *(np.cos((1+avars[i])*w[i]*timesrd)-1j*np.sin((1+avars[i])*w[i]*timesrd))
        return ansatz
    In [199]:
    np.random.seed(42)
    nll = lambda *args: -log_likelihood(*args)
    initial = np.array([0.4,-0.1,0.0,0.0])
    soln = minimize(nll, initial)
    x0_ml, y0_ml, a0_ml, b0_ml = soln.x
    print("Maximum likelihood estimates:")
    print("x0 = {0:.3f}".format(x0_ml))
    print("y0 = {0:.3f}".format(y0_ml))
    print("a0 = {0:.3f}".format(a0_ml))
    print("b0 = {0:.3f}".format(b0_ml))
    Out [199]:
    Maximum likelihood estimates:
    x0 = 0.342
    y0 = -374.430
    a0 = -0.044
    b0 = 0.113
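
    Since y0 enters the ansatz only through exp(1j*y0), the estimate above is defined modulo 2*pi; wrapping it back into [0, 2*pi) gives the value to compare with the prior range used below:

    print("y0 mod 2*pi = {0:.3f}".format(y0_ml % (2*np.pi)))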
    
    In [203]:
    modelfit = model_dv_af([x0_ml,y0_ml,a0_ml,b0_ml])
    plt.plot(timesrd, gw_sxs_bbh_0305rd[:,1], "r", alpha=0.3, lw=3, label="NR_re")
    plt.plot(timesrd, modelfit.real,"b", alpha=0.3, lw=3, label="Fit_re")
    plt.plot(timesrd, gw_sxs_bbh_0305rd[:,2], "o", alpha=0.3, lw=3, label="NR_im")
    plt.plot(timesrd, modelfit.imag,"g", alpha=0.3, lw=3, label="Fit_im")
    #plt.plot(x0, np.dot(np.vander(x0, 2), w), "--k", label="LS")
    plt.legend(fontsize=14)
    plt.xlim(timesrd[0], timesrd[0]+80)
    plt.xlabel("t")
    plt.ylabel("h");
    Out [203]: (figure: real and imaginary parts of the NR waveform and the fitted model)
    In [208]:
    nwalkers = 32
    ntemps=10
    ndim = int(4*(nmax+1))
    npoints=5000
    
    pos = [random.uniform(0.1,4) ,random.uniform(0.01,2*np.pi) ,random.uniform(-1,1) ,random.uniform(-1,1)]
    for i in range (1,nmax+1):
           pos_aux = [random.uniform(0.1,3) ,random.uniform(0.01,0.02) ,random.uniform(-.1,.1) ,random.uniform(-.1,.1)]
           pos = pos + pos_aux
    
    pos = pos + 1e-5 * np.random.randn(ntemps,nwalkers,ndim)
    sampler = ptemcee.Sampler(nwalkers, ndim,log_likelihood,log_prior, ntemps=10)
    sampler.run_mcmc(pos,npoints);
    In [209]:
    plt.clf()
    fig, axes = plt.subplots(4, 1, sharex=True, figsize=(8, 9))
    axes[0].plot(sampler.chain[0,:, :, 0].T, color="k", alpha=0.4)
    axes[0].yaxis.set_major_locator(MaxNLocator(5))
    axes[0].set_ylabel("$x0$")
    axes[1].plot(sampler.chain[0,:, :, 1].T, color="k", alpha=0.4)
    axes[1].yaxis.set_major_locator(MaxNLocator(5))
    axes[1].set_ylabel("$y0$")
    axes[2].plot(sampler.chain[0,:, :, 2].T, color="k", alpha=0.4)
    axes[2].yaxis.set_major_locator(MaxNLocator(5))
    axes[2].set_ylabel("$a0$")
    axes[3].plot(sampler.chain[0,:, :, 3].T, color="k", alpha=0.4)
    axes[3].yaxis.set_major_locator(MaxNLocator(5))
    axes[3].set_ylabel("$b0$")
    plt.show()
    Out [209]: (figure: chain traces for $x0$, $y0$, $a0$, $b0$)
    In [215]:
    burnin = 2000
    samples = sampler.chain[1,:, burnin:, :].reshape((-1, ndim))
    fig = corner.corner(samples, labels=["$x0$", "$y0$",  "$a0$", "$b0$"],truths=median,quantiles=(0.1, 0.9))
    Out [215]: (figure: corner plot of $x0$, $y0$, $a0$, $b0$)
    In [212]:
    median
    Out [212]:
    array([ 0.0085003 , -0.01569561, -0.05168496, -0.06578066])
    In [284]:
    def log_prior(theta):
        x_s = theta[0::4]
        y_s = theta[1::4]
        a_s = theta[2::4]
        b_s = theta[3::4]
        if all(0.1 <= t <= 2 for t in x_s) and all(0 <= t <= 2*np.pi for t in y_s) and all(-0.4 <= t <= 0.4 for t in a_s) and all(-0.4 <= t <= 0.4 for t in b_s):
            return 0.0
        return -np.inf
    In [296]:
    nwalkers = 32
    ntemps=10
    ndim = int(4*(nmax+1))
    
    pos = [random.uniform(0,10) ,random.uniform(0,2*np.pi) ,random.uniform(-0.1,0.1) ,random.uniform(-0.1,0.1)]
    for i in range (1,nmax+1):
           pos_aux = [random.uniform(0,4) ,random.uniform(0,4) ,random.uniform(-.05,.05) ,random.uniform(-.05,.05)]
           pos = pos + pos_aux
    
    pos = pos + 1e-5 * np.random.randn(ntemps,nwalkers,ndim)
    
    sampler = ptemcee.Sampler(nwalkers, ndim,log_likelihood,log_prior, ntemps=10)
    sampler.run_mcmc(pos,10000);
    In [301]:
    plt.clf()
    fig, axes = plt.subplots(4, 1, sharex=True, figsize=(8, 9))
    axes[0].plot(sampler.chain[0,:, :, 0].T, color="k", alpha=0.4)
    axes[0].yaxis.set_major_locator(MaxNLocator(5))
    axes[0].set_ylabel("$x0$")
    axes[1].plot(sampler.chain[0,:, :, 1].T, color="k", alpha=0.4)
    axes[1].yaxis.set_major_locator(MaxNLocator(5))
    axes[1].set_ylabel("$y0$")
    axes[2].plot(sampler.chain[0,:, :, 2].T, color="k", alpha=0.4)
    axes[2].yaxis.set_major_locator(MaxNLocator(5))
    axes[2].set_ylabel("$a0$")
    axes[3].plot(sampler.chain[0,:, :, 3].T, color="k", alpha=0.4)
    axes[3].yaxis.set_major_locator(MaxNLocator(5))
    axes[3].set_ylabel("$b0$")
    plt.show()
    Out [301]: (figure: chain traces for $x0$, $y0$, $a0$, $b0$)
    In [312]:
    burnin = 200
    samples = sampler.chain[0,:, burnin:, :].reshape((-1, ndim))
    median=np.median(samples, axis=0)
    print(median)
    fig = corner.corner(samples, labels=["$x0$", "$y0$",  "$a0$", "$b0$"],truths=median,quantiles=(0.1, 0.9))
    Out [312]:
    [ 0.3364835   3.08117345 -0.05128693 -0.03812301]
    
    In [314]:
    modelfit = model_dv_af(median)
    plt.plot(timesrd, gw_sxs_bbh_0305rd[:,1], "r", alpha=0.3, lw=3, label="NR_re")
    plt.plot(timesrd, modelfit.real,"b", alpha=0.3, lw=3, label="Fit_re")
    #plt.plot(x0, np.dot(np.vander(x0, 2), w), "--k", label="LS")
    plt.legend(fontsize=14)
    plt.xlim(timesrd[0], timesrd[0]+80)
    plt.xlabel("t")
    plt.ylabel("h");
    Out [314]: (figure: real part of the NR waveform and the median-parameter fit)