Commit 0ddf0e37 authored by Gregory Ashton

Adds directed MC results

parent 4197c2af
import pyfstat
import numpy as np
import os
import sys
ID = sys.argv[1]
outdir = sys.argv[2]
label = 'run_{}'.format(ID)
data_label = '{}_data'.format(label)
results_file_name = '{}/MCResults_{}.txt'.format(outdir, ID)
# Properties of the GW data
sqrtSX = 2e-23
@@ -16,14 +24,13 @@ Alpha = 5e-3
Delta = 6e-2
tref = .5*(tstart+tend)
data_label = 'temp_data_{}'.format(os.getpid())
results_file_name = 'MCResults.txt'
VF0 = VF1 = 100
DeltaF0 = VF0 * np.sqrt(3)/(np.pi*Tspan)
DeltaF1 = VF1 * np.sqrt(45/4.)/(np.pi*Tspan**2)
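# Quick consistency check (a sketch; VF0 and VF1 appear to be the
# per-dimension metric "volumes" that the prior widths DeltaF0 and DeltaF1
# are chosen to span, so the two lines above are just those relations
# inverted):
assert np.isclose(np.pi * Tspan * DeltaF0 / np.sqrt(3), VF0)
assert np.isclose(np.pi * Tspan**2 * DeltaF1 / np.sqrt(45/4.), VF1)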
depths = np.linspace(100, 250, 13)
depths = np.linspace(100, 400, 7)
depths = [125, 175]
run_setup = [((10, 0), 16, False),
((10, 0), 5, False),
@@ -40,7 +47,7 @@ for depth in depths:
cosi = np.random.uniform(-1, 1)
data = pyfstat.Writer(
label=data_label, outdir='data', tref=tref,
label=data_label, outdir=outdir, tref=tref,
tstart=tstart, F0=F0, F1=F1, F2=F2, duration=Tspan, Alpha=Alpha,
Delta=Delta, h0=h0, sqrtSX=sqrtSX, psi=psi, phi=phi, cosi=cosi,
detector='H1,L1')
@@ -60,12 +67,13 @@ for depth in depths:
ntemps = 1
log10temperature_min = -1
nwalkers = 50
nwalkers = 100
nsteps = [50, 50]
mcmc = pyfstat.MCMCFollowUpSearch(
label='temp', outdir='data',
sftfilepath='data/*'+data_label+'*sft', theta_prior=theta_prior,
label=label, outdir=outdir,
sftfilepath='{}/*{}*sft'.format(outdir, data_label),
theta_prior=theta_prior,
tref=tref, minStartTime=tstart, maxStartTime=tend, nsteps=nsteps,
nwalkers=nwalkers, ntemps=ntemps,
log10temperature_min=log10temperature_min)
@@ -77,4 +85,4 @@ for depth in depths:
with open(results_file_name, 'a') as f:
f.write('{} {:1.8e} {:1.8e} {:1.8e} {:1.8e} {:1.8e}\n'
.format(depth, h0, dF0, dF1, predicted_twoF, maxtwoF))
os.system('rm data/temp*')
os.system('rm {}/*{}*'.format(outdir, label))
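# Each trial appends one whitespace-separated line to MCResults_<ID>.txt with
# the columns
#   depth  h0  dF0  dF1  predicted_twoF  maxtwoF
# (see the f.write() call above). The collected files are what the plotting
# script below reads back in; only the depth and (max)twoF columns are used.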
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import scipy.stats
import os
from tqdm import tqdm
from oct2py import octave
plt.style.use('paper')
Tspan = 100 * 86400
def Recovery(Tspan, Depth, twoFstar=60):
rho2 = 4*Tspan/25./Depth**2
twoF_Hs = scipy.stats.distributions.ncx2(df=4, nc=rho2)
return 1 - twoF_Hs.cdf(twoFstar)
def Recovery(Tspan, Depth, twoFstar=60, detectors='H1,L1'):
numDetectors = len(detectors.split(','))
cmd = ("DetectionProbabilityStackSlide('Nseg', 1, 'Tdata', {},"
"'misHist', createDeltaHist(0), 'avg2Fth', {}, 'detectors', '{}',"
"'Depth', {})"
).format(numDetectors*Tspan, twoFstar, detectors, Depth)
return octave.eval(cmd, verbose=False)
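# Both Recovery() definitions estimate the same quantity: the probability that
# a signal at sensitivity depth Depth = sqrt(Sn)/h0 yields 2F above the
# threshold twoFstar. The first (scipy) version is an analytic sketch: averaged
# over sky position and polarisation the expected squared SNR is roughly
# rho^2 ~ (4/25) * Tdata / Depth^2, and 2F then follows a non-central
# chi-squared distribution with 4 degrees of freedom and non-centrality rho^2.
# The second version instead calls the octave DetectionProbabilityStackSlide
# routine (from octapps) via oct2py, which performs that marginalisation
# numerically; it is the definition actually used below.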
def binomialConfidenceInterval(N, K, confidence=0.95):
cmd = '[fLow, fUpper] = binomialConfidenceInterval({}, {}, {})'.format(
N, K, confidence)
[l, u] = octave.eval(cmd, verbose=False, return_both=True)[0].split('\n')
return float(l.split('=')[1]), float(u.split('=')[1])
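# binomialConfidenceInterval() wraps the octave routine of the same name
# (presumably from octapps): it requests the 95% confidence interval on the
# recovered fraction given N trials with K successes, then parses the printed
# 'fLow = ...' and 'fUpper = ...' lines back into floats. Usage sketch:
#   fLower, fUpper = binomialConfidenceInterval(N=100, K=90)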
results_file_name = 'MCResults.txt'
@@ -20,26 +34,40 @@ df = pd.read_csv(
twoFstar = 60
depths = np.unique(df.depth.values)
recovery_fraction = []
recovery_fraction_std = []
recovery_fraction_CI = []
for d in depths:
twoFs = df[df.depth == d].twoF.values
print d, len(twoFs)
n = float(len(twoFs))
rf = np.sum(twoFs > twoFstar) / n
rf_bars = [np.sum(np.concatenate((twoFs[:i-1], twoFs[i:]))>twoFstar)/(n-1)
for i in range(int(n))]
var = (n-1)/n * np.sum([(rf_bar - rf)**2 for rf_bar in rf_bars])
recovery_fraction.append(rf)
recovery_fraction_std.append(np.sqrt(var))
N = len(twoFs)
K = np.sum(twoFs > twoFstar)
print d, N, K
recovery_fraction.append(K/float(N))
[fLower, fUpper] = binomialConfidenceInterval(N, K)
recovery_fraction_CI.append([fLower, fUpper])
yerr = np.abs(recovery_fraction - np.array(recovery_fraction_CI).T)
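# yerr converts the absolute interval bounds into the (2, N) array of lower
# and upper error-bar half-lengths that errorbar() expects, i.e.
# (fraction - fLower, fUpper - fraction) for each depth.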
fig, ax = plt.subplots()
ax.errorbar(depths, recovery_fraction, yerr=recovery_fraction_std)
#ax.plot(depths, recovery_fraction)
ax.errorbar(depths, recovery_fraction, yerr=yerr, fmt='sk', marker='s', ms=2,
capsize=1, capthick=0.5, elinewidth=0.5,
label='Monte-Carlo result')
fname = 'analytic_data.txt'
if os.path.isfile(fname):
depths_smooth, recovery_analytic = np.loadtxt(fname)
else:
depths_smooth = np.linspace(10, 550, 100)
recovery_analytic = []
for d in tqdm(depths_smooth):
recovery_analytic.append(Recovery(Tspan, d, twoFstar))
np.savetxt(fname, np.array([depths_smooth, recovery_analytic]))
depths_smooth = np.concatenate(([0], depths_smooth))
recovery_analytic = np.concatenate(([1], recovery_analytic))
ax.plot(depths_smooth, recovery_analytic, '-k', label='Theoretical maximum')
depths_smooth = np.linspace(min(depths), max(depths), 100)
recovery_analytic = [Recovery(Tspan, d) for d in depths_smooth]
ax.plot(depths_smooth, recovery_analytic)
ax.set_ylim(0, 1.05)
ax.set_xlabel(r'Signal depth', size=10)
ax.set_ylabel(r'Recovered fraction', size=10)
ax.legend(loc=1, frameon=False)
ax.set_xlabel(r'Signal depth', size=16)
ax.set_ylabel('Recovered fraction [%]', size=12)
fig.tight_layout()
fig.savefig('directed_recovery.png')
#!/bin/bash
. /home/gregory.ashton/lalsuite-install/etc/lalapps-user-env.sh
export PATH="/home/gregory.ashton/anaconda2/bin:$PATH"
export MPLCONFIGDIR=/home/gregory.ashton/.config/matplotlib
rm /local/user/gregory.ashton/MCResults*txt
for ((n=0;n<100;n++))
for ((n=0;n<10;n++))
do
echo $n
python generate_data.py --quite --no-template-counting
/home/gregory.ashton/anaconda2/bin/python generate_data.py "$1" /local/user/gregory.ashton --quite --no-template-counting
done
cp /local/user/gregory.ashton/MCResults*txt /home/gregory.ashton/PyFstat/Paper/DirectedMC/CollectedOutput
Executable= repeat.sh
Arguments= $(Cluster)_$(Process)
Universe=vanilla
Input=/dev/null
accounting_group = ligo.dev.o2.cw.explore.test
Output=CollectedOutput/out.$(Process)
Error=CollectedOutput/err.$(Process)
Log=CollectedOutput/log.$(Process)
request_cpus = 1
request_memory = 16 GB
Queue 100
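Each of the 100 queued jobs runs repeat.sh with a unique $(Cluster)_$(Process)
string as its only argument; repeat.sh forwards it as "$1" to generate_data.py,
which takes it as the run ID (sys.argv[1]), so every job writes its own
run_<ID> data files and MCResults_<ID>.txt under /local/user/gregory.ashton
before copying the results into CollectedOutput. Schematically (the cluster
and process numbers are illustrative only):

    repeat.sh 1234_7
      -> generate_data.py 1234_7 /local/user/gregory.ashton ...
      -> /local/user/gregory.ashton/MCResults_1234_7.txt -> CollectedOutput/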
Paper/directed_recovery.png (added binary image, 50.2 KiB)
