Commit 4b4621dc authored by Gregory Ashton

Finalise transition from volume to Nstar

The follow-up setup machinery now works in terms of the template-count estimate N* rather than the metric-volume estimate V: get_V_estimate becomes get_Nstar_estimate, the fiducial frequency and parameter-space extent are read directly from the prior dictionary (init_V_estimate_parameters is removed), and the cached run-setup file now stores the prior and the N* ladder. The changes touch the MCMCFollowUpSearch class and pyfstat/optimal_setup_functions.py.

parent 5837254c
@@ -17,7 +17,7 @@ import dill as pickle
 import pyfstat.core as core
 from pyfstat.core import tqdm, args, earth_ephem, sun_ephem, read_par
-from pyfstat.optimal_setup_functions import get_V_estimate, get_optimal_setup
+from pyfstat.optimal_setup_functions import get_Nstar_estimate, get_optimal_setup
 import pyfstat.helper_functions as helper_functions
@@ -1831,54 +1831,29 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
         if prior[key]['type'] == 'unif':
             return .5*(prior[key]['upper'] + prior[key]['lower'])
 
-    def init_V_estimate_parameters(self):
-        if 'Alpha' in self.theta_keys:
-            DeltaAlpha = self.get_width_from_prior(self.theta_prior, 'Alpha')
-            DeltaDelta = self.get_width_from_prior(self.theta_prior, 'Delta')
-            DeltaMid = self.get_mid_from_prior(self.theta_prior, 'Delta')
-            DeltaOmega = np.sin(np.pi/2 - DeltaMid) * DeltaDelta * DeltaAlpha
-            logging.info('Search over Alpha and Delta')
-        else:
-            logging.info('No sky search requested')
-            DeltaOmega = 0
-        if 'F0' in self.theta_keys:
-            DeltaF0 = self.get_width_from_prior(self.theta_prior, 'F0')
-        else:
-            raise ValueError("You aren't searching over F0?")
-        DeltaFs = [DeltaF0]
-        if 'F1' in self.theta_keys:
-            DeltaF1 = self.get_width_from_prior(self.theta_prior, 'F1')
-            DeltaFs.append(DeltaF1)
-        if 'F2' in self.theta_keys:
-            DeltaF2 = self.get_width_from_prior(self.theta_prior, 'F2')
-            DeltaFs.append(DeltaF2)
-        logging.info('Searching over Frequency and {} spin-down components'
-                     .format(len(DeltaFs)-1))
-        if type(self.theta_prior['F0']) == dict:
-            fiducial_freq = self.get_mid_from_prior(self.theta_prior, 'F0')
-        else:
-            fiducial_freq = self.theta_prior['F0']
-        return fiducial_freq, DeltaOmega, DeltaFs
-
     def read_setup_input_file(self, run_setup_input_file):
         with open(run_setup_input_file, 'r+') as f:
             d = pickle.load(f)
         return d
 
     def write_setup_input_file(self, run_setup_input_file, R, Nsegs0,
-                               nsegs_vals, V_vals, DeltaOmega, DeltaFs):
-        d = dict(R=R, Nsegs0=Nsegs0, nsegs_vals=nsegs_vals, V_vals=V_vals,
-                 DeltaOmega=DeltaOmega, DeltaFs=DeltaFs)
+                               nsegs_vals, Nstar_vals, theta_prior):
+        d = dict(R=R, Nsegs0=Nsegs0, nsegs_vals=nsegs_vals,
+                 theta_prior=theta_prior, Nstar_vals=Nstar_vals)
         with open(run_setup_input_file, 'w+') as f:
             pickle.dump(d, f)
 
     def check_old_run_setup(self, old_setup, **kwargs):
         try:
             truths = [val == old_setup[key] for key, val in kwargs.iteritems()]
-            return all(truths)
-        except KeyError:
+            if all(truths):
+                return True
+            else:
+                logging.info(
+                    'Old setup does not match one of R, Nsegs0 or prior')
+        except KeyError as e:
+            logging.info(
+                'Error found when comparing with old setup: {}'.format(e))
             return False
 
     def init_run_setup(self, run_setup=None, R=10, Nsegs0=None, log_table=True,
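As a side note, the three methods above implement a simple cache: the optimal setup is pickled to disk together with the inputs that produced it (R, Nsegs0 and, after this commit, the prior and the N* ladder), and on a later run the stored inputs are compared with the requested ones before the cache is reused. A minimal, self-contained sketch of that pattern is shown below; it is written for Python 3 with the standard pickle module (the diff itself targets Python 2 and dill), and the function name, file name and keys are only illustrative.

```python
import logging
import pickle


def load_cached_setup(path, **requested):
    """Illustrative sketch: return the cached setup dict if it was built from
    the same inputs (e.g. R, Nsegs0, theta_prior), otherwise None."""
    try:
        with open(path, 'rb') as f:
            old = pickle.load(f)
    except IOError:
        return None  # no cache written yet
    try:
        if all(old[key] == val for key, val in requested.items()):
            return old
        logging.info('Old setup does not match one of R, Nsegs0 or prior')
    except KeyError as e:
        logging.info('Error found when comparing with old setup: {}'.format(e))
    return None


# Hypothetical usage:
# cached = load_cached_setup('follow_up_run_setup.pickle',
#                            R=10, Nsegs0=100, theta_prior=my_prior)
```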
@@ -1888,7 +1863,6 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
             raise ValueError(
                 'You must either specify the run_setup, or Nsegs0 from which '
                 'the optimal run_setup given R can be estimated')
-        fiducial_freq, DeltaOmega, DeltaFs = self.init_V_estimate_parameters()
         if run_setup is None:
             logging.info('No run_setup provided')
@@ -1901,29 +1875,26 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
                 old_setup = self.read_setup_input_file(run_setup_input_file)
                 if self.check_old_run_setup(old_setup, R=R,
                                             Nsegs0=Nsegs0,
-                                            DeltaOmega=DeltaOmega,
-                                            DeltaFs=DeltaFs):
+                                            theta_prior=self.theta_prior):
                     logging.info('Using old setup with R={}, Nsegs0={}'.format(
                         R, Nsegs0))
                     nsegs_vals = old_setup['nsegs_vals']
-                    V_vals = old_setup['V_vals']
+                    Nstar_vals = old_setup['Nstar_vals']
                     generate_setup = False
                 else:
-                    logging.info(
-                        'Old setup does not match requested R, Nsegs0')
                     generate_setup = True
             else:
                 generate_setup = True
 
             if generate_setup:
-                nsegs_vals, V_vals = get_optimal_setup(
+                nsegs_vals, Nstar_vals = get_optimal_setup(
                     R, Nsegs0, self.tref, self.minStartTime,
-                    self.maxStartTime, self.theta_prior, fiducial_freq,
+                    self.maxStartTime, self.theta_prior,
                     self.search.detector_names, self.earth_ephem,
                     self.sun_ephem)
                 self.write_setup_input_file(run_setup_input_file, R, Nsegs0,
-                                            nsegs_vals, V_vals, DeltaOmega,
-                                            DeltaFs)
+                                            nsegs_vals, Nstar_vals,
+                                            self.theta_prior)
 
             run_setup = [((self.nsteps[0], 0), nsegs, False)
                          for nsegs in nsegs_vals[:-1]]
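The list comprehension closing this hunk shapes the per-stage setup consumed later: each entry is ((nburn, nprod), nsegs, resetp0), matching the columns logged further down. A small illustrative sketch, assuming nsteps_burn plays the role of self.nsteps[0]; the last element of nsegs_vals is handled by lines elided from this hunk.

```python
# Illustrative only: how the per-stage run_setup list is shaped.
nsteps_burn = 100                     # stands in for self.nsteps[0]
nsegs_vals = [100, 37, 14, 5, 1]      # hypothetical ladder from get_optimal_setup

run_setup = [((nsteps_burn, 0), nsegs, False)   # ((nburn, nprod), nsegs, resetp0)
             for nsegs in nsegs_vals[:-1]]
print(run_setup)
# [((100, 0), 100, False), ((100, 0), 37, False),
#  ((100, 0), 14, False), ((100, 0), 5, False)]
```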
@@ -1932,7 +1903,7 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
         else:
             logging.info('Calculating the number of templates for this setup')
-            V_vals = []
+            Nstar_vals = []
             for i, rs in enumerate(run_setup):
                 rs = list(rs)
                 if len(rs) == 2:
@@ -1942,25 +1913,24 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
                 run_setup[i] = rs
 
                 if args.no_template_counting:
-                    V_vals.append([1, 1, 1])
+                    Nstar_vals.append([1, 1, 1])
                 else:
-                    V = get_V_estimate(
+                    Nstar = get_Nstar_estimate(
                         rs[1], self.tref, self.minStartTime, self.maxStartTime,
-                        self.theta_prior, fiducial_freq,
-                        self.search.detector_names, self.earth_ephem,
-                        self.sun_ephem)
-                    V_vals.append(V)
+                        self.theta_prior, self.search.detector_names,
+                        self.earth_ephem, self.sun_ephem)
+                    Nstar_vals.append(Nstar)
 
         if log_table:
             logging.info('Using run-setup as follows:')
             logging.info(
-                'Stage | nburn | nprod | nsegs | Tcoh d | resetp0 | V')
+                'Stage | nburn | nprod | nsegs | Tcoh d | resetp0 | Nstar')
             for i, rs in enumerate(run_setup):
                 Tcoh = (self.maxStartTime - self.minStartTime) / rs[1] / 86400
-                if V_vals[i] is None:
+                if Nstar_vals[i] is None:
                     vtext = 'N/A'
                 else:
-                    vtext = '{:1.0e}'.format(V_vals[i])
+                    vtext = '{:0.3e}'.format(int(Nstar_vals[i]))
                 logging.info('{} | {} | {} | {} | {} | {} | {}'.format(
                     str(i).ljust(5), str(rs[0][0]).ljust(5),
                     str(rs[0][1]).ljust(5), str(rs[1]).ljust(5),
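For what it is worth, the log table is plain string formatting, with each cell left-justified to five characters by str.ljust; the trailing columns (Tcoh, resetp0 and N*) are formatted in lines elided from this hunk, so the sketch below fills those in with assumed formats and hypothetical values.

```python
# Hypothetical single row of the logged run-setup table.
i, nburn, nprod, nsegs = 0, 100, 0, 50
Tcoh_days = 2.3
ntext = '{:0.3e}'.format(int(1.2e7))   # N* formatted as in the new code

print('Stage | nburn | nprod | nsegs | Tcoh d | resetp0 | Nstar')
print('{} | {} | {} | {} | {} | {} | {}'.format(
    str(i).ljust(5), str(nburn).ljust(5), str(nprod).ljust(5),
    str(nsegs).ljust(5), str(Tcoh_days).ljust(6), str(False).ljust(7), ntext))
```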
@@ -1971,24 +1941,22 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
             filename = '{}/{}_run_setup.tex'.format(self.outdir, self.label)
             with open(filename, 'w+') as f:
                 f.write(r'\begin{tabular}{c|cccc}' + '\n')
-                f.write(r'Stage & $\Nseg$ & $\Tcoh^{\rm days}$ &'
-                        r'$\Nsteps$ & $\V$ \\ \hline'
+                f.write(r'Stage & $N_\mathrm{seg}$ &'
+                        r'$T_\mathrm{coh}^{\rm days}$ &'
+                        r'$N_\mathrm{burn}$ & $N_\mathrm{prod}$ &'
+                        r'$N^*$ \\ \hline'
                         '\n')
                 for i, rs in enumerate(run_setup):
                     Tcoh = float(
                         self.maxStartTime - self.minStartTime)/rs[1]/86400
-                    line = r'{} & {} & {} & {} & {} \\' + '\n'
-                    if V_vals[i] is None:
-                        V = 'N/A'
+                    line = r'{} & {} & {} & {} & {} & {} \\' + '\n'
+                    if Nstar_vals[i] is None:
+                        Nstar = 'N/A'
                     else:
-                        V = V_vals[i]
-                    if rs[0][-1] == 0:
-                        nsteps = rs[0][0]
-                    else:
-                        nsteps = '{},{}'.format(*rs[0])
+                        Nstar = Nstar_vals[i]
                     line = line.format(i, rs[1], '{:1.1f}'.format(Tcoh),
-                                       nsteps,
-                                       helper_functions.texify_float(V))
+                                       rs[0], rs[1],
+                                       helper_functions.texify_float(Nstar))
                     f.write(line)
                 f.write(r'\end{tabular}' + '\n')
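For reference, the header row written by the new code declares six columns (Stage, N_seg, T_coh in days, N_burn, N_prod, N*), while each data row is filled with i, rs[1], Tcoh, rs[0], rs[1] and texify_float(N*). The sketch below reproduces the row construction with hypothetical values; texify_float_stub is a stand-in, since the exact output of helper_functions.texify_float is not shown in the diff. Note that the unchanged \begin{tabular}{c|cccc} preamble still declares five columns while the new rows carry six fields, and that the (nburn, nprod) tuple lands in the N_burn slot as written.

```python
# Sketch of the .tex rows the new code would emit, with hypothetical values.
def texify_float_stub(x):
    # Assumed behaviour: render a number as LaTeX scientific notation.
    if isinstance(x, str):
        return x
    mantissa, exponent = '{:1.1e}'.format(x).split('e')
    return r'${}{{\times}}10^{{{}}}$'.format(mantissa, int(exponent))

run_setup = [((100, 0), 50, False), ((100, 100), 1, False)]   # hypothetical
Nstar_vals = [1.2e7, 3.4e2]
maxStartTime, minStartTime = 1000864000, 1000000000

for i, rs in enumerate(run_setup):
    Tcoh = float(maxStartTime - minStartTime)/rs[1]/86400
    line = r'{} & {} & {} & {} & {} & {} \\'.format(
        i, rs[1], '{:1.1f}'.format(Tcoh), rs[0], rs[1],
        texify_float_stub(Nstar_vals[i]))
    print(line)
# 0 & 50 & 0.2 & (100, 0) & 50 & $1.2{\times}10^{7}$ \\
# 1 & 1 & 10.0 & (100, 100) & 1 & $3.4{\times}10^{2}$ \\
```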
The second file changed by this commit is pyfstat/optimal_setup_functions.py:
""" """
Provides functions to aid in calculating the optimal setup based on the metric Provides functions to aid in calculating the optimal setup for zoom follow up
volume estimates.
""" """
from __future__ import division, absolute_import, print_function from __future__ import division, absolute_import, print_function
@@ -15,41 +14,64 @@ import pyfstat.helper_functions as helper_functions
 
 def get_optimal_setup(
-        R, Nsegs0, tref, minStartTime, maxStartTime, prior, fiducial_freq,
+        R, Nsegs0, tref, minStartTime, maxStartTime, prior,
         detector_names, earth_ephem, sun_ephem):
+    """ Using an optimisation step, calculate the optimal setup ladder
+
+    Parameters
+    ----------
+    R : float
+    Nsegs0 : int
+        The number of segments for the initial step of the ladder
+    minStartTime, maxStartTime : int
+        GPS times of the start and end time of the search
+    prior : dict
+        Prior dictionary, each item must either be a fixed scalar value, or
+        a uniform prior.
+    detector_names : list of str
+    earth_ephem, sun_ephem : str
+
+    Returns
+    -------
+    nsegs, Nstar : list
+        Ladder of segment numbers and the corresponding Nstar
+
+    """
     logging.info('Calculating optimal setup for R={}, Nsegs0={}'.format(
         R, Nsegs0))
 
-    V_0 = get_V_estimate(
-        Nsegs0, tref, minStartTime, maxStartTime, prior, fiducial_freq,
+    Nstar_0 = get_Nstar_estimate(
+        Nsegs0, tref, minStartTime, maxStartTime, prior,
         detector_names, earth_ephem, sun_ephem)
-    logging.info('Stage {}, nsegs={}, V={}'.format(0, Nsegs0, V_0))
+    logging.info(
+        'Stage {}, nsegs={}, Nstar={}'.format(0, Nsegs0, int(Nstar_0)))
 
     nsegs_vals = [Nsegs0]
-    V_vals = [V_0]
+    Nstar_vals = [Nstar_0]
 
     i = 0
     nsegs_i = Nsegs0
     while nsegs_i > 1:
-        nsegs_i, V_i = get_nsegs_ip1(
-            nsegs_i, R, tref, minStartTime, maxStartTime, prior, fiducial_freq,
+        nsegs_i, Nstar_i = _get_nsegs_ip1(
+            nsegs_i, R, tref, minStartTime, maxStartTime, prior,
             detector_names, earth_ephem, sun_ephem)
         nsegs_vals.append(nsegs_i)
-        V_vals.append(V_i)
+        Nstar_vals.append(Nstar_i)
         i += 1
         logging.info(
-            'Stage {}, nsegs={}, V={}'.format(i, nsegs_i, V_i))
+            'Stage {}, nsegs={}, Nstar={}'.format(i, nsegs_i, int(Nstar_i)))
 
-    return nsegs_vals, V_vals
+    return nsegs_vals, Nstar_vals
 
 
-def get_nsegs_ip1(
-        nsegs_i, R, tref, minStartTime, maxStartTime, prior, fiducial_freq,
-        detector_names, earth_ephem, sun_ephem):
+def _get_nsegs_ip1(nsegs_i, R, tref, minStartTime, maxStartTime, prior,
+                   detector_names, earth_ephem, sun_ephem):
+    """ Calculate Nsegs_{i+1} given Nsegs_{i} """
+
     log10R = np.log10(R)
-    log10Vi = np.log10(get_V_estimate(
-        nsegs_i, tref, minStartTime, maxStartTime, prior, fiducial_freq,
+    log10Nstari = np.log10(get_Nstar_estimate(
+        nsegs_i, tref, minStartTime, maxStartTime, prior,
         detector_names, earth_ephem, sun_ephem))
 
     def f(nsegs_ip1):
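A hedged usage sketch of the new get_optimal_setup interface follows, based only on the signature and docstring above: the prior follows the {'type': 'unif', 'lower': ..., 'upper': ...} / fixed-scalar convention used elsewhere in this commit, and the GPS times, detector list and ephemeris paths are placeholders (running it for real requires lalpulsar and valid ephemeris files).

```python
# Illustrative call; all numerical values and paths are placeholders.
from pyfstat.optimal_setup_functions import get_optimal_setup

prior = dict(F0=dict(type='unif', lower=29.9, upper=30.1),   # uniform in frequency
             F1=dict(type='unif', lower=-1e-10, upper=0),    # uniform in spin-down
             F2=0,                                           # fixed scalar
             Alpha=1.0, Delta=0.5)                           # fixed sky position

nsegs_vals, Nstar_vals = get_optimal_setup(
    R=10,                          # target ratio of N* between consecutive stages
    Nsegs0=100,                    # number of segments at the first stage
    tref=1000000000,               # GPS reference time
    minStartTime=1000000000,
    maxStartTime=1000864000,
    prior=prior,
    detector_names=['H1', 'L1'],
    earth_ephem='/path/to/earth_ephem.dat',
    sun_ephem='/path/to/sun_ephem.dat')
```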
@@ -60,28 +82,46 @@ def get_nsegs_ip1(
         nsegs_ip1 = int(nsegs_ip1[0])
         if nsegs_ip1 == 0:
             nsegs_ip1 = 1
-        Vip1 = get_V_estimate(
-            nsegs_ip1, tref, minStartTime, maxStartTime, prior, fiducial_freq,
+        Nstarip1 = get_Nstar_estimate(
+            nsegs_ip1, tref, minStartTime, maxStartTime, prior,
             detector_names, earth_ephem, sun_ephem)
-        if Vip1 is None:
+        if Nstarip1 is None:
             return 1e6
         else:
-            log10Vip1 = np.log10(Vip1)
-            return np.abs(log10Vi + log10R - log10Vip1)
+            log10Nstarip1 = np.log10(Nstarip1)
+            return np.abs(log10Nstari + log10R - log10Nstarip1)
 
     res = scipy.optimize.minimize(f, .5*nsegs_i, method='Powell', tol=0.1,
                                   options={'maxiter': 10})
     nsegs_ip1 = int(res.x)
     if nsegs_ip1 == 0:
         nsegs_ip1 = 1
     if res.success:
-        return nsegs_ip1, get_V_estimate(
-            nsegs_ip1, tref, minStartTime, maxStartTime, prior, fiducial_freq,
+        return nsegs_ip1, get_Nstar_estimate(
+            nsegs_ip1, tref, minStartTime, maxStartTime, prior,
             detector_names, earth_ephem, sun_ephem)
     else:
         raise ValueError('Optimisation unsuccesful')
 
 
-def get_parallelepiped(prior):
+def _extract_data_from_prior(prior):
+    """ Calculate the input data from the prior
+
+    Parameters
+    ----------
+    prior: dict
+
+    Returns
+    -------
+    p : ndarray
+        Matrix with columns being the edges of the uniform bounding box
+    spindowns : int
+        The number of spindowns
+    sky : bool
+        If true, search includes the sky position
+    fiducial_freq : float
+        Fiducial frequency
+
+    """
     keys = ['Alpha', 'Delta', 'F0', 'F1', 'F2']
     spindown_keys = keys[3:]
     sky_keys = keys[:2]
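In other words, the objective f minimised above picks nsegs_(i+1) so that log10 N*_(i+1) ≈ log10 N*_i + log10 R, i.e. each step down the ladder aims to grow the template count by the factor R; the 1e6 return value simply steers the Powell minimiser away from segment numbers for which the metric computation fails. A short numerical check of that criterion, with hypothetical numbers:

```python
import numpy as np

# The objective is zero exactly when Nstar_ip1 = R * Nstar_i.
R, Nstar_i = 10.0, 2.5e4
Nstar_ip1 = R * Nstar_i
print(np.abs(np.log10(Nstar_i) + np.log10(R) - np.log10(Nstar_ip1)))
# ~0 (up to floating-point rounding)
```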
@@ -110,33 +150,43 @@ def get_parallelepiped(prior):
         p.append(basex)
 
     spindowns = np.sum([np.sum(lims_keys == k) for k in spindown_keys])
     sky = any([key in lims_keys for key in sky_keys])
-    return np.array(p).T, spindowns , sky
+    if type(prior['F0']) == dict:
+        fiducial_freq = prior['F0']['upper']
+    else:
+        fiducial_freq = prior['F0']
+    return np.array(p).T, spindowns, sky, fiducial_freq
 
 
-def get_V_estimate(
-        nsegs, tref, minStartTime, maxStartTime, prior, fiducial_freq,
+def get_Nstar_estimate(
+        nsegs, tref, minStartTime, maxStartTime, prior,
         detector_names, earth_ephem, sun_ephem):
-    """ Returns V estimated from the super-sky metric
+    """ Returns N* estimated from the super-sky metric
 
     Parameters
     ----------
-    nsegs: int
+    nsegs : int
         Number of semi-coherent segments
-    tref: int
+    tref : int
         Reference time in GPS seconds
-    minStartTime, maxStartTime: int
+    minStartTime, maxStartTime : int
         Minimum and maximum SFT timestamps
-    prior: dict
+    prior : dict
         The prior dictionary
-    fiducial_freq: float
-        Fidicual frequency
-    detector_names: array
+    detector_names : array
         Array of detectors to average over
-    earth_ephem, sun_ephem: st
+    earth_ephem, sun_ephem : str
         Paths to the ephemeris files
+
+    Returns
+    -------
+    Nstar: int
+        The estimated approximate number of templates to cover the prior
+        parameter space at a mismatch of unity, assuming the normalised
+        thickness is unity.
+
     """
-    in_phys, spindowns, sky = get_parallelepiped(prior)
+    in_phys, spindowns, sky, fiducial_freq = _extract_data_from_prior(prior)
     out_rssky = np.zeros(in_phys.shape)
 
     in_phys = helper_functions.convert_array_to_gsl_matrix(in_phys)
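To make the prior handling concrete, here is a hedged sketch of the dictionary _extract_data_from_prior consumes, using the keys this module recognises ('Alpha', 'Delta', 'F0', 'F1', 'F2') and placeholder numbers. With the prior below, the uniform bounding box is built from the F0 and F1 ranges, one spindown is counted (only F1 has a range), no sky search is flagged (Alpha and Delta are fixed scalars), and the fiducial frequency is taken from the F0 upper bound, following the new return path shown above.

```python
# Hypothetical prior in the format this module expects.
prior = dict(F0=dict(type='unif', lower=29.9, upper=30.1),
             F1=dict(type='unif', lower=-1e-10, upper=0),
             F2=0,
             Alpha=1.0, Delta=0.5)

# Fiducial-frequency rule from the new return path of _extract_data_from_prior:
if type(prior['F0']) == dict:
    fiducial_freq = prior['F0']['upper']
else:
    fiducial_freq = prior['F0']
print(fiducial_freq)  # 30.1
```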
@@ -160,8 +210,9 @@ def get_V_estimate(
     ephemeris = lalpulsar.InitBarycenter(earth_ephem, sun_ephem)
     try:
         SSkyMetric = lalpulsar.ComputeSuperskyMetrics(
-            spindowns, ref_time, segments, fiducial_freq, detectors,
-            detector_weights, detector_motion, ephemeris)
+            lalpulsar.SUPERSKY_METRIC_TYPE, spindowns, ref_time, segments,
+            fiducial_freq, detectors, detector_weights, detector_motion,
+            ephemeris)
     except RuntimeError as e:
         logging.debug('Encountered run-time error {}'.format(e))
         return None, None, None
@@ -181,6 +232,6 @@ def get_V_estimate(
     dV = np.abs(np.linalg.det(parallelepiped))
 
-    V = sqrtdetG * dV
+    Nstar = sqrtdetG * dV
 
-    return V
+    return Nstar
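The final hunk makes the estimate explicit: N* is the metric determinant factor sqrtdetG (the square root of the determinant, going by its name) multiplied by the coordinate volume of the prior bounding box, dV = |det(parallelepiped)|, after the box has been mapped to the search coordinates in the elided lines. A toy sketch of just that last arithmetic step, with placeholder values standing in for the lalpulsar results:

```python
import numpy as np

# Placeholder stand-ins for quantities computed via lalpulsar in the diff.
sqrtdetG = 4.2e3                         # sqrt of the metric determinant
parallelepiped = np.array([[0.2, 0.0],   # prior box edges in the search coordinates
                           [0.0, 1.5e-3]])

dV = np.abs(np.linalg.det(parallelepiped))   # coordinate volume of the box
Nstar = sqrtdetG * dV
print(Nstar)  # ~1.26 for these toy numbers
```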