""" Classes for various types of searches using ComputeFstatistic """
import os
import sys
import itertools
import logging
import argparse
import copy
import glob
import inspect
from functools import wraps
import subprocess
from collections import OrderedDict

import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import scipy.special
import scipy.optimize
import emcee
import corner
import dill as pickle
import lal
import lalpulsar


def set_up_optional_tqdm():
    global tqdm
    try:
        from tqdm import tqdm
    except ImportError:
        def tqdm(x, *args, **kwargs):
            return x


def set_up_matplotlib_defaults():
    plt.switch_backend('Agg')
    plt.rcParams['text.usetex'] = True
    plt.rcParams['axes.formatter.useoffset'] = False


def set_up_ephemeris_configuration():
    global earth_ephem, sun_ephem
    config_file = os.path.expanduser('~')+'/.pyfstat.conf'
    if os.path.isfile(config_file):
        d = {}
        with open(config_file, 'r') as f:
            for line in f:
                k, v = line.split('=')
                k = k.replace(' ', '')
                for item in [' ', "'", '"', '\n']:
                    v = v.replace(item, '')
                d[k] = v
        earth_ephem = d['earth_ephem']
        sun_ephem = d['sun_ephem']
    else:
        logging.warning('No ~/.pyfstat.conf file found, please provide the '
                        'paths when initialising searches')
        earth_ephem = None
        sun_ephem = None


def set_up_command_line_arguments():
    global args, tqdm
    parser = argparse.ArgumentParser()
    parser.add_argument("-q", "--quite", help="Decrease output verbosity",
                        action="store_true")
    parser.add_argument("--no-interactive", help="Don't use interactive",
                        action="store_true")
    parser.add_argument("-c", "--clean", help="Don't use cached data",
                        action="store_true")
    parser.add_argument("-u", "--use-old-data", action="store_true")
    parser.add_argument('-s', "--setup-only", action="store_true")
    parser.add_argument('-n', "--no-template-counting", action="store_true")
    parser.add_argument('unittest_args', nargs='*')
    args, unknown = parser.parse_known_args()
    sys.argv[1:] = args.unittest_args
    if args.quite or args.no_interactive:
        def tqdm(x, *args, **kwargs):
            return x
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    if args.quite:
        stream_handler.setLevel(logging.WARNING)
    else:
        stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)-8s: %(message)s', datefmt='%H:%M'))
    logger.addHandler(stream_handler)

set_up_optional_tqdm()
set_up_matplotlib_defaults()
set_up_ephemeris_configuration()
set_up_command_line_arguments()


def round_to_n(x, n):
    if not x:
        return 0
    power = -int(np.floor(np.log10(abs(x)))) + (n - 1)
    factor = (10 ** power)
    return round(x * factor) / factor


def texify_float(x, d=2):
    if type(x) == str:
        return x
    x = round_to_n(x, d)
    if 0.01 < abs(x) < 100:
        return str(x)
    else:
        power = int(np.floor(np.log10(abs(x))))
        stem = np.round(x / 10**power, d)
        if d == 1:
            stem = int(stem)
        return r'${}{{\times}}10^{{{}}}$'.format(stem, power)
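
# Illustrative examples of the two helpers above (values worked out by hand,
# shown for orientation only):
#   round_to_n(0.01234, 2)  -> 0.012
#   texify_float(12.3, 2)   -> '12.0'
#   texify_float(3.2e-8, 2) -> '$3.2{\times}10^{-8}$'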


def initializer(func):
    """ Decorator function to automatically assign the parameters to self """
    names, varargs, keywords, defaults = inspect.getargspec(func)

    @wraps(func)
    def wrapper(self, *args, **kargs):
        for name, arg in list(zip(names[1:], args)) + list(kargs.items()):
            setattr(self, name, arg)

        for name, default in zip(reversed(names), reversed(defaults)):
            if not hasattr(self, name):
                setattr(self, name, default)

        func(self, *args, **kargs)

    return wrapper
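
# Illustrative sketch of how @initializer behaves (hypothetical class, not
# part of this module): the arguments and any defaults are attached to the
# instance before the decorated __init__ body runs, e.g.
#
#   class Example(object):
#       @initializer
#       def __init__(self, a, b=2):
#           pass
#
#   Example(1).a == 1 and Example(1).b == 2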


def read_par(label, outdir):
    """ Read in a .par file, returns a dictionary of the values """
    filename = '{}/{}.par'.format(outdir, label)
    d = {}
    with open(filename, 'r') as f:
        for line in f:
            if len(line.split('=')) > 1:
                key, val = line.rstrip('\n').split(' = ')
                key = key.strip()
                d[key] = np.float64(eval(val.rstrip('; ')))
    return d
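
# Illustrative .par content that read_par can parse (hypothetical values);
# each 'key = value;' line is evaluated and stored as np.float64:
#
#   F0 = 30.0;
#   F1 = -1e-10;
#   tref = 700000000;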


def get_optimal_setup(
        R, Nsegs0, tref, minStartTime, maxStartTime, DeltaOmega,
        DeltaFs, fiducial_freq, detector_names, earth_ephem, sun_ephem):
    logging.info('Calculating optimal setup for R={}, Nsegs0={}'.format(
        R, Nsegs0))

    V_0 = get_V_estimate(
        Nsegs0, tref, minStartTime, maxStartTime, DeltaOmega, DeltaFs,
        fiducial_freq, detector_names, earth_ephem, sun_ephem)
    logging.info('Stage {}, nsegs={}, V={}'.format(0, Nsegs0, V_0))

    nsegs_vals = [Nsegs0]
    V_vals = [V_0]

    i = 0
    nsegs_i = Nsegs0
    while nsegs_i > 1:
        nsegs_i, V_i = get_nsegs_ip1(
            nsegs_i, R, tref, minStartTime, maxStartTime, DeltaOmega,
            DeltaFs, fiducial_freq, detector_names, earth_ephem, sun_ephem)
        nsegs_vals.append(nsegs_i)
        V_vals.append(V_i)
        i += 1
        logging.info(
            'Stage {}, nsegs={}, V={}'.format(i, nsegs_i, V_i))

    return nsegs_vals, V_vals


def get_nsegs_ip1(
        nsegs_i, R, tref, minStartTime, maxStartTime, DeltaOmega,
        DeltaFs, fiducial_freq, detector_names, earth_ephem, sun_ephem):

    log10R = np.log10(R)
    log10Vi = np.log10(get_V_estimate(
        nsegs_i, tref, minStartTime, maxStartTime, DeltaOmega, DeltaFs,
        fiducial_freq, detector_names, earth_ephem, sun_ephem))

    def f(nsegs_ip1):
        if nsegs_ip1[0] > nsegs_i:
            return 1e6
        if nsegs_ip1[0] < 0:
            return 1e6
        nsegs_ip1 = int(nsegs_ip1[0])
        if nsegs_ip1 == 0:
            nsegs_ip1 = 1
        Vip1 = get_V_estimate(
            nsegs_ip1, tref, minStartTime, maxStartTime, DeltaOmega,
            DeltaFs, fiducial_freq, detector_names, earth_ephem, sun_ephem)
        if Vip1[0] is None:
            return 1e6
        else:
            log10Vip1 = np.log10(Vip1)
            return np.abs(log10Vi[0] + log10R - log10Vip1[0])

    res = scipy.optimize.minimize(f, .5*nsegs_i, method='Powell', tol=0.1,
                                  options={'maxiter': 10})
    nsegs_ip1 = int(res.x)
    if nsegs_ip1 == 0:
        nsegs_ip1 = 1
    if res.success:
        return nsegs_ip1, get_V_estimate(
            nsegs_ip1, tref, minStartTime, maxStartTime, DeltaOmega, DeltaFs,
            fiducial_freq, detector_names, earth_ephem, sun_ephem)
    else:
        raise ValueError('Optimisation unsuccessful')


def get_V_estimate(
        nsegs, tref, minStartTime, maxStartTime, DeltaOmega, DeltaFs,
        fiducial_freq, detector_names, earth_ephem, sun_ephem):
    """ Returns V, Vsky, Vpe estimated from the super-sky metric

    Parameters
    ----------
    nsegs: int
        Number of semi-coherent segments
    tref: int
        Reference time in GPS seconds
    minStartTime, maxStartTime: int
        Minimum and maximum SFT timestamps
    DeltaOmega: float
        Solid angle of the sky-patch
    DeltaFs: array
        Array of [DeltaF0, DeltaF1, ...], length determines the number of
        spin-down terms.
    fiducial_freq: float
        Fiducial frequency
    detector_names: array
        Array of detectors to average over
    earth_ephem, sun_ephem: str
        Paths to the ephemeris files

    """
    spindowns = len(DeltaFs) - 1
    tboundaries = np.linspace(minStartTime, maxStartTime, nsegs+1)

    ref_time = lal.LIGOTimeGPS(tref)
    segments = lal.SegListCreate()
    for j in range(len(tboundaries)-1):
        seg = lal.SegCreate(lal.LIGOTimeGPS(tboundaries[j]),
                            lal.LIGOTimeGPS(tboundaries[j+1]),
                            j)
        lal.SegListAppend(segments, seg)
    detNames = lal.CreateStringVector(*detector_names)
    detectors = lalpulsar.MultiLALDetector()
    lalpulsar.ParseMultiLALDetector(detectors, detNames)
    detector_weights = None
    detector_motion = (lalpulsar.DETMOTION_SPIN
                       + lalpulsar.DETMOTION_ORBIT)
    ephemeris = lalpulsar.InitBarycenter(earth_ephem, sun_ephem)
    try:
        SSkyMetric = lalpulsar.ComputeSuperskyMetrics(
            spindowns, ref_time, segments, fiducial_freq, detectors,
            detector_weights, detector_motion, ephemeris)
    except RuntimeError as e:
        logging.debug('Encountered run-time error {}'.format(e))
        return None, None, None

    sqrtdetG_SKY = np.sqrt(np.linalg.det(
        SSkyMetric.semi_rssky_metric.data[:2, :2]))
    sqrtdetG_PE = np.sqrt(np.linalg.det(
        SSkyMetric.semi_rssky_metric.data[2:, 2:]))

    Vsky = .5*sqrtdetG_SKY*DeltaOmega
    Vpe = sqrtdetG_PE * np.prod(DeltaFs)
    if Vsky == 0:
        Vsky = 1
    if Vpe == 0:
        Vpe = 1
    return (Vsky * Vpe, Vsky, Vpe)
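
# Illustrative call (all values are placeholders, not from a real search):
#
#   V, Vsky, Vpe = get_V_estimate(
#       nsegs=10, tref=1000000000, minStartTime=1000000000,
#       maxStartTime=1010000000, DeltaOmega=1e-3, DeltaFs=[1e-4, 1e-10],
#       fiducial_freq=100, detector_names=['H1', 'L1'],
#       earth_ephem=earth_ephem, sun_ephem=sun_ephem)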


class BaseSearchClass(object):
    """ The base search class, provides general functions """

    earth_ephem_default = earth_ephem
    sun_ephem_default = sun_ephem

    def add_log_file(self):
        """ Log output to a file, requires class to have outdir and label """
        logfilename = '{}/{}.log'.format(self.outdir, self.label)
        fh = logging.FileHandler(logfilename)
        fh.setLevel(logging.INFO)
        fh.setFormatter(logging.Formatter(
            '%(asctime)s %(levelname)-8s: %(message)s',
            datefmt='%y-%m-%d %H:%M'))
        logging.getLogger().addHandler(fh)

    def shift_matrix(self, n, dT):
        """ Generate the shift matrix

        Parameters
        ----------
        n: int
            The dimension of the shift-matrix to generate
        dT: float
            The time delta of the shift matrix

        Returns
        -------
        m: array (n, n)
            The shift matrix
        """

        m = np.zeros((n, n))
        factorial = np.math.factorial
        for i in range(n):
            for j in range(n):
                if i == j:
                    m[i, j] = 1.0
                elif i > j:
                    m[i, j] = 0.0
                else:
                    if i == 0:
                        m[i, j] = 2*np.pi*float(dT)**(j-i) / factorial(j-i)
                    else:
                        m[i, j] = float(dT)**(j-i) / factorial(j-i)
        return m

    def shift_coefficients(self, theta, dT):
        """ Shift a set of coefficients by dT

        Parameters
        ----------
        theta: array-like, shape (n,)
            vector of the expansion coefficients to transform, starting from
            the lowest degree e.g. [phi, F0, F1, ...].
        dT: float
            difference between the two reference times as tref_new - tref_old.

        Returns
        -------
        theta_new: array-like shape (n,)
            vector of the coefficients as evaluated at the new reference time.
        """

        n = len(theta)
        m = self.shift_matrix(n, dT)
        return np.dot(m, theta)
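
    # Illustration of the shift for theta = [phi, F0, F1] (following the
    # shift_matrix definition above):
    #   phi_new = phi + 2*pi*(F0*dT + F1*dT**2/2)
    #   F0_new  = F0 + F1*dT
    #   F1_new  = F1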

    def calculate_thetas(self, theta, delta_thetas, tbounds, theta0_idx=0):
        """ Calculates the set of coefficients for the post-glitch signal """
        thetas = [theta]
        for i, dt in enumerate(delta_thetas):
            if i < theta0_idx:
                pre_theta_at_ith_glitch = self.shift_coefficients(
                    thetas[0], tbounds[i+1] - self.tref)
                post_theta_at_ith_glitch = pre_theta_at_ith_glitch - dt
                thetas.insert(0, self.shift_coefficients(
                    post_theta_at_ith_glitch, self.tref - tbounds[i+1]))

            elif i >= theta0_idx:
                pre_theta_at_ith_glitch = self.shift_coefficients(
                    thetas[i], tbounds[i+1] - self.tref)
                post_theta_at_ith_glitch = pre_theta_at_ith_glitch + dt
                thetas.append(self.shift_coefficients(
                    post_theta_at_ith_glitch, self.tref - tbounds[i+1]))
        return thetas

    def generate_loudest(self):
        params = read_par(self.label, self.outdir)
        for key in ['Alpha', 'Delta', 'F0', 'F1']:
            if key not in params:
                params[key] = self.theta_prior[key]
        cmd = ('lalapps_ComputeFstatistic_v2 -a {} -d {} -f {} -s {} -D "{}"'
               ' --refTime={} --outputLoudest="{}/{}.loudest" '
               '--minStartTime={} --maxStartTime={}').format(
                    params['Alpha'], params['Delta'], params['F0'],
                    params['F1'], self.sftfilepath, params['tref'],
                    self.outdir, self.label, self.minStartTime,
                    self.maxStartTime)
        subprocess.call([cmd], shell=True)


class ComputeFstat(object):
    """ Base class providing interface to `lalpulsar.ComputeFstat` """

    earth_ephem_default = earth_ephem
    sun_ephem_default = sun_ephem

    @initializer
    def __init__(self, tref, sftfilepath=None, minStartTime=None,
                 maxStartTime=None, binary=False, transient=True, BSGL=False,
                 detector=None, minCoverFreq=None, maxCoverFreq=None,
                 earth_ephem=None, sun_ephem=None, injectSources=None
                 ):
        """
        Parameters
        ----------
        tref: int
            GPS seconds of the reference time.
        sftfilepath: str
            File pattern to match SFTs
        minStartTime, maxStartTime: float GPStime
            Only use SFTs with timestamps starting from (including, excluding)
            this epoch
        binary: bool
            If true, search over binary parameters.
        transient: bool
            If true, allow for the Fstat to be computed over a transient range.
        BSGL: bool
            If true, compute the BSGL rather than the twoF value.
        detector: str
            Two character reference to the data to use, specify None for no
            constraint.
        minCoverFreq, maxCoverFreq: float
            The min and max cover frequency passed to CreateFstatInput, if
            either is None the range of frequencies in the SFT less 1Hz is
            used.
        earth_ephem, sun_ephem: str
            Paths of the two files containing positions of Earth and Sun,
            respectively at evenly spaced times, as passed to CreateFstatInput.
            If None defaults defined in BaseSearchClass will be used.

        """

        if earth_ephem is None:
            self.earth_ephem = self.earth_ephem_default
        if sun_ephem is None:
            self.sun_ephem = self.sun_ephem_default

        self.init_computefstatistic_single_point()

    def get_SFTCatalog(self):
        if hasattr(self, 'SFTCatalog'):
            return
        logging.info('Initialising SFTCatalog')
        constraints = lalpulsar.SFTConstraints()
        if self.detector:
            constraints.detector = self.detector
        if self.minStartTime:
            constraints.minStartTime = lal.LIGOTimeGPS(self.minStartTime)
        if self.maxStartTime:
            constraints.maxStartTime = lal.LIGOTimeGPS(self.maxStartTime)

        logging.info('Loading data matching pattern {}'.format(
                     self.sftfilepath))
        SFTCatalog = lalpulsar.SFTdataFind(self.sftfilepath, constraints)
        detector_names = list(set([d.header.name for d in SFTCatalog.data]))
        self.detector_names = detector_names
        SFT_timestamps = [d.header.epoch for d in SFTCatalog.data]
        if args.quite is False and args.no_interactive is False:
            try:
                from bashplotlib.histogram import plot_hist
                print('Data timestamps histogram:')
                plot_hist(SFT_timestamps, height=5, bincount=50)
            except (ImportError, IOError):
                pass
        if len(detector_names) == 0:
            raise ValueError('No data loaded.')
        logging.info('Loaded {} data files from detectors {}'.format(
            len(SFT_timestamps), detector_names))
        logging.info('Data spans from {} ({}) to {} ({})'.format(
            int(SFT_timestamps[0]),
            subprocess.check_output('lalapps_tconvert {}'.format(
                int(SFT_timestamps[0])), shell=True).rstrip('\n'),
            int(SFT_timestamps[-1]),
            subprocess.check_output('lalapps_tconvert {}'.format(
                int(SFT_timestamps[-1])), shell=True).rstrip('\n')))
        self.SFTCatalog = SFTCatalog

    def init_computefstatistic_single_point(self):
        """ Initilisation step of run_computefstatistic for a single point """

        self.get_SFTCatalog()

        logging.info('Initialising ephems')
        ephems = lalpulsar.InitBarycenter(self.earth_ephem, self.sun_ephem)

        logging.info('Initialising FstatInput')
        dFreq = 0
        if self.transient:
            self.whatToCompute = lalpulsar.FSTATQ_ATOMS_PER_DET
        else:
            self.whatToCompute = lalpulsar.FSTATQ_2F

        FstatOAs = lalpulsar.FstatOptionalArgs()
        FstatOAs.randSeed = lalpulsar.FstatOptionalArgsDefaults.randSeed
        FstatOAs.SSBprec = lalpulsar.FstatOptionalArgsDefaults.SSBprec
        FstatOAs.Dterms = lalpulsar.FstatOptionalArgsDefaults.Dterms
        FstatOAs.runningMedianWindow = lalpulsar.FstatOptionalArgsDefaults.runningMedianWindow
        FstatOAs.FstatMethod = lalpulsar.FstatOptionalArgsDefaults.FstatMethod
        FstatOAs.InjectSqrtSX = lalpulsar.FstatOptionalArgsDefaults.injectSqrtSX
        FstatOAs.assumeSqrtSX = lalpulsar.FstatOptionalArgsDefaults.assumeSqrtSX
        FstatOAs.prevInput = lalpulsar.FstatOptionalArgsDefaults.prevInput
        FstatOAs.collectTiming = lalpulsar.FstatOptionalArgsDefaults.collectTiming

        if hasattr(self, 'injectSources') and type(self.injectSources) == dict:
            logging.info('Injecting source with params: {}'.format(
                self.injectSources))
            PPV = lalpulsar.CreatePulsarParamsVector(1)
            PP = PPV.data[0]
            PP.Amp.h0 = self.injectSources['h0']
            PP.Amp.cosi = self.injectSources['cosi']
            PP.Amp.phi0 = self.injectSources['phi0']
            PP.Amp.psi = self.injectSources['psi']
            PP.Doppler.Alpha = self.injectSources['Alpha']
            PP.Doppler.Delta = self.injectSources['Delta']
            PP.Doppler.fkdot = np.array(self.injectSources['fkdot'])
            PP.Doppler.refTime = self.tref
            if 't0' not in self.injectSources:
                PP.Transient.type = lalpulsar.TRANSIENT_NONE
            FstatOAs.injectSources = PPV
        else:
            FstatOAs.injectSources = lalpulsar.FstatOptionalArgsDefaults.injectSources

        if self.minCoverFreq is None or self.maxCoverFreq is None:
            fAs = [d.header.f0 for d in self.SFTCatalog.data]
            fBs = [d.header.f0 + (d.numBins-1)*d.header.deltaF
                   for d in self.SFTCatalog.data]
            self.minCoverFreq = np.min(fAs) + 0.5
            self.maxCoverFreq = np.max(fBs) - 0.5
            logging.info('Min/max cover freqs not provided, using '
                         '{} and {}, est. from SFTs'.format(
                             self.minCoverFreq, self.maxCoverFreq))

        self.FstatInput = lalpulsar.CreateFstatInput(self.SFTCatalog,
                                                     self.minCoverFreq,
                                                     self.maxCoverFreq,
                                                     dFreq,
                                                     ephems,
                                                     FstatOAs
                                                     )

        logging.info('Initialising PulsarDopplerParams')
        PulsarDopplerParams = lalpulsar.PulsarDopplerParams()
        PulsarDopplerParams.refTime = self.tref
        PulsarDopplerParams.Alpha = 1
        PulsarDopplerParams.Delta = 1
        PulsarDopplerParams.fkdot = np.array([0, 0, 0, 0, 0, 0, 0])
        self.PulsarDopplerParams = PulsarDopplerParams

        logging.info('Initialising FstatResults')
        self.FstatResults = lalpulsar.FstatResults()

        if self.BSGL:
            if len(self.detector_names) < 2:
                raise ValueError("Can't use BSGL with single detector data")
            else:
                logging.info('Initialising BSGL')

            # Tuning parameters - to be reviewed
            numDetectors = 2
            if hasattr(self, 'nsegs'):
                p_val_threshold = 1e-6
                Fstar0s = np.linspace(0, 1000, 10000)
                p_vals = scipy.special.gammaincc(2*self.nsegs, Fstar0s)
                Fstar0 = Fstar0s[np.argmin(np.abs(p_vals - p_val_threshold))]
                if Fstar0 == Fstar0s[-1]:
                    raise ValueError('Max Fstar0 exceeded')
            else:
                Fstar0 = 15.
            logging.info('Using Fstar0 of {:1.2f}'.format(Fstar0))
            oLGX = np.zeros(10)
            oLGX[:numDetectors] = 1./numDetectors
            self.BSGLSetup = lalpulsar.CreateBSGLSetup(numDetectors,
                                                       Fstar0,
                                                       oLGX,
                                                       True,
                                                       1)
            self.twoFX = np.zeros(10)
            self.whatToCompute = (self.whatToCompute +
                                  lalpulsar.FSTATQ_2F_PER_DET)

        if self.transient:
            logging.info('Initialising transient parameters')
            self.windowRange = lalpulsar.transientWindowRange_t()
            self.windowRange.type = lalpulsar.TRANSIENT_RECTANGULAR
            self.windowRange.t0Band = 0
            self.windowRange.dt0 = 1
            self.windowRange.tauBand = 0
            self.windowRange.dtau = 1

    def compute_fullycoherent_det_stat_single_point(
            self, F0, F1, F2, Alpha, Delta, asini=None, period=None, ecc=None,
            tp=None, argp=None):
        """ Compute the fully-coherent det. statistic at a single point """

        return self.run_computefstatistic_single_point(
            self.minStartTime, self.maxStartTime, F0, F1, F2, Alpha, Delta,
            asini, period, ecc, tp, argp)

    def run_computefstatistic_single_point(self, tstart, tend, F0, F1,
                                           F2, Alpha, Delta, asini=None,
                                           period=None, ecc=None, tp=None,
                                           argp=None):
        """ Returns twoF or ln(BSGL) fully-coherently at a single point """

        self.PulsarDopplerParams.fkdot = np.array([F0, F1, F2, 0, 0, 0, 0])
        self.PulsarDopplerParams.Alpha = Alpha
        self.PulsarDopplerParams.Delta = Delta
        if self.binary:
            self.PulsarDopplerParams.asini = asini
            self.PulsarDopplerParams.period = period
            self.PulsarDopplerParams.ecc = ecc
            self.PulsarDopplerParams.tp = tp
            self.PulsarDopplerParams.argp = argp

        lalpulsar.ComputeFstat(self.FstatResults,
                               self.FstatInput,
                               self.PulsarDopplerParams,
                               1,
                               self.whatToCompute
                               )

        if self.transient is False:
            if self.BSGL is False:
                return self.FstatResults.twoF[0]

            twoF = np.float(self.FstatResults.twoF[0])
            self.twoFX[0] = self.FstatResults.twoFPerDet(0)
            self.twoFX[1] = self.FstatResults.twoFPerDet(1)
            log10_BSGL = lalpulsar.ComputeBSGL(twoF, self.twoFX,
                                               self.BSGLSetup)
            return log10_BSGL/np.log10(np.exp(1))

        self.windowRange.t0 = int(tstart)  # TYPE UINT4
        self.windowRange.tau = int(tend - tstart)  # TYPE UINT4

        FS = lalpulsar.ComputeTransientFstatMap(
            self.FstatResults.multiFatoms[0], self.windowRange, False)

        if self.BSGL is False:
            return 2*FS.F_mn.data[0][0]

        FstatResults_single = copy.copy(self.FstatResults)
        FstatResults_single.length = 1
        FstatResults_single.data = self.FstatResults.multiFatoms[0].data[0]
        FS0 = lalpulsar.ComputeTransientFstatMap(
            FstatResults_single.multiFatoms[0], self.windowRange, False)
        FstatResults_single.data = self.FstatResults.multiFatoms[0].data[1]
        FS1 = lalpulsar.ComputeTransientFstatMap(
            FstatResults_single.multiFatoms[0], self.windowRange, False)

        self.twoFX[0] = 2*FS0.F_mn.data[0][0]
        self.twoFX[1] = 2*FS1.F_mn.data[0][0]
        log10_BSGL = lalpulsar.ComputeBSGL(
                2*FS.F_mn.data[0][0], self.twoFX, self.BSGLSetup)

        return log10_BSGL/np.log10(np.exp(1))

    def calculate_twoF_cumulative(self, F0, F1, F2, Alpha, Delta, asini=None,
                                  period=None, ecc=None, tp=None, argp=None,
                                  tstart=None, tend=None, npoints=1000,
                                  minfraction=0.01, maxfraction=1):
        """ Calculate the cumulative twoF along the obseration span """
        duration = tend - tstart
        tstart = tstart + minfraction*duration
        taus = np.linspace(minfraction*duration, maxfraction*duration, npoints)
        twoFs = []
        if self.transient is False:
            self.transient = True
            self.init_computefstatistic_single_point()
        for tau in taus:
            twoFs.append(self.run_computefstatistic_single_point(
                tstart=tstart, tend=tstart+tau, F0=F0, F1=F1, F2=F2,
                Alpha=Alpha, Delta=Delta, asini=asini, period=period, ecc=ecc,
                tp=tp, argp=argp))

        return taus, np.array(twoFs)
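
    # Illustrative usage (parameter values are placeholders): for a
    # ComputeFstat instance `search`, the cumulative twoF of a fixed template
    # over the data span can be obtained via
    #   taus, twoFs = search.calculate_twoF_cumulative(
    #       F0=30.0, F1=-1e-10, F2=0, Alpha=1.0, Delta=0.5,
    #       tstart=minStartTime, tend=maxStartTime)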

    def plot_twoF_cumulative(self, label, outdir, ax=None, c='k', savefig=True,
                             title=None, **kwargs):

        taus, twoFs = self.calculate_twoF_cumulative(**kwargs)
        if ax is None:
            fig, ax = plt.subplots()
        ax.plot(taus/86400., twoFs, label=label, color=c)
        ax.set_xlabel(r'Days from $t_{{\rm start}}={:.0f}$'.format(
            kwargs['tstart']))
        if self.BSGL:
            ax.set_ylabel(r'$\log_{10}(\mathrm{BSGL})_{\rm cumulative}$')
        else:
            ax.set_ylabel(r'$\widetilde{2\mathcal{F}}_{\rm cumulative}$')
        ax.set_xlim(0, taus[-1]/86400)
        if title:
            ax.set_title(title)
        if savefig:
            plt.tight_layout()
            plt.savefig('{}/{}_twoFcumulative.png'.format(outdir, label))
            return taus, twoFs
        else:
            return ax


class SemiCoherentSearch(BaseSearchClass, ComputeFstat):
    """ A semi-coherent search """

    @initializer
    def __init__(self, label, outdir, tref, nsegs=None, sftfilepath=None,
                 binary=False, BSGL=False, minStartTime=None,
                 maxStartTime=None, minCoverFreq=None, maxCoverFreq=None,
                 detector=None, earth_ephem=None, sun_ephem=None,
                 injectSources=None):
        """
        Parameters
        ----------
        label, outdir: str
            A label and directory to read/write data from/to.
        tref, minStartTime, maxStartTime: int
            GPS seconds of the reference time, and start and end of the data.
        nsegs: int
            The (fixed) number of segments
        sftfilepath: str
            File pattern to match SFTs

        For all other parameters, see pyfstat.ComputeFstat.
        """

        self.fs_file_name = "{}/{}_FS.dat".format(self.outdir, self.label)
        if self.earth_ephem is None:
            self.earth_ephem = self.earth_ephem_default
        if self.sun_ephem is None:
            self.sun_ephem = self.sun_ephem_default
        self.transient = True
        self.init_computefstatistic_single_point()
        self.init_semicoherent_parameters()

    def init_semicoherent_parameters(self):
        logging.info(('Initialising semicoherent parameters from {} to {} in'
                      ' {} segments').format(
            self.minStartTime, self.maxStartTime, self.nsegs))
        self.transient = True
        self.whatToCompute = lalpulsar.FSTATQ_2F+lalpulsar.FSTATQ_ATOMS_PER_DET
        self.tboundaries = np.linspace(self.minStartTime, self.maxStartTime,
                                       self.nsegs+1)

    def run_semi_coherent_computefstatistic_single_point(
            self, F0, F1, F2, Alpha, Delta, asini=None,
            period=None, ecc=None, tp=None, argp=None):
        """ Returns twoF or ln(BSGL) semi-coherently at a single point """

        self.PulsarDopplerParams.fkdot = np.array([F0, F1, F2, 0, 0, 0, 0])
        self.PulsarDopplerParams.Alpha = Alpha
        self.PulsarDopplerParams.Delta = Delta
        if self.binary:
            self.PulsarDopplerParams.asini = asini
            self.PulsarDopplerParams.period = period
            self.PulsarDopplerParams.ecc = ecc
            self.PulsarDopplerParams.tp = tp
            self.PulsarDopplerParams.argp = argp

        lalpulsar.ComputeFstat(self.FstatResults,
                               self.FstatInput,
                               self.PulsarDopplerParams,
                               1,
                               self.whatToCompute
                               )

        if self.transient is False:
            if self.BSGL is False:
                return self.FstatResults.twoF[0]

            twoF = np.float(self.FstatResults.twoF[0])
            self.twoFX[0] = self.FstatResults.twoFPerDet(0)
            self.twoFX[1] = self.FstatResults.twoFPerDet(1)
            log10_BSGL = lalpulsar.ComputeBSGL(twoF, self.twoFX,
                                               self.BSGLSetup)
            return log10_BSGL/np.log10(np.exp(1))

        detStat = 0
        for tstart, tend in zip(self.tboundaries[:-1], self.tboundaries[1:]):
            self.windowRange.t0 = int(tstart)  # TYPE UINT4
            self.windowRange.tau = int(tend - tstart)  # TYPE UINT4

            FS = lalpulsar.ComputeTransientFstatMap(
                self.FstatResults.multiFatoms[0], self.windowRange, False)

            if self.BSGL is False:
                detStat += 2*FS.F_mn.data[0][0]
                continue

            FstatResults_single = copy.copy(self.FstatResults)
            FstatResults_single.length = 1
            FstatResults_single.data = self.FstatResults.multiFatoms[0].data[0]
            FS0 = lalpulsar.ComputeTransientFstatMap(
                FstatResults_single.multiFatoms[0], self.windowRange, False)
            FstatResults_single.data = self.FstatResults.multiFatoms[0].data[1]
            FS1 = lalpulsar.ComputeTransientFstatMap(
                FstatResults_single.multiFatoms[0], self.windowRange, False)

            self.twoFX[0] = 2*FS0.F_mn.data[0][0]
            self.twoFX[1] = 2*FS1.F_mn.data[0][0]
            log10_BSGL = lalpulsar.ComputeBSGL(
                    2*FS.F_mn.data[0][0], self.twoFX, self.BSGLSetup)

            detStat += log10_BSGL/np.log10(np.exp(1))

        return detStat


class SemiCoherentGlitchSearch(BaseSearchClass, ComputeFstat):
    """ A semi-coherent glitch search

    This implements a basic semi-coherent glitch F-stat in which the data
    is divided into segments either side of the proposed glitches and the
    fully-coherent F-stat in each segment is summed to give the semi-coherent
    F-stat
    """

    @initializer
    def __init__(self, label, outdir, tref, minStartTime, maxStartTime,
                 nglitch=0, sftfilepath=None, theta0_idx=0, BSGL=False,
                 minCoverFreq=None, maxCoverFreq=None,
                 detector=None, earth_ephem=None, sun_ephem=None):
        """
        Parameters
        ----------
        label, outdir: str
            A label and directory to read/write data from/to.
        tref, minStartTime, maxStartTime: int
            GPS seconds of the reference time, and start and end of the data.
        nglitch: int
            The (fixed) number of glitches; this can be zero, but occasionally
            this causes issues (in which case just use ComputeFstat).
        sftfilepath: str
            File pattern to match SFTs
        theta0_idx: int
            Index (zero-based) of which segment the theta refers to - useful
            if providing a tight prior on theta to allow the signal to jump
            to theta (and not just from)

        For all other parameters, see pyfstat.ComputeFstat.
        """

        self.fs_file_name = "{}/{}_FS.dat".format(self.outdir, self.label)
        if self.earth_ephem is None:
            self.earth_ephem = self.earth_ephem_default
        if self.sun_ephem is None:
            self.sun_ephem = self.sun_ephem_default
        self.transient = True
        self.binary = False
        self.init_computefstatistic_single_point()

    def compute_nglitch_fstat(self, F0, F1, F2, Alpha, Delta, *args):
        """ Returns the semi-coherent glitch summed twoF """

        args = list(args)
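
        # Expected ordering of the trailing arguments (illustration for
        # nglitch=2): all glitch frequency jumps first, then the spin-down
        # jumps, then the glitch times, i.e.
        #   (dF0_0, dF0_1, dF1_0, dF1_1, tglitch_0, tglitch_1),
        # which is how the negative-index slices below unpack them.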
        tboundaries = ([self.minStartTime] + args[-self.nglitch:]
                       + [self.maxStartTime])
        delta_F0s = args[-3*self.nglitch:-2*self.nglitch]
        delta_F1s = args[-2*self.nglitch:-self.nglitch]
        delta_F2 = np.zeros(len(delta_F0s))
        delta_phi = np.zeros(len(delta_F0s))
        theta = [0, F0, F1, F2]
        delta_thetas = np.atleast_2d(
                np.array([delta_phi, delta_F0s, delta_F1s, delta_F2]).T)

        thetas = self.calculate_thetas(theta, delta_thetas, tboundaries,
                                       theta0_idx=self.theta0_idx)

        twoFSum = 0
        for i, theta_i_at_tref in enumerate(thetas):
            ts, te = tboundaries[i], tboundaries[i+1]

            twoFVal = self.run_computefstatistic_single_point(
                ts, te, theta_i_at_tref[1], theta_i_at_tref[2],
                theta_i_at_tref[3], Alpha, Delta)
            twoFSum += twoFVal

        if np.isfinite(twoFSum):
            return twoFSum
        else:
            return -np.inf

    def compute_glitch_fstat_single(self, F0, F1, F2, Alpha, Delta, delta_F0,
                                    delta_F1, tglitch):
        """ Returns the semi-coherent glitch summed twoF for nglitch=1

        Note: OBSOLETE, used only for testing
        """

        theta = [F0, F1, F2]
        delta_theta = [delta_F0, delta_F1, 0]
        tref = self.tref

        theta_at_glitch = self.shift_coefficients(theta, tglitch - tref)
        theta_post_glitch_at_glitch = theta_at_glitch + delta_theta
        theta_post_glitch = self.shift_coefficients(
            theta_post_glitch_at_glitch, tref - tglitch)

        twoFsegA = self.run_computefstatistic_single_point(
            self.minStartTime, tglitch, theta[0], theta[1], theta[2], Alpha,
            Delta)

        if tglitch == self.maxStartTime:
            return twoFsegA

        twoFsegB = self.run_computefstatistic_single_point(
            tglitch, self.maxStartTime, theta_post_glitch[0],
            theta_post_glitch[1], theta_post_glitch[2], Alpha,
            Delta)

        return twoFsegA + twoFsegB


class MCMCSearch(BaseSearchClass):
    """ MCMC search using ComputeFstat"""
    @initializer
    def __init__(self, label, outdir, sftfilepath, theta_prior, tref,
                 minStartTime, maxStartTime, nsteps=[100, 100],
                 nwalkers=100, ntemps=1, log10temperature_min=-5,
                 theta_initial=None, scatter_val=1e-10,
                 binary=False, BSGL=False, minCoverFreq=None,
                 maxCoverFreq=None, detector=None, earth_ephem=None,
                 sun_ephem=None, injectSources=None):
        """
        Parameters
        label, outdir: str
            A label and directory to read/write data from/to
        sftfilepath: str
            File pattern to match SFTs
        theta_prior: dict
            Dictionary of priors and fixed values for the search parameters.
            For each parameter (key of the dict), if it is to be held fixed
            the value should be a constant float; if it is to be searched, the
            value should be a dictionary defining the prior.
        theta_initial: dict, array, (None)
            Either a dictionary of distributions from which to draw the
            initial walkers, an array (from which the walkers will be
            scattered by scatter_val), or None, in which case the prior is
            used.
        tref, minStartTime, maxStartTime: int
            GPS seconds of the reference time, start time and end time
        nsteps: list (m,)
            List specifying the number of steps to take, the last two entries
            give the nburn and nprod of the 'production' run, all entries
            before are for iterative initialisation steps (usually just one)
            e.g. [1000, 1000, 500].
        nwalkers, ntemps: int
            The number of walkers and temperatures to use in the parallel
            tempered PTSampler.
        log10temperature_min: float < 0
            The log_10(tmin) value, the set of betas passed to PTSampler are
            generated from np.logspace(0, log10temperature_min, ntemps).
        binary: bool
            If true, search over binary parameters
        detector: str
            Two character reference to the data to use, specify None for no
            constraint.
        minCoverFreq, maxCoverFreq: float
            Minimum and maximum instantaneous frequency which will be covered
            over the SFT time span as passed to CreateFstatInput
        earth_ephem, sun_ephem: str
            Paths of the two files containing positions of Earth and Sun,
            respectively at evenly spaced times, as passed to CreateFstatInput
            If None defaults defined in BaseSearchClass will be used

        """

        if os.path.isdir(outdir) is False:
            os.mkdir(outdir)
        self.add_log_file()
        logging.info(
            'Set-up MCMC search for model {} on data {}'.format(
                self.label, self.sftfilepath))
        self.pickle_path = '{}/{}_saved_data.p'.format(self.outdir, self.label)
        self.unpack_input_theta()
        self.ndim = len(self.theta_keys)
        if self.log10temperature_min:
            self.betas = np.logspace(0, self.log10temperature_min, self.ntemps)
        else:
            self.betas = None

        if earth_ephem is None:
            self.earth_ephem = self.earth_ephem_default
        if sun_ephem is None:
            self.sun_ephem = self.sun_ephem_default

        if args.clean and os.path.isfile(self.pickle_path):
            os.rename(self.pickle_path, self.pickle_path+".old")

        self.log_input()

    def log_input(self):
        logging.info('theta_prior = {}'.format(self.theta_prior))
        logging.info('nwalkers={}'.format(self.nwalkers))
        logging.info('scatter_val = {}'.format(self.scatter_val))
        logging.info('nsteps = {}'.format(self.nsteps))
        logging.info('ntemps = {}'.format(self.ntemps))
        logging.info('log10temperature_min = {}'.format(
            self.log10temperature_min))

    def inititate_search_object(self):
        logging.info('Setting up search object')
        self.search = ComputeFstat(
            tref=self.tref, sftfilepath=self.sftfilepath,
            minCoverFreq=self.minCoverFreq, maxCoverFreq=self.maxCoverFreq,
            earth_ephem=self.earth_ephem, sun_ephem=self.sun_ephem,
            detector=self.detector, BSGL=self.BSGL, transient=False,
            minStartTime=self.minStartTime, maxStartTime=self.maxStartTime,
            binary=self.binary, injectSources=self.injectSources)

    def logp(self, theta_vals, theta_prior, theta_keys, search):
        H = [self.generic_lnprior(**theta_prior[key])(p) for p, key in
             zip(theta_vals, theta_keys)]
        return np.sum(H)

    def logl(self, theta, search):
        for j, theta_i in enumerate(self.theta_idxs):
            self.fixed_theta[theta_i] = theta[j]
        FS = search.compute_fullycoherent_det_stat_single_point(
            *self.fixed_theta)
        return FS

    def unpack_input_theta(self):
        full_theta_keys = ['F0', 'F1', 'F2', 'Alpha', 'Delta']
        if self.binary:
            full_theta_keys += [
                'asini', 'period', 'ecc', 'tp', 'argp']
        full_theta_keys_copy = copy.copy(full_theta_keys)

        full_theta_symbols = ['$f$', '$\dot{f}$', '$\ddot{f}$', r'$\alpha$',
                              r'$\delta$']
        if self.binary:
            full_theta_symbols += [
                'asini', 'period', 'ecc', 'tp', 'argp']

        self.theta_keys = []
        fixed_theta_dict = {}
        for key, val in self.theta_prior.iteritems():
            if type(val) is dict:
                fixed_theta_dict[key] = 0
                self.theta_keys.append(key)
            elif type(val) in [float, int, np.float64]:
                fixed_theta_dict[key] = val
            else:
                raise ValueError(
                    'Type {} of {} in theta not recognised'.format(
                        type(val), key))
            full_theta_keys_copy.pop(full_theta_keys_copy.index(key))

        if len(full_theta_keys_copy) > 0:
            raise ValueError(('Input dictionary `theta` is missing the '
                              'following keys: {}').format(
                                  full_theta_keys_copy))

        self.fixed_theta = [fixed_theta_dict[key] for key in full_theta_keys]
        self.theta_idxs = [full_theta_keys.index(k) for k in self.theta_keys]
        self.theta_symbols = [full_theta_symbols[i] for i in self.theta_idxs]

        idxs = np.argsort(self.theta_idxs)
        self.theta_idxs = [self.theta_idxs[i] for i in idxs]
        self.theta_symbols = [self.theta_symbols[i] for i in idxs]
        self.theta_keys = [self.theta_keys[i] for i in idxs]

    def check_initial_points(self, p0):
        for nt in range(self.ntemps):
            logging.info('Checking temperature {} chains'.format(nt))
            initial_priors = np.array([
                self.logp(p, self.theta_prior, self.theta_keys, self.search)
                for p in p0[nt]])
            number_of_initial_out_of_bounds = sum(initial_priors == -np.inf)

            if number_of_initial_out_of_bounds > 0:
                logging.warning(
                    'Of {} initial values, {} are -np.inf due to the prior'
                    .format(len(initial_priors),
                            number_of_initial_out_of_bounds))

                p0 = self.generate_new_p0_to_fix_initial_points(
                    p0, nt, initial_priors)

    def generate_new_p0_to_fix_initial_points(self, p0, nt, initial_priors):
        logging.info('Attempting to correct initial values')
        idxs = np.arange(self.nwalkers)[initial_priors == -np.inf]
        count = 0
        while sum(initial_priors == -np.inf) > 0 and count < 100:
            for j in idxs:
                p0[nt][j] = (p0[nt][np.random.randint(0, self.nwalkers)]*(
                             1+np.random.normal(0, 1e-10, self.ndim)))
            initial_priors = np.array([
                self.logp(p, self.theta_prior, self.theta_keys,
                          self.search)
                for p in p0[nt]])
            count += 1

        if sum(initial_priors == -np.inf) > 0:
            logging.info('Failed to fix initial priors')
        else:
            logging.info('Succeeded in fixing initial priors')

        return p0

    def run_sampler_with_progress_bar(self, sampler, ns, p0):
        for result in tqdm(sampler.sample(p0, iterations=ns), total=ns):
            pass
        return sampler

    def run(self, proposal_scale_factor=2, create_plots=True, **kwargs):

        self.old_data_is_okay_to_use = self.check_old_data_is_okay_to_use()
        if self.old_data_is_okay_to_use is True:
            logging.warning('Using saved data from {}'.format(
                self.pickle_path))
            d = self.get_saved_data()
            self.sampler = d['sampler']
            self.samples = d['samples']
            self.lnprobs = d['lnprobs']
            self.lnlikes = d['lnlikes']
            return

        self.inititate_search_object()

        sampler = emcee.PTSampler(
            self.ntemps, self.nwalkers, self.ndim, self.logl, self.logp,
            logpargs=(self.theta_prior, self.theta_keys, self.search),
            loglargs=(self.search,), betas=self.betas, a=proposal_scale_factor)

        p0 = self.generate_initial_p0()
        p0 = self.apply_corrections_to_p0(p0)
        self.check_initial_points(p0)

        ninit_steps = len(self.nsteps) - 2
        for j, n in enumerate(self.nsteps[:-2]):
            logging.info('Running {}/{} initialisation with {} steps'.format(
                j, ninit_steps, n))
            sampler = self.run_sampler_with_progress_bar(sampler, n, p0)
            logging.info("Mean acceptance fraction: {}"
                         .format(np.mean(sampler.acceptance_fraction, axis=1)))
            if self.ntemps > 1:
                logging.info("Tswap acceptance fraction: {}"
                             .format(sampler.tswap_acceptance_fraction))
            if create_plots:
                fig, axes = self.plot_walkers(sampler,
                                              symbols=self.theta_symbols,
                                              **kwargs)
                fig.tight_layout()
                fig.savefig('{}/{}_init_{}_walkers.png'.format(
                    self.outdir, self.label, j), dpi=200)

            p0 = self.get_new_p0(sampler)
            p0 = self.apply_corrections_to_p0(p0)
            self.check_initial_points(p0)
            sampler.reset()

        if len(self.nsteps) > 1:
            nburn = self.nsteps[-2]
        else:
            nburn = 0
        nprod = self.nsteps[-1]
        logging.info('Running final burn and prod with {} steps'.format(
            nburn+nprod))
        sampler = self.run_sampler_with_progress_bar(sampler, nburn+nprod, p0)
        logging.info("Mean acceptance fraction: {}"
                     .format(np.mean(sampler.acceptance_fraction, axis=1)))
        if self.ntemps > 1:
            logging.info("Tswap acceptance fraction: {}"
                         .format(sampler.tswap_acceptance_fraction))

        if create_plots:
            fig, axes = self.plot_walkers(sampler, symbols=self.theta_symbols,
                                          burnin_idx=nburn, **kwargs)
            fig.tight_layout()
            fig.savefig('{}/{}_walkers.png'.format(self.outdir, self.label),
                        dpi=200)

        samples = sampler.chain[0, :, nburn:, :].reshape((-1, self.ndim))
        lnprobs = sampler.lnprobability[0, :, nburn:].reshape((-1))
        lnlikes = sampler.lnlikelihood[0, :, nburn:].reshape((-1))
        self.sampler = sampler
        self.samples = samples
        self.lnprobs = lnprobs
        self.lnlikes = lnlikes
        self.save_data(sampler, samples, lnprobs, lnlikes)

    def plot_corner(self, figsize=(7, 7),  tglitch_ratio=False,
                    add_prior=False, nstds=None, label_offset=0.4,
                    dpi=300, rc_context={}, **kwargs):

        if self.ndim < 2:
            with plt.rc_context(rc_context):
                fig, ax = plt.subplots(figsize=figsize)
                ax.hist(self.samples, bins=50, histtype='stepfilled')
                ax.set_xlabel(self.theta_symbols[0])

            fig.savefig('{}/{}_corner.png'.format(
                self.outdir, self.label), dpi=dpi)
            return

        with plt.rc_context(rc_context):
            fig, axes = plt.subplots(self.ndim, self.ndim,
                                     figsize=figsize)

            samples_plt = copy.copy(self.samples)
            theta_symbols_plt = copy.copy(self.theta_symbols)
            theta_symbols_plt = [s.replace('_{glitch}', r'_\textrm{glitch}')
                                 for s in theta_symbols_plt]

            if tglitch_ratio:
                for j, k in enumerate(self.theta_keys):
                    if k == 'tglitch':
                        s = samples_plt[:, j]
                        samples_plt[:, j] = (
                            s - self.minStartTime)/(
                                self.maxStartTime - self.minStartTime)
                        theta_symbols_plt[j] = r'$R_{\textrm{glitch}}$'

            if type(nstds) is int and 'range' not in kwargs:
                _range = []
                for j, s in enumerate(samples_plt.T):
                    median = np.median(s)
                    std = np.std(s)
                    _range.append((median - nstds*std, median + nstds*std))
            else:
                _range = None

            fig_triangle = corner.corner(samples_plt,
                                         labels=theta_symbols_plt,
                                         fig=fig,
                                         bins=50,
                                         max_n_ticks=4,
                                         plot_contours=True,
                                         plot_datapoints=True,
                                         label_kwargs={'fontsize': 8},
                                         data_kwargs={'alpha': 0.1,
                                                      'ms': 0.5},
                                         range=_range,
                                         **kwargs)

            axes_list = fig_triangle.get_axes()
            axes = np.array(axes_list).reshape(self.ndim, self.ndim)
            plt.draw()
            for ax in axes[:, 0]:
                ax.yaxis.set_label_coords(-label_offset, 0.5)
            for ax in axes[-1, :]:
                ax.xaxis.set_label_coords(0.5, -label_offset)
            for ax in axes_list:
                ax.set_rasterized(True)
                ax.set_rasterization_zorder(-10)
            plt.tight_layout(h_pad=0.0, w_pad=0.0)
            fig.subplots_adjust(hspace=0.05, wspace=0.05)

            if add_prior:
                self.add_prior_to_corner(axes, samples_plt)

            fig_triangle.savefig('{}/{}_corner.png'.format(
                self.outdir, self.label), dpi=dpi)

    def add_prior_to_corner(self, axes, samples):
        for i, key in enumerate(self.theta_keys):
            ax = axes[i][i]
            xlim = ax.get_xlim()
            s = samples[:, i]
            prior = self.generic_lnprior(**self.theta_prior[key])
            x = np.linspace(s.min(), s.max(), 100)
            ax2 = ax.twinx()
            ax2.get_yaxis().set_visible(False)
            ax2.plot(x, [prior(xi) for xi in x], '-r')
            ax.set_xlim(xlim)

    def plot_prior_posterior(self, normal_stds=2):
        """ Plot the posterior in the context of the prior """
        fig, axes = plt.subplots(nrows=self.ndim, figsize=(8, 4*self.ndim))
        N = 1000
        from scipy.stats import gaussian_kde

        for i, (ax, key) in enumerate(zip(axes, self.theta_keys)):
            prior_dict = self.theta_prior[key]
            prior_func = self.generic_lnprior(**prior_dict)
            if prior_dict['type'] == 'unif':
                x = np.linspace(prior_dict['lower'], prior_dict['upper'], N)
                prior = prior_func(x)
                prior[0] = 0
                prior[-1] = 0
            elif prior_dict['type'] == 'norm':
                lower = prior_dict['loc'] - normal_stds * prior_dict['scale']
                upper = prior_dict['loc'] + normal_stds * prior_dict['scale']
                x = np.linspace(lower, upper, N)
                prior = prior_func(x)
            elif prior_dict['type'] == 'halfnorm':
                lower = prior_dict['loc']
                upper = prior_dict['loc'] + normal_stds * prior_dict['scale']
                x = np.linspace(lower, upper, N)
                prior = [prior_func(xi) for xi in x]
            elif prior_dict['type'] == 'neghalfnorm':
                upper = prior_dict['loc']
                lower = prior_dict['loc'] - normal_stds * prior_dict['scale']
                x = np.linspace(lower, upper, N)
                prior = [prior_func(xi) for xi in x]
            else:
                raise ValueError('Not implemented for prior type {}'.format(
                    prior_dict['type']))
            priorln = ax.plot(x, prior, 'r', label='prior')
            ax.set_xlabel(self.theta_symbols[i])

            s = self.samples[:, i]