diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 88bb5a50ccd6cc43f5cf52427099284d41d37abc..316093ccc974d296bacbb4833b91cb3a30830f21 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,11 @@
 test_app:
   script:
     - . /home/greg/lalsuite-install/etc/lalapps-user-env.sh
-    - /home/greg/anaconda2/bin/python tests.py TestWriter
-    - /home/greg/anaconda2/bin/python tests.py TestBaseSearchClass
-    - /home/greg/anaconda2/bin/python tests.py TestComputeFstat
-    - /home/greg/anaconda2/bin/python tests.py TestSemiCoherentGlitchSearch
+    - /home/greg/anaconda2/bin/python tests.py Writer
+    - /home/greg/anaconda2/bin/python tests.py par
+    - /home/greg/anaconda2/bin/python tests.py BaseSearchClass
+    - /home/greg/anaconda2/bin/python tests.py ComputeFstat
+    - /home/greg/anaconda2/bin/python tests.py SemiCoherentSearch
+    - /home/greg/anaconda2/bin/python tests.py SemiCoherentGlitchSearch
+    - /home/greg/anaconda2/bin/python tests.py MCMCSearch
+    - /home/greg/anaconda2/bin/python tests.py GridSearch
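
Each script line above runs a single unittest class from tests.py; a local equivalent is sketched below (the class name 'Writer' is taken from the CI lines, everything else is an assumption about the local setup):

import unittest
import tests  # the repository's tests.py, assumed importable from the repo root

# mirror e.g. `python tests.py Writer` from the CI job, without exiting the interpreter
unittest.main(module=tests, argv=['tests.py', 'Writer'], exit=False)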
diff --git a/examples/fully_coherent_search_using_MCMC.py b/examples/fully_coherent_search_using_MCMC.py
index 82fd4dd37425573a127c03e7580e7a2a2d956d12..72874c74e5b0fdc9270607f7a1a8d1b28f004aed 100644
--- a/examples/fully_coherent_search_using_MCMC.py
+++ b/examples/fully_coherent_search_using_MCMC.py
@@ -47,8 +47,8 @@ theta_prior = {'F0': {'type': 'unif',
                'Delta': Delta
                }
 
-ntemps = 1
-log10beta_min = -1
+ntemps = 2
+log10beta_min = -0.5
 nwalkers = 100
 nsteps = [300, 300]
 
@@ -57,6 +57,9 @@ mcmc = pyfstat.MCMCSearch(
     sftfilepattern='{}/*{}*sft'.format(outdir, label), theta_prior=theta_prior,
     tref=tref, minStartTime=tstart, maxStartTime=tend, nsteps=nsteps,
     nwalkers=nwalkers, ntemps=ntemps, log10beta_min=log10beta_min)
-mcmc.run(subtractions=[F0, F1])
+mcmc.transform_dictionary = dict(
+    F0=dict(subtractor=F0, symbol='$f-f^\mathrm{s}$'),
+    F1=dict(subtractor=F1, symbol='$\dot{f}-\dot{f}^\mathrm{s}$'))
+mcmc.run()
 mcmc.plot_corner(add_prior=True)
 mcmc.print_summary()
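
The transform_dictionary attribute replaces the old run(subtractions=[F0, F1]) call. A minimal sketch of the assumed effect on the plotted posterior (illustrative only, not the pyfstat plotting internals):

import numpy as np

samples_F0 = np.array([29.99999, 30.00001, 30.0])   # hypothetical chain samples for F0
entry = dict(subtractor=30.0, symbol=r'$f-f^\mathrm{s}$')

plotted_values = samples_F0 - entry['subtractor']    # axis values after the subtraction
axis_label = entry['symbol']                         # label used on that corner-plot axis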
diff --git a/examples/glitch_examples/fully_coherent_search_using_MCMC_on_glitching_data.py b/examples/glitch_examples/fully_coherent_search_using_MCMC_on_glitching_data.py
deleted file mode 100644
index 011cd5d0b4f417f6ed2bcff92c7ed57f5118012c..0000000000000000000000000000000000000000
--- a/examples/glitch_examples/fully_coherent_search_using_MCMC_on_glitching_data.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import numpy as np
-from pyfstat import MCMCSearch
-
-F0 = 30.0
-F1 = -1e-10
-F2 = 0
-Alpha = np.radians(83.6292)
-Delta = np.radians(22.0144)
-
-tref = 362750407.0
-
-tstart = 1000000000
-duration = 100*86400
-tend = tstart + duration
-
-theta_prior = {'F0': {'type': 'unif', 'lower': F0-1e-4, 'upper': F0+1e-4},
-               'F1': {'type': 'unif', 'lower': F1*(1+1e-3), 'upper': F1*(1-1e-3)},
-               'F2': F2,
-               'Alpha': Alpha,
-               'Delta': Delta
-               }
-
-ntemps = 2
-log10beta_min = -0.01
-nwalkers = 100
-nsteps = [500, 500]
-
-mcmc = MCMCSearch('fully_coherent_search_using_MCMC_on_glitching_data', 'data',
-                  sftfilepattern='data/*_glitch*.sft',
-                  theta_prior=theta_prior, tref=tref, minStartTime=tstart, maxStartTime=tend,
-                  nsteps=nsteps, nwalkers=nwalkers, ntemps=ntemps,
-                  log10beta_min=log10beta_min)
-mcmc.run()
-mcmc.plot_corner(add_prior=True)
-mcmc.print_summary()
diff --git a/examples/glitch_examples/glitch_robust_search.py b/examples/glitch_examples/glitch_robust_search.py
deleted file mode 100644
index 5964b6f9f11b9fe6bf39d86c2e3f62d0e9e7e307..0000000000000000000000000000000000000000
--- a/examples/glitch_examples/glitch_robust_search.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import numpy as np
-import pyfstat
-
-outdir = 'data'
-label = 'glitch_robust_search'
-
-# Properties of the GW data
-tstart = 1000000000
-Tspan = 60 * 86400
-
-# Fixed properties of the signal
-F0s = 30
-F1s = -1e-8
-F2s = 0
-Alpha = np.radians(83.6292)
-Delta = np.radians(22.0144)
-
-tref = tstart + .5 * Tspan
-
-sftfilepattern = 'data/*glitching_signal*sft'
-
-F0_width = np.sqrt(3)/(np.pi*Tspan)
-F1_width = np.sqrt(45/4.)/(np.pi*Tspan**2)
-DeltaF0 = 50 * F0_width
-DeltaF1 = 50 * F1_width
-
-theta_prior = {'F0': {'type': 'unif',
-                      'lower': F0s-DeltaF0,
-                      'upper': F0s+DeltaF0},
-               'F1': {'type': 'unif',
-                      'lower': F1s-DeltaF1,
-                      'upper': F1s+DeltaF1},
-               'F2': F2s,
-               'delta_F0': {'type': 'unif',
-                            'lower': 0,
-                            'upper': 1e-5},
-               'delta_F1': {'type': 'unif',
-                            'lower': -1e-11,
-                            'upper': 1e-11},
-               'tglitch': {'type': 'unif',
-                           'lower': tstart+0.1*Tspan,
-                           'upper': tstart+0.9*Tspan},
-               'Alpha': Alpha,
-               'Delta': Delta,
-               }
-
-search = pyfstat.MCMCGlitchSearch(
-    label=label, outdir=outdir, sftfilepattern=sftfilepattern,
-    theta_prior=theta_prior, nglitch=1, tref=tref, nsteps=[500, 500],
-    ntemps=3, log10beta_min=-0.5, minStartTime=tstart,
-    maxStartTime=tstart+Tspan)
-search.run()
-search.plot_corner(label_offset=0.8, add_prior=True)
-search.print_summary()
diff --git a/examples/glitch_examples/glitch_robust_search_make_simulated_data.py b/examples/glitch_examples/glitch_robust_search_make_simulated_data.py
deleted file mode 100644
index 106eddaea2821d34c433490f53f69597261c64e6..0000000000000000000000000000000000000000
--- a/examples/glitch_examples/glitch_robust_search_make_simulated_data.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import numpy as np
-import pyfstat
-
-outdir = 'data'
-label = 'simulated_glitching_signal'
-
-# Properties of the GW data
-tstart = 1000000000
-Tspan = 60 * 86400
-
-tref = tstart + .5 * Tspan
-
-# Fixed properties of the signal
-F0s = 30
-F1s = -1e-8
-F2s = 0
-Alpha = np.radians(83.6292)
-Delta = np.radians(22.0144)
-h0 = 1e-25
-sqrtSX = 1e-24
-psi = -0.1
-phi = 0
-cosi = 0.5
-
-# Glitch properties
-dtglitch = 0.45 * Tspan  # time (in secs) after minStartTime
-dF0 = 5e-6
-dF1 = 1e-12
-
-
-detectors = 'H1'
-
-glitch_data = pyfstat.Writer(
-    label=label, outdir=outdir, tref=tref, tstart=tstart,
-    F0=F0s, F1=F1s, F2=F2s, duration=Tspan, Alpha=Alpha,
-    Delta=Delta, sqrtSX=sqrtSX, dtglitch=dtglitch,
-    h0=h0, cosi=cosi, phi=phi, psi=psi,
-    delta_F0=dF0, delta_F1=dF1, add_noise=True)
-
-glitch_data.make_data()
diff --git a/examples/glitch_examples/make_fake_data.py b/examples/glitch_examples/make_simulated_data.py
similarity index 81%
rename from examples/glitch_examples/make_fake_data.py
rename to examples/glitch_examples/make_simulated_data.py
index a2e7fc8fef25e511a90ba7d1b09f166d54e149ba..ffa9e3c8378a609a284397f63e14e50ab85f9e1e 100644
--- a/examples/glitch_examples/make_fake_data.py
+++ b/examples/glitch_examples/make_simulated_data.py
@@ -1,6 +1,7 @@
 from pyfstat import Writer, GlitchWriter
 import numpy as np
 
+outdir = 'data'
 # First, we generate data with a reasonably strong smooth signal
 
 # Define parameters of the Crab pulsar as an example
@@ -9,19 +10,19 @@ F1 = -1e-10
 F2 = 0
 Alpha = np.radians(83.6292)
 Delta = np.radians(22.0144)
-tref = 362750407.0
 
 # Signal strength
-h0 = 1e-23
+h0 = 5e-24
 
 # Properties of the GW data
 sqrtSX = 1e-22
 tstart = 1000000000
 duration = 100*86400
 tend = tstart+duration
+tref = tstart + 0.5*duration
 
 data = Writer(
-    label='basic', outdir='data', tref=tref, tstart=tstart, F0=F0, F1=F1,
+    label='0_glitch', outdir=outdir, tref=tref, tstart=tstart, F0=F0, F1=F1,
     F2=F2, duration=duration, Alpha=Alpha, Delta=Delta, h0=h0, sqrtSX=sqrtSX)
 data.make_data()
 
@@ -31,11 +32,11 @@ print 'Predicted twoF value: {}\n'.format(twoF)
 
 # Next, taking the same signal parameters, we include a glitch half way through
 dtglitch = duration/2.0
-delta_F0 = 4e-5
+delta_F0 = 5e-6
 delta_F1 = 0
 
 glitch_data = GlitchWriter(
-    label='glitch', outdir='data', tref=tref, tstart=tstart, F0=F0, F1=F1,
+    label='1_glitch', outdir=outdir, tref=tref, tstart=tstart, F0=F0, F1=F1,
     F2=F2, duration=duration, Alpha=Alpha, Delta=Delta, h0=h0, sqrtSX=sqrtSX,
     dtglitch=dtglitch, delta_F0=delta_F0, delta_F1=delta_F1)
 glitch_data.make_data()
@@ -49,7 +50,7 @@ delta_F1 = [0, 0]
 delta_F2 = [0, 0]
 
 two_glitch_data = GlitchWriter(
-    label='twoglitch', outdir='data', tref=tref, tstart=tstart, F0=F0, F1=F1,
+    label='2_glitch', outdir=outdir, tref=tref, tstart=tstart, F0=F0, F1=F1,
     F2=F2, duration=duration, Alpha=Alpha, Delta=Delta, h0=h0, sqrtSX=sqrtSX,
     dtglitch=dtglitch, delta_phi=delta_phi, delta_F0=delta_F0,
     delta_F1=delta_F1, delta_F2=delta_F2)
diff --git a/examples/glitch_examples/semi_coherent_glitch_search_using_MCMC.py b/examples/glitch_examples/semi_coherent_glitch_search_using_MCMC.py
deleted file mode 100644
index 06caa43aefb397dd574c3685d0cab9709092b36b..0000000000000000000000000000000000000000
--- a/examples/glitch_examples/semi_coherent_glitch_search_using_MCMC.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import pyfstat
-
-F0 = 30.0
-F1 = -1e-10
-F2 = 0
-Alpha = 5e-3
-Delta = 6e-2
-tref = 362750407.0
-
-tstart = 1000000000
-duration = 100*86400
-tend = tstart + duration
-
-theta_prior = {'F0': {'type': 'norm', 'loc': F0, 'scale': abs(1e-6*F0)},
-               'F1': {'type': 'norm', 'loc': F1, 'scale': abs(1e-6*F1)},
-               'F2': F2,
-               'Alpha': Alpha,
-               'Delta': Delta,
-               'delta_F0': {'type': 'halfnorm', 'loc': 0,
-                            'scale': 1e-5*F0},
-               'delta_F1': 0,
-               'tglitch': {'type': 'unif',
-                           'lower': tstart+0.1*duration,
-                           'upper': tstart+0.9*duration},
-               }
-
-ntemps = 4
-log10beta_min = -1
-nwalkers = 100
-nsteps = [5000, 1000, 1000]
-
-mcmc = pyfstat.MCMCGlitchSearch(
-    'semi_coherent_glitch_search_using_MCMC', 'data',
-    sftfilepattern='data/*_glitch*sft', theta_prior=theta_prior, tref=tref,
-    tstart=tstart, tend=tend, nsteps=nsteps, nwalkers=nwalkers,
-    scatter_val=1e-10, nglitch=1, ntemps=ntemps,
-    log10beta_min=log10beta_min)
-
-mcmc.run()
-mcmc.plot_corner(add_prior=True)
-mcmc.print_summary()
diff --git a/examples/glitch_examples/semi_coherent_twoglitch_search_using_MCMC.py b/examples/glitch_examples/semi_coherent_twoglitch_search_using_MCMC.py
deleted file mode 100644
index b467557320391cc5fc6bb8569823efa55314878d..0000000000000000000000000000000000000000
--- a/examples/glitch_examples/semi_coherent_twoglitch_search_using_MCMC.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import pyfstat
-
-F0 = 30.0
-F1 = -1e-10
-F2 = 0
-Alpha = 5e-3
-Delta = 6e-2
-tref = 362750407.0
-
-tstart = 1000000000
-duration = 100*86400
-tend = tstart + duration
-
-theta_prior = {'F0': {'type': 'norm', 'loc': F0, 'scale': abs(1e-6*F0)},
-               'F1': {'type': 'norm', 'loc': F1, 'scale': abs(1e-6*F1)},
-               'F2': F2,
-               'Alpha': Alpha,
-               'Delta': Delta,
-               'delta_F0_0': {'type': 'halfnorm', 'loc': 0,
-                              'scale': 1e-7*F0},
-               'delta_F0_1': {'type': 'halfnorm', 'loc': 0,
-                              'scale': 1e-7*F0},
-               'delta_F1_0': 0,
-               'delta_F1_1': 0,
-               'tglitch_0': {'type': 'unif',
-                             'lower': tstart+0.01*duration,
-                             'upper': tstart+0.5*duration},
-               'tglitch_1': {'type': 'unif',
-                             'lower': tstart+0.5*duration,
-                             'upper': tstart+0.99*duration},
-               }
-
-nwalkers = 100
-nsteps = [1000, 1000, 5000]
-
-mcmc = pyfstat.MCMCGlitchSearch(
-    'semi_coherent_twoglitch_search', 'data', sftfilepattern='data/*twoglitch*sft',
-    theta_prior=theta_prior, tref=tref, tstart=tstart,
-    tend=tend, nsteps=nsteps, nwalkers=nwalkers, scatter_val=1e-10, nglitch=2)
-
-mcmc.run()
-mcmc.plot_corner(add_prior=True, tglitch_ratio=True, figsize=(10, 10))
-mcmc.print_summary()
diff --git a/examples/glitch_examples/semicoherent_glitch_robust_directed_MCMC_search_on_1_glitch.py b/examples/glitch_examples/semicoherent_glitch_robust_directed_MCMC_search_on_1_glitch.py
new file mode 100644
index 0000000000000000000000000000000000000000..9da04a410059f8b472a11211a1c41c6703ace4ec
--- /dev/null
+++ b/examples/glitch_examples/semicoherent_glitch_robust_directed_MCMC_search_on_1_glitch.py
@@ -0,0 +1,51 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import pyfstat
+from make_simulated_data import tstart, duration, tref, F0, F1, F2, Alpha, Delta, outdir
+
+plt.style.use('paper')
+
+label = 'semicoherent_glitch_robust_directed_MCMC_search_on_1_glitch'
+
+Nstar = 100
+F0_width = np.sqrt(Nstar)*np.sqrt(12)/(np.pi*duration)
+F1_width = np.sqrt(Nstar)*np.sqrt(180)/(np.pi*duration**2)
+
+theta_prior = {
+    'F0': {'type': 'unif',
+           'lower': F0-F0_width/2.,
+           'upper': F0+F0_width/2.},
+    'F1': {'type': 'unif',
+           'lower': F1-F1_width/2.,
+           'upper': F1+F1_width/2.},
+    'F2': F2,
+    'delta_F0': {'type': 'unif',
+                 'lower': 0,
+                 'upper': 1e-5},
+    'delta_F1': 0,
+    'tglitch': {'type': 'unif',
+                'lower': tstart+0.1*duration,
+                'upper': tstart+0.9*duration},
+    'Alpha': Alpha,
+    'Delta': Delta,
+    }
+
+ntemps = 2
+log10beta_min = -0.5
+nwalkers = 100
+nsteps = [500, 500]
+
+mcmc = pyfstat.MCMCGlitchSearch(
+    label=label, sftfilepattern='data/*1_glitch*sft', theta_prior=theta_prior,
+    tref=tref, minStartTime=tstart, maxStartTime=tstart+duration,
+    nsteps=nsteps, nwalkers=nwalkers, ntemps=ntemps,
+    log10beta_min=log10beta_min, nglitch=1)
+
+mcmc.transform_dictionary['F0'] = dict(
+    subtractor=F0, symbol='$f-f^\mathrm{s}$')
+mcmc.transform_dictionary['F1'] = dict(
+    subtractor=F1, symbol='$\dot{f}-\dot{f}^\mathrm{s}$')
+
+mcmc.run()
+mcmc.plot_corner()
+mcmc.print_summary()
diff --git a/examples/glitch_examples/semicoherent_glitch_robust_directed_grid_search_on_1_glitch.py b/examples/glitch_examples/semicoherent_glitch_robust_directed_grid_search_on_1_glitch.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a6e80b87b596941c1d76534c5fdea492353e290
--- /dev/null
+++ b/examples/glitch_examples/semicoherent_glitch_robust_directed_grid_search_on_1_glitch.py
@@ -0,0 +1,53 @@
+import pyfstat
+import numpy as np
+import matplotlib.pyplot as plt
+from make_simulated_data import tstart, duration, tref, F0, F1, F2, Alpha, Delta, outdir
+
+try:
+    from gridcorner import gridcorner
+except ImportError:
+    raise ImportError(
+        "Python module 'gridcorner' not found, please install from "
+        "https://gitlab.aei.uni-hannover.de/GregAshton/gridcorner")
+
+label = 'semicoherent_glitch_robust_directed_grid_search_on_1_glitch'
+
+plt.style.use('paper')
+
+Nstar = 150
+F0_width = np.sqrt(Nstar)*np.sqrt(12)/(np.pi*duration)
+F1_width = np.sqrt(Nstar)*np.sqrt(180)/(np.pi*duration**2)
+N = 61
+F0s = [F0-F0_width/2., F0+F0_width/2., F0_width/N]
+F1s = [F1-F1_width/2., F1+F1_width/2., F1_width/N]
+F2s = [F2]
+Alphas = [Alpha]
+Deltas = [Delta]
+
+max_delta_F0 = 1e-5
+tglitchs = [tstart+0.1*duration, tstart+0.9*duration, 0.8*float(duration)/N]
+delta_F0s = [0, max_delta_F0, max_delta_F0/N]
+delta_F1s = [0]
+
+search = pyfstat.GridGlitchSearch(
+    label, outdir, 'data/*1_glitch*sft', F0s=F0s, F1s=F1s, F2s=F2s,
+    Alphas=Alphas, Deltas=Deltas, tref=tref, minStartTime=tstart,
+    maxStartTime=tstart+duration, tglitchs=tglitchs, delta_F0s=delta_F0s,
+    delta_F1s=delta_F1s)
+search.run()
+
+F0_vals = np.unique(search.data[:, 0]) - F0
+F1_vals = np.unique(search.data[:, 1]) - F1
+delta_F0s_vals = np.unique(search.data[:, 5])
+tglitch_vals = np.unique(search.data[:, 7])
+tglitch_vals_days = (tglitch_vals-tstart) / 86400.
+
+twoF = search.data[:, -1].reshape((len(F0_vals), len(F1_vals),
+                                   len(delta_F0s_vals), len(tglitch_vals)))
+xyz = [F0_vals, F1_vals, delta_F0s_vals, tglitch_vals_days]
+labels = ['$f - f_0$\n[Hz]', '$\dot{f} - \dot{f}_0$\n[Hz/s]',
+          '$\delta f$\n[Hz]', '$t^g_0$\n[days]', '$\widehat{2\mathcal{F}}$']
+fig, axes = gridcorner(
+    twoF, xyz, projection='log_mean', whspace=0.1, factor=1.2, labels=labels)
+fig.savefig('{}/{}_projection_matrix.png'.format(outdir, label),
+            bbox_inches='tight')
diff --git a/examples/glitch_examples/standard_directed_MCMC_search_on_1_glitch.py b/examples/glitch_examples/standard_directed_MCMC_search_on_1_glitch.py
new file mode 100644
index 0000000000000000000000000000000000000000..7856a59557bd9b30acbbe4af58558e59c0e5f9f6
--- /dev/null
+++ b/examples/glitch_examples/standard_directed_MCMC_search_on_1_glitch.py
@@ -0,0 +1,44 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import pyfstat
+from make_simulated_data import tstart, duration, tref, F0, F1, F2, Alpha, Delta, outdir
+
+plt.style.use('paper')
+
+label = 'standard_directed_MCMC_search_on_1_glitch'
+
+Nstar = 10000
+F0_width = np.sqrt(Nstar)*np.sqrt(12)/(np.pi*duration)
+F1_width = np.sqrt(Nstar)*np.sqrt(180)/(np.pi*duration**2)
+
+theta_prior = {
+    'F0': {'type': 'unif',
+           'lower': F0-F0_width/2.,
+           'upper': F0+F0_width/2.},
+    'F1': {'type': 'unif',
+           'lower': F1-F1_width/2.,
+           'upper': F1+F1_width/2.},
+    'F2': F2,
+    'Alpha': Alpha,
+    'Delta': Delta,
+    }
+
+ntemps = 2
+log10beta_min = -0.5
+nwalkers = 100
+nsteps = [500, 2000]
+
+mcmc = pyfstat.MCMCSearch(
+    label=label, sftfilepattern='data/*1_glitch*sft', theta_prior=theta_prior,
+    tref=tref, minStartTime=tstart, maxStartTime=tstart+duration,
+    nsteps=nsteps, nwalkers=nwalkers, ntemps=ntemps,
+    log10beta_min=log10beta_min)
+
+mcmc.transform_dictionary['F0'] = dict(
+    subtractor=F0, symbol='$f-f^\mathrm{s}$')
+mcmc.transform_dictionary['F1'] = dict(
+    subtractor=F1, symbol='$\dot{f}-\dot{f}^\mathrm{s}$')
+
+mcmc.run()
+mcmc.plot_corner()
+mcmc.print_summary()
diff --git a/examples/grid_examples/grid_F0F1F2.py b/examples/grid_examples/grid_F0F1F2.py
index 7456e535a19bf9de2faea5ae348b898f72036cfe..2de31067e42c890912bfa179fa98ade1490105da 100644
--- a/examples/grid_examples/grid_F0F1F2.py
+++ b/examples/grid_examples/grid_F0F1F2.py
@@ -1,9 +1,13 @@
 import pyfstat
 import numpy as np
 import matplotlib.pyplot as plt
-from projection_matrix import projection_matrix
 
-plt.style.use('paper')
+try:
+    from gridcorner import gridcorner
+except ImportError:
+    raise ImportError(
+        "Python module 'gridcorner' not found, please install from "
+        "https://gitlab.aei.uni-hannover.de/GregAshton/gridcorner")
 
 F0 = 30.0
 F1 = 1e-10
@@ -55,6 +59,6 @@ twoF = search.data[:, -1].reshape((len(F0_vals), len(F1_vals), len(F2_vals)))
 xyz = [F0_vals, F1_vals, F2_vals]
 labels = ['$f - f_0$', '$\dot{f} - \dot{f}_0$', '$\ddot{f} - \ddot{f}_0$',
           '$\widetilde{2\mathcal{F}}$']
-fig, axes = projection_matrix(twoF, xyz, projection='log_mean', labels=labels,
-                              whspace=0.1, factor=1.8)
+fig, axes = gridcorner(
+    twoF, xyz, projection='log_mean', labels=labels, whspace=0.1, factor=1.8)
 fig.savefig('{}/{}_projection_matrix.png'.format(outdir, label))
diff --git a/examples/grid_examples/grided_frequency_search.py b/examples/grid_examples/grided_frequency_search.py
index 5e4a423ce8a080081de364bf058287d184df9b01..e8f5d55f1a362c79bb631d2ea2b5dd428d3550f7 100644
--- a/examples/grid_examples/grided_frequency_search.py
+++ b/examples/grid_examples/grided_frequency_search.py
@@ -2,8 +2,6 @@ import pyfstat
 import numpy as np
 import matplotlib.pyplot as plt
 
-plt.style.use('paper')
-
 F0 = 30.0
 F1 = 0
 F2 = 0
diff --git a/examples/other_examples/twoF_cumulative.py b/examples/other_examples/twoF_cumulative.py
index cd7f0426b7fc92a8244b17958877c3e8a1672d47..760fd23e90068ad589918e6a5694e568b0a31cdf 100644
--- a/examples/other_examples/twoF_cumulative.py
+++ b/examples/other_examples/twoF_cumulative.py
@@ -56,7 +56,7 @@ mcmc = pyfstat.MCMCSearch(
     sftfilepattern='data/*'+data_label+'*sft', theta_prior=theta_prior, tref=tref,
     minStartTime=tstart, maxStartTime=tend, nsteps=nsteps, nwalkers=nwalkers,
     ntemps=ntemps, log10beta_min=log10beta_min)
-mcmc.run(context='paper', subtractions=[30, -1e-10])
+mcmc.run()
 mcmc.plot_corner(add_prior=True)
 mcmc.print_summary()
 
diff --git a/examples/semi_coherent_directed_follow_up.py b/examples/semi_coherent_directed_follow_up.py
index 1ae81449b1e4c8f171559f73118366f07b485b63..e84976e34b6e28e68e9dcfe04d8067735843ed6e 100644
--- a/examples/semi_coherent_directed_follow_up.py
+++ b/examples/semi_coherent_directed_follow_up.py
@@ -2,8 +2,6 @@ import pyfstat
 import numpy as np
 import matplotlib.pyplot as plt
 
-plt.style.use('./paper-style.mplstyle')
-
 F0 = 30.0
 F1 = -1e-10
 F2 = 0
@@ -63,8 +61,8 @@ NstarMax = 1000
 Nsegs0 = 100
 fig, axes = plt.subplots(nrows=2, figsize=(3.4, 3.5))
 fig, axes = mcmc.run(
-    NstarMax=NstarMax, Nsegs0=Nsegs0, subtractions=[F0, F1], labelpad=0.01,
-    plot_det_stat=False, return_fig=True, context='paper', fig=fig,
+    NstarMax=NstarMax, Nsegs0=Nsegs0, labelpad=0.01,
+    plot_det_stat=False, return_fig=True, fig=fig,
     axes=axes)
 for ax in axes:
     ax.grid()
diff --git a/examples/semi_coherent_search_using_MCMC.py b/examples/semi_coherent_search_using_MCMC.py
index cd8d95acb64775b25a4140760bd3dceb07b3119d..1422fd344ac0191291832f2cd225d6e6eb06eb84 100644
--- a/examples/semi_coherent_search_using_MCMC.py
+++ b/examples/semi_coherent_search_using_MCMC.py
@@ -58,6 +58,9 @@ mcmc = pyfstat.MCMCSemiCoherentSearch(
     theta_prior=theta_prior, tref=tref, minStartTime=tstart, maxStartTime=tend,
     nsteps=nsteps, nwalkers=nwalkers, ntemps=ntemps,
     log10beta_min=log10beta_min)
+mcmc.transform_dictionary = dict(
+    F0=dict(subtractor=F0, symbol='$f-f^\mathrm{s}$'),
+    F1=dict(subtractor=F1, symbol='$\dot{f}-\dot{f}^\mathrm{s}$'))
 mcmc.run()
 mcmc.plot_corner(add_prior=True)
 mcmc.print_summary()
diff --git a/examples/other_examples/transient_search_using_MCMC.py b/examples/transient_examples/long_transient_search_MCMC.py
similarity index 78%
rename from examples/other_examples/transient_search_using_MCMC.py
rename to examples/transient_examples/long_transient_search_MCMC.py
index b70dc3d85c1d17d5ab5906887dfabc4184922f5c..a9255fa63b7b008f70468e86edff0364e11de60a 100644
--- a/examples/other_examples/transient_search_using_MCMC.py
+++ b/examples/transient_examples/long_transient_search_MCMC.py
@@ -25,9 +25,7 @@ theta_prior = {'F0': {'type': 'unif',
                'F2': F2,
                'Alpha': Alpha,
                'Delta': Delta,
-               'transient_tstart': {'type': 'unif',
-                                    'lower': minStartTime,
-                                    'upper': maxStartTime},
+               'transient_tstart': minStartTime,
                'transient_duration': {'type': 'halfnorm',
                                       'loc': 0.001*Tspan,
                                       'scale': 0.5*Tspan}
@@ -39,11 +37,12 @@ nwalkers = 100
 nsteps = [100, 100]
 
 mcmc = pyfstat.MCMCTransientSearch(
-    label='transient_search', outdir='data',
-    sftfilepattern='data/*simulated_transient_signal*sft',
+    label='transient_search', outdir='data_l',
+    sftfilepattern='data_l/*simulated_transient_signal*sft',
     theta_prior=theta_prior, tref=tref, minStartTime=minStartTime,
     maxStartTime=maxStartTime, nsteps=nsteps, nwalkers=nwalkers, ntemps=ntemps,
-    log10beta_min=log10beta_min)
+    log10beta_min=log10beta_min,
+    transientWindowType='rect')
 mcmc.run()
 mcmc.plot_corner(label_offset=0.7)
 mcmc.print_summary()
diff --git a/examples/other_examples/transient_search_using_MCMC_make_simulated_data.py b/examples/transient_examples/long_transient_search_make_simulated_data.py
similarity index 79%
rename from examples/other_examples/transient_search_using_MCMC_make_simulated_data.py
rename to examples/transient_examples/long_transient_search_make_simulated_data.py
index 4fa3dd1d1157446b5de54dfb9b7f3b132889fdc1..de68fccdb40e16063bd870c0b69829b667dc49c3 100644
--- a/examples/other_examples/transient_search_using_MCMC_make_simulated_data.py
+++ b/examples/transient_examples/long_transient_search_make_simulated_data.py
@@ -19,8 +19,8 @@ h0 = 1e-23
 sqrtSX = 1e-22
 
 transient = pyfstat.Writer(
-    label='simulated_transient_signal', outdir='data', tref=tref,
+    label='simulated_transient_signal', outdir='data_l', tref=tref,
     tstart=transient_tstart, F0=F0, F1=F1, F2=F2, duration=transient_duration,
     Alpha=Alpha, Delta=Delta, h0=h0, sqrtSX=sqrtSX, minStartTime=minStartTime,
-    maxStartTime=maxStartTime)
+    maxStartTime=maxStartTime, transientWindowType='rect')
 transient.make_data()
diff --git a/examples/transient_examples/short_transient_search_MCMC.py b/examples/transient_examples/short_transient_search_MCMC.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b8f504a074f20372c9142d6d220f0650b0e498b
--- /dev/null
+++ b/examples/transient_examples/short_transient_search_MCMC.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+
+import pyfstat
+
+F0 = 30.0
+F1 = -1e-10
+F2 = 0
+Alpha = 0.5
+Delta = 1
+
+minStartTime = 1000000000
+maxStartTime = minStartTime + 2*86400
+Tspan = maxStartTime - minStartTime
+tref = minStartTime
+
+Tsft = 1800
+
+DeltaF0 = 1e-2
+DeltaF1 = 1e-9
+
+theta_prior = {'F0': {'type': 'unif',
+                      'lower': F0-DeltaF0/2.,
+                      'upper': F0+DeltaF0/2.},
+               'F1': {'type': 'unif',
+                      'lower': F1-DeltaF1/2.,
+                      'upper': F1+DeltaF1/2.},
+               'F2': F2,
+               'Alpha': Alpha,
+               'Delta': Delta,
+               'transient_tstart': {'type': 'unif',
+                                    'lower': minStartTime,
+                                    'upper': maxStartTime-2*Tsft},
+               'transient_duration': {'type': 'unif',
+                                      'lower': 2*Tsft,
+                                      'upper': Tspan-2*Tsft}
+               }
+
+ntemps = 2
+log10beta_min = -1
+nwalkers = 100
+nsteps = [100, 100]
+
+mcmc = pyfstat.MCMCTransientSearch(
+    label='transient_search', outdir='data_s',
+    sftfilepattern='data_s/*simulated_transient_signal*sft',
+    theta_prior=theta_prior, tref=tref, minStartTime=minStartTime,
+    maxStartTime=maxStartTime, nsteps=nsteps, nwalkers=nwalkers, ntemps=ntemps,
+    log10beta_min=log10beta_min,
+    transientWindowType='rect')
+mcmc.run()
+mcmc.plot_corner(label_offset=0.7)
+mcmc.print_summary()
diff --git a/examples/transient_examples/short_transient_search_gridded.py b/examples/transient_examples/short_transient_search_gridded.py
new file mode 100644
index 0000000000000000000000000000000000000000..64e642342f40b088a5c68c1d44c7b69e68be20e9
--- /dev/null
+++ b/examples/transient_examples/short_transient_search_gridded.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+import pyfstat
+import os
+import numpy as np
+import matplotlib.pyplot as plt
+
+datadir = 'data_s'
+
+F0 = 30.0
+F1 = -1e-10
+F2 = 0
+Alpha = 0.5
+Delta = 1
+
+minStartTime = 1000000000
+maxStartTime = minStartTime + 2*86400
+Tspan = maxStartTime - minStartTime
+tref = minStartTime
+
+Tsft = 1800
+
+m = 0.001
+dF0 = np.sqrt(12*m)/(np.pi*Tspan)
+DeltaF0 = 100*dF0
+F0s = [F0-DeltaF0/2., F0+DeltaF0/2., dF0]
+F1s = [F1]
+F2s = [F2]
+Alphas = [Alpha]
+Deltas = [Delta]
+
+print('Standard CW search:')
+search1 = pyfstat.GridSearch(
+    label='CW', outdir=datadir,
+    sftfilepattern=os.path.join(datadir,'*simulated_transient_signal*sft'),
+    F0s=F0s, F1s=F1s, F2s=F2s, Alphas=Alphas, Deltas=Deltas, tref=tref,
+    minStartTime=minStartTime, maxStartTime=maxStartTime,
+    BSGL=False,
+    outputTransientFstatMap=True)
+search1.run()
+search1.print_max_twoF()
+
+search1.plot_1D(xkey='F0',
+               xlabel='freq [Hz]', ylabel='$2\mathcal{F}$')
+
+print('with t0,tau bands:')
+search2 = pyfstat.GridSearch(
+    label='tCW', outdir=datadir,
+    sftfilepattern=os.path.join(datadir,'*simulated_transient_signal*sft'),
+    F0s=F0s, F1s=F1s, F2s=F2s, Alphas=Alphas, Deltas=Deltas, tref=tref,
+    minStartTime=minStartTime, maxStartTime=maxStartTime,
+    transientWindowType='rect', t0Band=Tspan-2*Tsft, tauBand=Tspan,
+    BSGL=False,
+    outputTransientFstatMap=True)
+search2.run()
+search2.print_max_twoF()
+
+search2.plot_1D(xkey='F0',
+               xlabel='freq [Hz]', ylabel='$2\mathcal{F}$')
diff --git a/examples/transient_examples/short_transient_search_make_simulated_data.py b/examples/transient_examples/short_transient_search_make_simulated_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..320b4e4eb07dd1fce589156f0e1cfa96ea0b78a6
--- /dev/null
+++ b/examples/transient_examples/short_transient_search_make_simulated_data.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+import pyfstat
+
+F0 = 30.0
+F1 = -1e-10
+F2 = 0
+Alpha = 0.5
+Delta = 1
+
+minStartTime = 1000000000
+maxStartTime = minStartTime + 2*86400
+
+transient_tstart = minStartTime + 0.5*86400
+transient_duration = 1*86400
+tref = minStartTime
+
+h0 = 1e-23
+sqrtSX = 1e-22
+detectors = 'H1,L1'
+
+Tsft = 1800
+
+transient = pyfstat.Writer(
+    label='simulated_transient_signal', outdir='data_s',
+    tref=tref, tstart=transient_tstart, duration=transient_duration,
+    F0=F0, F1=F1, F2=F2, Alpha=Alpha, Delta=Delta, h0=h0,
+    detectors=detectors,sqrtSX=sqrtSX,
+    minStartTime=minStartTime, maxStartTime=maxStartTime,
+    transientWindowType='rect', Tsft=Tsft)
+transient.make_data()
diff --git a/examples/using_initialisation.py b/examples/using_initialisation.py
index 5e7ea5e019f0d8b3abf155c41d861d6b7a0ad825..efe43364f0ea6ada232d6fbb972b39ad9550aef3 100644
--- a/examples/using_initialisation.py
+++ b/examples/using_initialisation.py
@@ -59,6 +59,6 @@ mcmc = pyfstat.MCMCSearch(
     nsteps=nsteps, nwalkers=nwalkers, ntemps=ntemps,
     log10beta_min=log10beta_min)
 mcmc.setup_initialisation(100, scatter_val=1e-10)
-mcmc.run(subtractions=[F0, F1])
+mcmc.run()
 mcmc.plot_corner(add_prior=True)
 mcmc.print_summary()
diff --git a/pyfstat/core.py b/pyfstat/core.py
index 7599f108742732c62866b17f804884fb9d0b2847..23500c228c2fd3e8f43b26e73f3f346519faa71b 100755
--- a/pyfstat/core.py
+++ b/pyfstat/core.py
@@ -330,7 +330,8 @@ class ComputeFstat(BaseSearchClass):
 
     @helper_functions.initializer
     def __init__(self, tref, sftfilepattern=None, minStartTime=None,
-                 maxStartTime=None, binary=False, transient=True, BSGL=False,
+                 maxStartTime=None, binary=False, BSGL=False,
+                 transientWindowType=None, t0Band=None, tauBand=None,
                  detectors=None, minCoverFreq=None, maxCoverFreq=None,
                  injectSources=None, injectSqrtSX=None, assumeSqrtSX=None,
                  SSBprec=None):
@@ -347,10 +348,18 @@ class ComputeFstat(BaseSearchClass):
             this epoch
         binary : bool
             If true, search of binary parameters.
-        transient : bool
-            If true, allow for the Fstat to be computed over a transient range.
         BSGL : bool
             If true, compute the BSGL rather than the twoF value.
+        transientWindowType: str
+            If 'rect' or 'exp',
+            allow for the Fstat to be computed over a transient range.
+            ('none' instead of None explicitly calls the transient-window
+            function, but with the full range, for debugging)
+        t0Band, tauBand: int
+            if >0, search t0 in (minStartTime,minStartTime+t0Band)
+                   and tau in (2*Tsft,2*Tsft+tauBand).
+            if =0, only compute CW Fstat with t0=minStartTime,
+                   tau=maxStartTime-minStartTime.
         detectors : str
             Two character reference to the data to use, specify None for no
             constraint. If multiple, separate by a comma.
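
A minimal usage sketch for the transient arguments documented in this hunk; the SFT pattern, times and band values are hypothetical, chosen to mirror the short_transient_search_gridded.py example added above:

import pyfstat

Tsft = 1800
minStartTime = 1000000000
maxStartTime = minStartTime + 2*86400
Tspan = maxStartTime - minStartTime

search = pyfstat.ComputeFstat(
    tref=minStartTime,
    sftfilepattern='data_s/*sft',         # hypothetical SFT location
    minStartTime=minStartTime, maxStartTime=maxStartTime,
    transientWindowType='rect',           # compute atoms for a (t0, tau) transient map
    t0Band=Tspan-2*Tsft,                  # search t0 over most of the observation span
    tauBand=Tspan)                        # search tau up to the full span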
@@ -477,7 +486,7 @@ class ComputeFstat(BaseSearchClass):
 
         logging.info('Initialising FstatInput')
         dFreq = 0
-        if self.transient:
+        if self.transientWindowType:
             self.whatToCompute = lalpulsar.FSTATQ_ATOMS_PER_DET
         else:
             self.whatToCompute = lalpulsar.FSTATQ_2F
@@ -593,20 +602,46 @@ class ComputeFstat(BaseSearchClass):
             self.whatToCompute = (self.whatToCompute +
                                   lalpulsar.FSTATQ_2F_PER_DET)
 
-        if self.transient:
+        if self.transientWindowType:
             logging.info('Initialising transient parameters')
             self.windowRange = lalpulsar.transientWindowRange_t()
-            self.windowRange.type = lalpulsar.TRANSIENT_RECTANGULAR
-            self.windowRange.t0Band = 0
-            self.windowRange.dt0 = 1
-            self.windowRange.tauBand = 0
-            self.windowRange.dtau = 1
+            transientWindowTypes = {'none': lalpulsar.TRANSIENT_NONE,
+                                    'rect': lalpulsar.TRANSIENT_RECTANGULAR,
+                                    'exp':  lalpulsar.TRANSIENT_EXPONENTIAL}
+            if self.transientWindowType in transientWindowTypes:
+                self.windowRange.type = transientWindowTypes[self.transientWindowType]
+            else:
+                raise ValueError(
+                    'Unknown window-type ({}) passed as input, allowed types are [{}].'
+                    .format(self.transientWindowType,
+                            ', '.join(transientWindowTypes)))
+
+            self.Tsft = int(1.0/SFTCatalog.data[0].header.deltaF)
+            if self.t0Band is None:
+                self.windowRange.t0Band = 0
+                self.windowRange.dt0 = 1
+            else:
+                if not isinstance(self.t0Band, int):
+                    logging.warn('Casting non-integer t0Band={} to int...'
+                                 .format(self.t0Band))
+                    self.t0Band = int(self.t0Band)
+                self.windowRange.t0Band = self.t0Band
+                self.windowRange.dt0 = self.Tsft
+            if self.tauBand is None:
+                self.windowRange.tauBand = 0
+                self.windowRange.dtau = 1
+            else:
+                if not isinstance(self.tauBand, int):
+                    logging.warn('Casting non-integer tauBand={} to int...'
+                                 .format(self.tauBand))
+                    self.tauBand = int(self.tauBand)
+                self.windowRange.tauBand = self.tauBand
+                self.windowRange.dtau = self.Tsft
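
As a rough illustration of the (t0, tau) grid this branch sets up (dt0 = dtau = Tsft, with tau starting from the 2*Tsft minimum; the numbers below are made up):

Tsft = 1800
minStartTime = 1000000000
t0Band, tauBand = 10*Tsft, 20*Tsft        # hypothetical bands

t0_grid = [minStartTime + i*Tsft for i in range(t0Band//Tsft + 1)]
tau_grid = [2*Tsft + j*Tsft for j in range(tauBand//Tsft + 1)]
# the transient map F_mn is then evaluated on roughly len(t0_grid) x len(tau_grid) points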
 
     def get_fullycoherent_twoF(self, tstart, tend, F0, F1, F2, Alpha, Delta,
                                asini=None, period=None, ecc=None, tp=None,
                                argp=None):
         """ Returns twoF or ln(BSGL) fully-coherently at a single point """
-
         self.PulsarDopplerParams.fkdot = np.array([F0, F1, F2, 0, 0, 0, 0])
         self.PulsarDopplerParams.Alpha = Alpha
         self.PulsarDopplerParams.Delta = Delta
@@ -624,7 +659,7 @@ class ComputeFstat(BaseSearchClass):
                                self.whatToCompute
                                )
 
-        if self.transient is False:
+        if not self.transientWindowType:
             if self.BSGL is False:
                 return self.FstatResults.twoF[0]
 
@@ -636,13 +671,21 @@ class ComputeFstat(BaseSearchClass):
             return log10_BSGL/np.log10(np.exp(1))
 
         self.windowRange.t0 = int(tstart)  # TYPE UINT4
-        self.windowRange.tau = int(tend - tstart)  # TYPE UINT4
+        if self.windowRange.tauBand == 0:
+            # true single-template search also in transient params:
+            # actual (t0,tau) window was set with tstart, tend before
+            self.windowRange.tau = int(tend - tstart)  # TYPE UINT4
+        else:
+            # grid search: start at minimum tau required for nondegenerate
+            # F-stat computation
+            self.windowRange.tau = int(2*self.Tsft)
 
-        FS = lalpulsar.ComputeTransientFstatMap(
+        self.FstatMap = lalpulsar.ComputeTransientFstatMap(
             self.FstatResults.multiFatoms[0], self.windowRange, False)
+        F_mn = self.FstatMap.F_mn.data
 
+        twoF = 2*np.max(F_mn)
         if self.BSGL is False:
-            twoF = 2*FS.F_mn.data[0][0]
             if np.isnan(twoF):
                 return 0
             else:
@@ -657,10 +700,17 @@ class ComputeFstat(BaseSearchClass):
         FS1 = lalpulsar.ComputeTransientFstatMap(
             FstatResults_single.multiFatoms[0], self.windowRange, False)
 
-        self.twoFX[0] = 2*FS0.F_mn.data[0][0]
-        self.twoFX[1] = 2*FS1.F_mn.data[0][0]
+        # for now, return the BSGL evaluated at the (t0, tau) grid point
+        # at which the multi-detector F is maximised
+        # FIXME: should we instead compute BSGL over the whole F_mn
+        # and return the maximum of that?
+        idx_maxTwoF = np.argmax(F_mn)
+
+        self.twoFX[0] = 2*FS0.F_mn.data[idx_maxTwoF]
+        self.twoFX[1] = 2*FS1.F_mn.data[idx_maxTwoF]
         log10_BSGL = lalpulsar.ComputeBSGL(
-                2*FS.F_mn.data[0][0], self.twoFX, self.BSGLSetup)
+                twoF, self.twoFX, self.BSGLSetup)
 
         return log10_BSGL/np.log10(np.exp(1))
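
Given the window handling above, a single-template transient evaluation would look roughly as follows; `search` stands for a ComputeFstat instance constructed with transientWindowType='rect' and t0Band/tauBand left unset, and the parameter values are hypothetical:

# tauBand is None, so the window collapses to (t0, tau) = (tstart, tend - tstart)
twoF = search.get_fullycoherent_twoF(
    tstart=1000000000, tend=1000000000 + 86400,
    F0=30.0, F1=-1e-10, F2=0, Alpha=0.5, Delta=1)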
 
@@ -696,14 +746,16 @@ class ComputeFstat(BaseSearchClass):
         max_tau = SFTmaxStartTime - tstart
         taus = np.linspace(min_tau, max_tau, npoints)
         twoFs = []
-        if self.transient is False:
-            self.transient = True
+        if not self.transientWindowType:
+            # still call the transient-Fstat-map function, but using the full range
+            self.transientWindowType = 'none'
             self.init_computefstatistic_single_point()
         for tau in taus:
-            twoFs.append(self.get_fullycoherent_twoF(
+            detstat = self.get_fullycoherent_twoF(
                 tstart=tstart, tend=tstart+tau, F0=F0, F1=F1, F2=F2,
                 Alpha=Alpha, Delta=Delta, asini=asini, period=period, ecc=ecc,
-                tp=tp, argp=argp))
+                tp=tp, argp=argp)
+            twoFs.append(detstat)
 
         return taus, np.array(twoFs)
 
@@ -880,7 +932,9 @@ class SemiCoherentSearch(ComputeFstat):
 
         self.fs_file_name = "{}/{}_FS.dat".format(self.outdir, self.label)
         self.set_ephemeris_files()
-        self.transient = True
+        self.transientWindowType = 'rect'
+        self.t0Band  = None
+        self.tauBand = None
         self.init_computefstatistic_single_point()
         self.init_semicoherent_parameters()
 
@@ -888,7 +942,7 @@ class SemiCoherentSearch(ComputeFstat):
         logging.info(('Initialising semicoherent parameters from {} to {} in'
                       ' {} segments').format(
             self.minStartTime, self.maxStartTime, self.nsegs))
-        self.transient = True
+        self.transientWindowType = 'rect'
         self.whatToCompute = lalpulsar.FSTATQ_2F+lalpulsar.FSTATQ_ATOMS_PER_DET
         self.tboundaries = np.linspace(self.minStartTime, self.maxStartTime,
                                        self.nsegs+1)
@@ -927,7 +981,7 @@ class SemiCoherentSearch(ComputeFstat):
                                self.whatToCompute
                                )
 
-        #if self.transient is False:
+        #if not self.transientWindowType:
         #    if self.BSGL is False:
         #        return self.FstatResults.twoF[0]
         #    twoF = np.float(self.FstatResults.twoF[0])
@@ -991,7 +1045,7 @@ class SemiCoherentGlitchSearch(ComputeFstat):
 
     @helper_functions.initializer
     def __init__(self, label, outdir, tref, minStartTime, maxStartTime,
-                 nglitch=0, sftfilepattern=None, theta0_idx=0, BSGL=False,
+                 nglitch=1, sftfilepattern=None, theta0_idx=0, BSGL=False,
                  minCoverFreq=None, maxCoverFreq=None, assumeSqrtSX=None,
                  detectors=None, SSBprec=None, injectSources=None):
         """
@@ -1017,8 +1071,10 @@ class SemiCoherentGlitchSearch(ComputeFstat):
 
         self.fs_file_name = "{}/{}_FS.dat".format(self.outdir, self.label)
         self.set_ephemeris_files()
-        self.transient = True
-        self.binary = False
+        self.transientWindowType = 'rect'
+        self.t0Band  = None
+        self.tauBand = None
+        self.binary  = False
         self.init_computefstatistic_single_point()
 
     def get_semicoherent_nglitch_twoF(self, F0, F1, F2, Alpha, Delta, *args):
@@ -1041,11 +1097,11 @@ class SemiCoherentGlitchSearch(ComputeFstat):
         twoFSum = 0
         for i, theta_i_at_tref in enumerate(thetas):
             ts, te = tboundaries[i], tboundaries[i+1]
-
-            twoFVal = self.get_fullycoherent_twoF(
-                ts, te, theta_i_at_tref[1], theta_i_at_tref[2],
-                theta_i_at_tref[3], Alpha, Delta)
-            twoFSum += twoFVal
+            if te - ts > 1800:
+                twoFVal = self.get_fullycoherent_twoF(
+                    ts, te, theta_i_at_tref[1], theta_i_at_tref[2],
+                    theta_i_at_tref[3], Alpha, Delta)
+                twoFSum += twoFVal
 
         if np.isfinite(twoFSum):
             return twoFSum
diff --git a/pyfstat/grid_based_searches.py b/pyfstat/grid_based_searches.py
index bcd0246cbb497204edaf69bb8858a885b65c617c..306c1631eb94a2cd78777cb941fd3c8968d1d491 100644
--- a/pyfstat/grid_based_searches.py
+++ b/pyfstat/grid_based_searches.py
@@ -5,6 +5,9 @@ import os
 import logging
 import itertools
 from collections import OrderedDict
+import datetime
+import getpass
+import socket
 
 import numpy as np
 import matplotlib
@@ -25,13 +28,17 @@ class GridSearch(BaseSearchClass):
                   'Alpha': r'$\alpha$', 'Delta': r'$\delta$'}
     tex_labels0 = {'F0': '$-f_0$', 'F1': '$-\dot{f}_0$', 'F2': '$-\ddot{f}_0$',
                    'Alpha': r'$-\alpha_0$', 'Delta': r'$-\delta_0$'}
+    search_labels = ['minStartTime', 'maxStartTime', 'F0s', 'F1s', 'F2s',
+                     'Alphas', 'Deltas']
 
     @helper_functions.initializer
     def __init__(self, label, outdir, sftfilepattern, F0s, F1s, F2s, Alphas,
                  Deltas, tref=None, minStartTime=None, maxStartTime=None,
                  nsegs=1, BSGL=False, minCoverFreq=None, maxCoverFreq=None,
                  detectors=None, SSBprec=None, injectSources=None,
-                 input_arrays=False, assumeSqrtSX=None):
+                 input_arrays=False, assumeSqrtSX=None,
+                 transientWindowType=None, t0Band=None, tauBand=None,
+                 outputTransientFstatMap=False):
         """
         Parameters
         ----------
@@ -48,8 +55,25 @@ class GridSearch(BaseSearchClass):
             GPS seconds of the reference time, start time and end time
         input_arrays: bool
             if true, use the F0s, F1s, etc as is
+        transientWindowType: str
+            If 'rect' or 'exp', compute atoms so that a transient (t0,tau) map
+            can later be computed.  ('none' instead of None explicitly calls
+            the transient-window function, but with the full range, for
+            debugging). Currently only supported for nsegs=1.
+        t0Band, tauBand: int
+            if >0, search t0 in (minStartTime,minStartTime+t0Band)
+                   and tau in (2*Tsft,2*Tsft+tauBand).
+            if =0, only compute CW Fstat with t0=minStartTime,
+                   tau=maxStartTime-minStartTime.
+        outputTransientFstatMap: bool
+            if true, write output files for (t0,tau) Fstat maps
+            (one file for each Doppler grid point!)
 
         For all other parameters, see `pyfstat.ComputeFStat` for details
+
+        Note: if a large number of grid points are used, checks against cached
+        data may be slow as the array is loaded into memory. To avoid this, run
+        with the `clean` option which uses a generator instead.
         """
 
         if os.path.isdir(outdir) is False:
@@ -66,7 +90,9 @@ class GridSearch(BaseSearchClass):
             self.search = ComputeFstat(
                 tref=self.tref, sftfilepattern=self.sftfilepattern,
                 minCoverFreq=self.minCoverFreq, maxCoverFreq=self.maxCoverFreq,
-                detectors=self.detectors, transient=False,
+                detectors=self.detectors,
+                transientWindowType=self.transientWindowType,
+                t0Band=self.t0Band, tauBand=self.tauBand,
                 minStartTime=self.minStartTime, maxStartTime=self.maxStartTime,
                 BSGL=self.BSGL, SSBprec=self.SSBprec,
                 injectSources=self.injectSources,
@@ -97,21 +123,25 @@ class GridSearch(BaseSearchClass):
     def get_input_data_array(self):
         logging.info("Generating input data array")
         coord_arrays = []
-        for tup in ([self.minStartTime], [self.maxStartTime], self.F0s,
-                    self.F1s, self.F2s, self.Alphas, self.Deltas):
-            coord_arrays.append(self.get_array_from_tuple(tup))
-
-        input_data = []
-        for vals in itertools.product(*coord_arrays):
-                input_data.append(vals)
-        self.input_data = np.array(input_data)
+        for sl in self.search_labels:
+            coord_arrays.append(
+                self.get_array_from_tuple(np.atleast_1d(getattr(self, sl))))
         self.coord_arrays = coord_arrays
+        self.total_iterations = np.prod([len(ca) for ca in coord_arrays])
+
+        if args.clean is False:
+            input_data = []
+            for vals in itertools.product(*coord_arrays):
+                    input_data.append(vals)
+            self.input_data = np.array(input_data)
 
     def check_old_data_is_okay_to_use(self):
         if args.clean:
             return False
         if os.path.isfile(self.out_file) is False:
-            logging.info('No old data found, continuing with grid search')
+            logging.info(
+                'No old data found in "{:s}", continuing with grid search'
+                .format(self.out_file))
             return False
         if self.sftfilepattern is not None:
             oldest_sft = min([os.path.getmtime(f) for f in
@@ -122,39 +152,73 @@ class GridSearch(BaseSearchClass):
                 return False
 
         data = np.atleast_2d(np.genfromtxt(self.out_file, delimiter=' '))
-        if np.all(data[:, 0:-1] == self.input_data):
+        if np.all(data[:, 0: len(self.coord_arrays)] ==
+                  self.input_data[:, 0:len(self.coord_arrays)]):
             logging.info(
-                'Old data found with matching input, no search performed')
+                'Old data found in "{:s}" with matching input, no search '
+                'performed'.format(self.out_file))
             return data
         else:
             logging.info(
-                'Old data found, input differs, continuing with grid search')
+                'Old data found in "{:s}", input differs, continuing with '
+                'grid search'.format(self.out_file))
             return False
         return False
 
     def run(self, return_data=False):
         self.get_input_data_array()
-        old_data = self.check_old_data_is_okay_to_use()
-        if old_data is not False:
-            self.data = old_data
-            return
+
+        if args.clean:
+            iterable = itertools.product(*self.coord_arrays)
+        else:
+            old_data = self.check_old_data_is_okay_to_use()
+            iterable = self.input_data
+
+            if old_data is not False:
+                self.data = old_data
+                return
 
         if hasattr(self, 'search') is False:
             self.inititate_search_object()
 
         data = []
-        for vals in tqdm(self.input_data):
-            FS = self.search.get_det_stat(*vals)
-            data.append(list(vals) + [FS])
+        for vals in tqdm(iterable,
+                         total=getattr(self, 'total_iterations', None)):
+            detstat = self.search.get_det_stat(*vals)
+            windowRange = getattr(self.search, 'windowRange', None)
+            FstatMap = getattr(self.search, 'FstatMap', None)
+            thisCand = list(vals) + [detstat]
+            if getattr(self, 'transientWindowType', None):
+                if self.outputTransientFstatMap:
+                    # file name encodes freq, alpha, delta, f1dot, f2dot
+                    tCWfile = (os.path.splitext(self.out_file)[0] +
+                               '_tCW_%.16f_%.16f_%.16f_%.16g_%.16g.dat' %
+                               (vals[2], vals[5], vals[6], vals[3], vals[4]))
+                    fo = lal.FileOpen(tCWfile, 'w')
+                    lalpulsar.write_transientFstatMap_to_fp(
+                        fo, FstatMap, windowRange, None)
+                    del fo  # instead of lal.FileClose(), which is not SWIG-exported
+                Fmn = FstatMap.F_mn.data
+                maxidx = np.unravel_index(Fmn.argmax(), Fmn.shape)
+                thisCand += [windowRange.t0+maxidx[0]*windowRange.dt0,
+                             windowRange.tau+maxidx[1]*windowRange.dtau]
+            data.append(thisCand)
 
         data = np.array(data, dtype=np.float)
         if return_data:
             return data
         else:
-            logging.info('Saving data to {}'.format(self.out_file))
-            np.savetxt(self.out_file, data, delimiter=' ')
+            self.save_array_to_disk(data)
             self.data = data
 
+    def get_header(self):
+        header = ';'.join(['date:{}'.format(str(datetime.datetime.now())),
+                           'user:{}'.format(getpass.getuser()),
+                           'hostname:{}'.format(socket.gethostname())])
+        header += '\n' + ' '.join(self.keys)
+        return header
+
+    def save_array_to_disk(self, data):
+        logging.info('Saving data to {}'.format(self.out_file))
+        header = self.get_header()
+        np.savetxt(self.out_file, data, delimiter=' ', header=header)
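
For reference, a sketch of reading the saved results back; the file name is hypothetical, '#' is numpy's default comment prefix for the header written above, and the column layout follows the run() loop (grid values, detection statistic, then t0/tau of the maximum when a transient window is used):

import numpy as np

data = np.genfromtxt('data_s/tCW_gridFS.dat', comments='#')   # hypothetical out_file

# minStartTime maxStartTime F0 F1 F2 Alpha Delta detstat [t0_max tau_max]
detstat = data[:, 7]
t0_max, tau_max = data[:, 8], data[:, 9]   # present only if transientWindowType was set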
+
     def convert_F0_to_mismatch(self, F0, F0hat, Tseg):
         DeltaF0 = F0[1] - F0[0]
         m_spacing = (np.pi*Tseg*DeltaF0)**2 / 12.
@@ -205,7 +269,7 @@ class GridSearch(BaseSearchClass):
             fig.tight_layout()
             fig.savefig('{}/{}_1D.png'.format(self.outdir, self.label))
         else:
-            return fig, ax
+            return ax
 
     def plot_2D(self, xkey, ykey, ax=None, save=True, vmin=None, vmax=None,
                 add_mismatch=None, xN=None, yN=None, flat_keys=[],
@@ -286,6 +350,16 @@ class GridSearch(BaseSearchClass):
             return ax
 
     def get_max_twoF(self):
+        """ Get the maximum twoF over the grid
+
+        Returns
+        -------
+        d: dict
+            Dictionary containing 'minStartTime', 'maxStartTime', 'F0', 'F1',
+            'F2', 'Alpha', 'Delta' and 'twoF' at the maximum
+
+        """
+
         twoF = self.data[:, -1]
         idx = np.argmax(twoF)
         v = self.data[idx, :]
@@ -350,10 +424,12 @@ class SliceGridSearch(GridSearch):
         self.ndim = 4
 
         self.search_keys = ['F0', 'F1', 'Alpha', 'Delta']
-        self.Lambda0 = np.array(Lambda0)
+        if self.Lambda0 is None:
+            raise ValueError('Lambda0 undefined')
         if len(self.Lambda0) != len(self.search_keys):
             raise ValueError(
                 'Lambda0 must be of length {}'.format(len(self.search_keys)))
+        self.Lambda0 = np.array(Lambda0)
 
     def run(self, factor=2, max_n_ticks=4, whspace=0.07, save=True,
             **kwargs):
@@ -470,14 +546,18 @@ class GridUniformPriorSearch():
 
 class GridGlitchSearch(GridSearch):
     """ Grid search using the SemiCoherentGlitchSearch """
+    search_labels = ['F0s', 'F1s', 'F2s', 'Alphas', 'Deltas', 'delta_F0s',
+                     'delta_F1s', 'tglitchs']
+
     @helper_functions.initializer
-    def __init__(self, label, outdir, sftfilepattern=None, F0s=[0],
+    def __init__(self, label, outdir='data', sftfilepattern=None, F0s=[0],
                  F1s=[0], F2s=[0], delta_F0s=[0], delta_F1s=[0], tglitchs=None,
                  Alphas=[0], Deltas=[0], tref=None, minStartTime=None,
                  maxStartTime=None, minCoverFreq=None, maxCoverFreq=None,
-                 write_after=1000):
-
+                 detectors=None):
         """
+        Run a single-glitch grid search
+
         Parameters
         ----------
         label, outdir: str
@@ -487,20 +567,25 @@ class GridGlitchSearch(GridSearch):
             mutiple patterns can be given separated by colons.
         F0s, F1s, F2s, delta_F0s, delta_F1s, tglitchs, Alphas, Deltas: tuple
             Length 3 tuple describing the grid for each parameter, e.g
-            [F0min, F0max, dF0], for a fixed value simply give [F0].
+            [F0min, F0max, dF0]; for a fixed value, simply give [F0]. Note that
+            tglitchs is referenced to zero at minStartTime.
         tref, minStartTime, maxStartTime: int
             GPS seconds of the reference time, start time and end time
 
         For all other parameters, see pyfstat.ComputeFStat.
         """
+
+        self.BSGL = False
+        self.input_arrays = False
         if tglitchs is None:
-            self.tglitchs = [self.maxStartTime]
+            raise ValueError('You must specify `tglitchs`')
 
         self.search = SemiCoherentGlitchSearch(
             label=label, outdir=outdir, sftfilepattern=self.sftfilepattern,
             tref=tref, minStartTime=minStartTime, maxStartTime=maxStartTime,
             minCoverFreq=minCoverFreq, maxCoverFreq=maxCoverFreq,
             BSGL=self.BSGL)
+        self.search.get_det_stat = self.search.get_semicoherent_nglitch_twoF
 
         if os.path.isdir(outdir) is False:
             os.mkdir(outdir)
@@ -508,19 +593,6 @@ class GridGlitchSearch(GridSearch):
         self.keys = ['F0', 'F1', 'F2', 'Alpha', 'Delta', 'delta_F0',
                      'delta_F1', 'tglitch']
 
-    def get_input_data_array(self):
-        arrays = []
-        for tup in (self.F0s, self.F1s, self.F2s, self.Alphas, self.Deltas,
-                    self.delta_F0s, self.delta_F1s, self.tglitchs):
-            arrays.append(self.get_array_from_tuple(tup))
-
-        input_data = []
-        for vals in itertools.product(*arrays):
-            input_data.append(vals)
-
-        self.arrays = arrays
-        self.input_data = np.array(input_data)
-
 
 class SlidingWindow(GridSearch):
     @helper_functions.initializer
@@ -643,6 +715,10 @@ class FrequencySlidingWindow(GridSearch):
         For all other parameters, see `pyfstat.ComputeFStat` for details
         """
 
+        self.transientWindowType = None
+        self.t0Band = None
+        self.tauBand = None
+
         if os.path.isdir(outdir) is False:
             os.mkdir(outdir)
         self.set_out_file()
@@ -652,13 +728,14 @@ class FrequencySlidingWindow(GridSearch):
         self.Alphas = [Alpha]
         self.Deltas = [Delta]
         self.input_arrays = False
+        self.keys = ['_', '_', 'F0', 'F1', 'F2', 'Alpha', 'Delta']
 
     def inititate_search_object(self):
         logging.info('Setting up search object')
         self.search = ComputeFstat(
             tref=self.tref, sftfilepattern=self.sftfilepattern,
             minCoverFreq=self.minCoverFreq, maxCoverFreq=self.maxCoverFreq,
-            detectors=self.detectors, transient=True,
+            detectors=self.detectors, transientWindowType=self.transientWindowType,
             minStartTime=self.minStartTime, maxStartTime=self.maxStartTime,
             BSGL=self.BSGL, SSBprec=self.SSBprec,
             injectSources=self.injectSources)
@@ -755,6 +832,10 @@ class EarthTest(GridSearch):
 
         For all other parameters, see `pyfstat.ComputeFStat` for details
         """
+        self.transientWindowType = None
+        self.t0Band = None
+        self.tauBand = None
+
         if os.path.isdir(outdir) is False:
             os.mkdir(outdir)
         self.nsegs = 1
@@ -864,10 +945,15 @@ class EarthTest(GridSearch):
                   r'$\Delta P_\mathrm{spin}$ [min]',
                   r'$2\mathcal{F}$']
 
-        from projection_matrix import projection_matrix
+        try:
+            from gridcorner import gridcorner
+        except ImportError:
+            raise ImportError(
+                "Python module 'gridcorner' not found, please install from "
+                "https://gitlab.aei.uni-hannover.de/GregAshton/gridcorner")
 
-        fig, axes = projection_matrix(data, xyz, projection=projection,
-                                      factor=1.6, labels=labels)
+        fig, axes = gridcorner(data, xyz, projection=projection, factor=1.6,
+                               labels=labels)
         axes[-1][-1].axvline((lal.DAYJUL_SI - lal.DAYSID_SI)/60.0, color='C3')
         plt.suptitle(
             'T={:.1f} days, $f$={:.2f} Hz, $\log\mathcal{{B}}_{{S/A}}$={:.1f},'
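
With the changes above, `GridGlitchSearch` no longer falls back to `tglitchs=[maxStartTime]`; callers must pass `tglitchs` explicitly, and the detection statistic is wired to `get_semicoherent_nglitch_twoF`. A minimal usage sketch of the updated call, mirroring the test added below in `tests.py` (label, output directory, SFT pattern, and search ranges here are placeholders, not values from the repository):

```python
import pyfstat

# Illustrative values only; the point is that tglitchs must now be given explicitly.
tref = 700000000
search = pyfstat.GridGlitchSearch(
    'grid_glitch_example', 'example_outdir', 'example_outdir/*.sft',
    F0s=[29, 31, 0.1], F1s=[-1e-10, 0, 1e-11], F2s=[0],
    Alphas=[0], Deltas=[0], tref=tref,
    tglitchs=[tref])  # omitting tglitchs now raises ValueError
search.run()
```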
diff --git a/pyfstat/helper_functions.py b/pyfstat/helper_functions.py
index 1a776368ffce7a594bd1ae3a3e4cbf5b52ff7abc..5c011fd91fda3a2785ee596d7c5b85a14e00baf0 100644
--- a/pyfstat/helper_functions.py
+++ b/pyfstat/helper_functions.py
@@ -38,16 +38,18 @@ def set_up_command_line_arguments():
     parser.add_argument("-v", "--verbose", action="store_true",
                         help="Increase output verbosity [logging.DEBUG]")
     parser.add_argument("-q", "--quite", action="store_true",
-                        help="Decrease output verbosity [logging.WARNGING]")
-    parser.add_argument("-vq", "--very_quite", action="store_true",
-                        help="Increase output verbosity [logging.ERROR]")
+                        help="Decrease output verbosity [logging.WARNING]")
     parser.add_argument("--no-interactive", help="Don't use interactive",
                         action="store_true")
-    parser.add_argument("-c", "--clean", help="Don't use cached data",
-                        action="store_true")
-    parser.add_argument("-u", "--use-old-data", action="store_true")
-    parser.add_argument('-s', "--setup-only", action="store_true")
-    parser.add_argument("--no-template-counting", action="store_true")
+    parser.add_argument("-c", "--clean", action="store_true",
+                        help="Force clean data, never use cached data")
+    fu_parser = parser.add_argument_group(
+        'follow-up options', 'Options related to MCMCFollowUpSearch')
+    fu_parser.add_argument('-s', "--setup-only", action="store_true",
+                           help="Only generate the setup file, don't run")
+    fu_parser.add_argument(
+        "--no-template-counting", action="store_true",
+        help="No counting of templates, useful if the setup is predefined")
     parser.add_argument(
         '-N', type=int, default=3, metavar='N',
         help="Number of threads to use when running in parallel")
diff --git a/pyfstat/make_sfts.py b/pyfstat/make_sfts.py
index bf4e258faafca5a30ed042b1da221de3d4a9a963..5e47b56c6f0a5c56e71697326303cf6307f483cb 100644
--- a/pyfstat/make_sfts.py
+++ b/pyfstat/make_sfts.py
@@ -25,7 +25,8 @@ class Writer(BaseSearchClass):
                  tref=None, F0=30, F1=1e-10, F2=0, Alpha=5e-3,
                  Delta=6e-2, h0=0.1, cosi=0.0, psi=0.0, phi=0, Tsft=1800,
                  outdir=".", sqrtSX=1, Band=4, detectors='H1',
-                 minStartTime=None, maxStartTime=None, add_noise=True):
+                 minStartTime=None, maxStartTime=None, add_noise=True,
+                 transientWindowType='none'):
         """
         Parameters
         ----------
@@ -91,9 +92,9 @@ class Writer(BaseSearchClass):
         self.make_cff()
         self.run_makefakedata()
 
-    def get_single_config_line(self, i, Alpha, Delta, h0, cosi, psi, phi, F0,
-                               F1, F2, tref, tstart, duration_days):
-        template = (
+    def get_base_template(self, i, Alpha, Delta, h0, cosi, psi, phi, F0,
+                          F1, F2, tref):
+        return (
 """[TS{}]
 Alpha = {:1.18e}
 Delta = {:1.18e}
@@ -104,12 +105,35 @@ phi0 = {:1.18e}
 Freq = {:1.18e}
 f1dot = {:1.18e}
 f2dot = {:1.18e}
-refTime = {:10.6f}
-transientWindowType=rect
-transientStartTime={:10.3f}
-transientTauDays={:1.3f}\n""")
+refTime = {:10.6f}""")
+
+    def get_single_config_line_cw(
+            self, i, Alpha, Delta, h0, cosi, psi, phi, F0, F1, F2, tref):
+        template = (self.get_base_template(
+            i, Alpha, Delta, h0, cosi, psi, phi, F0, F1, F2, tref) + """\n""")
+        return template.format(
+            i, Alpha, Delta, h0, cosi, psi, phi, F0, F1, F2, tref)
+
+    def get_single_config_line_tcw(
+            self, i, Alpha, Delta, h0, cosi, psi, phi, F0, F1, F2, tref,
+            window, tstart, duration_days):
+        template = (self.get_base_template(
+            i, Alpha, Delta, h0, cosi, psi, phi, F0, F1, F2, tref) + """
+transientWindowType = {:s}
+transientStartTime = {:10.3f}
+transientTauDays = {:1.3f}\n""")
         return template.format(i, Alpha, Delta, h0, cosi, psi, phi, F0, F1,
-                               F2, tref, tstart, duration_days)
+                               F2, tref, window, tstart, duration_days)
+
+    def get_single_config_line(self, i, Alpha, Delta, h0, cosi, psi, phi, F0,
+                               F1, F2, tref, window, tstart, duration_days):
+        if window == 'none':
+            return self.get_single_config_line_cw(
+                i, Alpha, Delta, h0, cosi, psi, phi, F0, F1, F2, tref)
+        else:
+            return self.get_single_config_line_tcw(
+                i, Alpha, Delta, h0, cosi, psi, phi, F0, F1, F2, tref, window,
+                tstart, duration_days)
 
     def make_cff(self):
         """
@@ -119,8 +143,8 @@ transientTauDays={:1.3f}\n""")
 
         content = self.get_single_config_line(
             0, self.Alpha, self.Delta, self.h0, self.cosi, self.psi,
-            self.phi, self.F0, self.F1, self.F2, self.tref, self.tstart,
-            self.duration_days)
+            self.phi, self.F0, self.F1, self.F2, self.tref,
+            self.transientWindowType, self.tstart, self.duration_days)
 
         if self.check_if_cff_file_needs_rewritting(content):
             config_file = open(self.config_file_name, "w+")
@@ -247,7 +271,8 @@ class GlitchWriter(Writer):
                  delta_F2=0, tref=None, F0=30, F1=1e-10, F2=0, Alpha=5e-3,
                  Delta=6e-2, h0=0.1, cosi=0.0, psi=0.0, phi=0, Tsft=1800,
                  outdir=".", sqrtSX=1, Band=4, detectors='H1',
-                 minStartTime=None, maxStartTime=None, add_noise=True):
+                 minStartTime=None, maxStartTime=None, add_noise=True,
+                 transientWindowType='rect'):
         """
         Parameters
         ----------
@@ -317,7 +342,8 @@ class GlitchWriter(Writer):
                                            self.tbounds[:-1])):
             line = self.get_single_config_line(
                 i, self.Alpha, self.Delta, self.h0, self.cosi, self.psi,
-                t[0], t[1], t[2], t[3], self.tref, ts, d)
+                t[0], t[1], t[2], t[3], self.tref, self.transientWindowType,
+                ts, d)
 
             content += line
 
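
`Writer` and `GlitchWriter` now accept a `transientWindowType` argument ('none' by default for `Writer`, 'rect' for `GlitchWriter`): with 'none' the generated `.cff` contains only the base `[TS...]` block, while 'rect' or 'exp' append the `transientWindowType`/`transientStartTime`/`transientTauDays` lines. A minimal usage sketch with placeholder parameter values (only `transientWindowType` is the point of the example):

```python
import pyfstat

# Placeholder injection parameters for illustration.
writer = pyfstat.Writer(
    label='transient_example', outdir='example_outdir',
    tstart=700000000, duration=2 * 86400, tref=700000000,
    F0=30, F1=-1e-10, F2=0, Alpha=5e-3, Delta=1.2,
    h0=1, sqrtSX=1, transientWindowType='rect')
writer.make_data()  # writes the .cff (now including the transient block) and runs makefakedata
```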
diff --git a/pyfstat/mcmc_based_searches.py b/pyfstat/mcmc_based_searches.py
index 912445e33f2e2846710b8edaf70df7490f262144..b09c7d0397c7133862993bced37bed74eed085ae 100644
--- a/pyfstat/mcmc_based_searches.py
+++ b/pyfstat/mcmc_based_searches.py
@@ -76,6 +76,12 @@ class MCMCSearch(core.BaseSearchClass):
         the search
     assumeSqrtSX: float, optional
         Don't estimate noise-floors, but assume (stationary) per-IFO sqrt{SX}
+    transientWindowType: str
+        If 'rect' or 'exp', compute the F-statistic atoms so that a transient
+        (t0, tau) map can be computed afterwards. Passing 'none' (rather than
+        None) still calls the transient-window function explicitly, but over
+        the full data span, which is mainly useful for debugging.
+        Currently only supported for nsegs=1.
 
     Attributes
     ----------
@@ -108,7 +114,8 @@ class MCMCSearch(core.BaseSearchClass):
                  log10beta_min=-5, theta_initial=None,
                  rhohatmax=1000, binary=False, BSGL=False,
                  SSBprec=None, minCoverFreq=None, maxCoverFreq=None,
-                 injectSources=None, assumeSqrtSX=None):
+                 injectSources=None, assumeSqrtSX=None,
+                 transientWindowType=None):
 
         if os.path.isdir(outdir) is False:
             os.mkdir(outdir)
@@ -150,7 +157,8 @@ class MCMCSearch(core.BaseSearchClass):
         self.search = core.ComputeFstat(
             tref=self.tref, sftfilepattern=self.sftfilepattern,
             minCoverFreq=self.minCoverFreq, maxCoverFreq=self.maxCoverFreq,
-            detectors=self.detectors, BSGL=self.BSGL, transient=False,
+            detectors=self.detectors, BSGL=self.BSGL,
+            transientWindowType=self.transientWindowType,
             minStartTime=self.minStartTime, maxStartTime=self.maxStartTime,
             binary=self.binary, injectSources=self.injectSources,
             assumeSqrtSX=self.assumeSqrtSX, SSBprec=self.SSBprec)
@@ -212,38 +220,39 @@ class MCMCSearch(core.BaseSearchClass):
         self.theta_symbols = [self.theta_symbols[i] for i in idxs]
         self.theta_keys = [self.theta_keys[i] for i in idxs]
 
+    def _evaluate_logpost(self, p0vec):
+        init_logp = np.array([
+            self.logp(p, self.theta_prior, self.theta_keys, self.search)
+            for p in p0vec])
+        init_logl = np.array([
+            self.logl(p, self.search)
+            for p in p0vec])
+        return init_logl + init_logp
+
     def _check_initial_points(self, p0):
         for nt in range(self.ntemps):
             logging.info('Checking temperature {} chains'.format(nt))
-            initial_priors = np.array([
-                self.logp(p, self.theta_prior, self.theta_keys, self.search)
-                for p in p0[nt]])
-            number_of_initial_out_of_bounds = sum(initial_priors == -np.inf)
-
-            if number_of_initial_out_of_bounds > 0:
+            num = sum(self._evaluate_logpost(p0[nt]) == -np.inf)
+            if num > 0:
                 logging.warning(
                     'Of {} initial values, {} are -np.inf due to the prior'
-                    .format(len(initial_priors),
-                            number_of_initial_out_of_bounds))
-
+                    .format(len(p0[0]), num))
                 p0 = self._generate_new_p0_to_fix_initial_points(
-                    p0, nt, initial_priors)
+                    p0, nt)
 
-    def _generate_new_p0_to_fix_initial_points(self, p0, nt, initial_priors):
+    def _generate_new_p0_to_fix_initial_points(self, p0, nt):
         logging.info('Attempting to correct intial values')
-        idxs = np.arange(self.nwalkers)[initial_priors == -np.inf]
+        init_logpost = self._evaluate_logpost(p0[nt])
+        idxs = np.arange(self.nwalkers)[init_logpost == -np.inf]
         count = 0
-        while sum(initial_priors == -np.inf) > 0 and count < 100:
+        while sum(init_logpost == -np.inf) > 0 and count < 100:
             for j in idxs:
                 p0[nt][j] = (p0[nt][np.random.randint(0, self.nwalkers)]*(
                              1+np.random.normal(0, 1e-10, self.ndim)))
-            initial_priors = np.array([
-                self.logp(p, self.theta_prior, self.theta_keys,
-                          self.search)
-                for p in p0[nt]])
+            init_logpost = self._evaluate_logpost(p0[nt])
             count += 1
 
-        if sum(initial_priors == -np.inf) > 0:
+        if sum(init_logpost == -np.inf) > 0:
             logging.info('Failed to fix initial priors')
         else:
             logging.info('Suceeded to fix initial priors')
@@ -1215,10 +1224,6 @@ class MCMCSearch(core.BaseSearchClass):
         return d
 
     def _check_old_data_is_okay_to_use(self):
-        if args.use_old_data:
-            logging.info("Forcing use of old data")
-            return True
-
         if os.path.isfile(self.pickle_path) is False:
             logging.info('No pickled data found')
             return False
@@ -1596,7 +1601,7 @@ class MCMCGlitchSearch(MCMCSearch):
             'multiplier': 1/86400.,
             'subtractor': 'minStartTime',
             'unit': 'day',
-            'label': 'Glitch time \n days after minStartTime'}
+            'label': '$t^{g}_0$ \n [days]'}
             )
 
     @helper_functions.initializer
@@ -1739,7 +1744,9 @@ class MCMCGlitchSearch(MCMCSearch):
                  ntemps=self.ntemps, theta_keys=self.theta_keys,
                  theta_prior=self.theta_prior,
                  log10beta_min=self.log10beta_min,
-                 theta0_idx=self.theta0_idx, BSGL=self.BSGL)
+                 theta0_idx=self.theta0_idx, BSGL=self.BSGL,
+                 minStartTime=self.minStartTime,
+                 maxStartTime=self.maxStartTime)
         return d
 
     def _apply_corrections_to_p0(self, p0):
@@ -1853,7 +1860,9 @@ class MCMCSemiCoherentSearch(MCMCSearch):
                  ntemps=self.ntemps, theta_keys=self.theta_keys,
                  theta_prior=self.theta_prior,
                  log10beta_min=self.log10beta_min,
-                 BSGL=self.BSGL, nsegs=self.nsegs)
+                 BSGL=self.BSGL, nsegs=self.nsegs,
+                 minStartTime=self.minStartTime,
+                 maxStartTime=self.maxStartTime)
         return d
 
     def _initiate_search_object(self):
@@ -2194,10 +2203,13 @@ class MCMCTransientSearch(MCMCSearch):
 
     def _initiate_search_object(self):
         logging.info('Setting up search object')
+        if not self.transientWindowType:
+            self.transientWindowType = 'rect'
         self.search = core.ComputeFstat(
             tref=self.tref, sftfilepattern=self.sftfilepattern,
             minCoverFreq=self.minCoverFreq, maxCoverFreq=self.maxCoverFreq,
-            detectors=self.detectors, transient=True,
+            detectors=self.detectors,
+            transientWindowType=self.transientWindowType,
             minStartTime=self.minStartTime, maxStartTime=self.maxStartTime,
             BSGL=self.BSGL, binary=self.binary,
             injectSources=self.injectSources)
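
The walker-initialisation check above now evaluates the full log-posterior (prior plus likelihood) rather than the prior alone, and any walker starting at -inf is redrawn by jittering a randomly chosen walker. A stripped-down sketch of that resampling loop, independent of the PyFstat classes (the function name and signature are illustrative):

```python
import numpy as np

def fix_initial_points(p0, logpost, nwalkers, ndim, max_tries=100):
    """Redraw walkers whose initial log-posterior is -inf (simplified sketch)."""
    vals = np.array([logpost(p) for p in p0])
    bad = np.arange(nwalkers)[vals == -np.inf]
    count = 0
    while np.sum(vals == -np.inf) > 0 and count < max_tries:
        for j in bad:
            # Copy a random walker and perturb it very slightly.
            p0[j] = p0[np.random.randint(0, nwalkers)] * (
                1 + np.random.normal(0, 1e-10, ndim))
        vals = np.array([logpost(p) for p in p0])
        count += 1
    return p0
```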
diff --git a/tests.py b/tests.py
index 50b4c2fed8b5044cb60063349157e7300400499d..4b853e4fd0d61247fe68a75c481ffc935a542ba8 100644
--- a/tests.py
+++ b/tests.py
@@ -13,6 +13,27 @@ class Test(unittest.TestCase):
     def setUpClass(self):
         if os.path.isdir(self.outdir):
             shutil.rmtree(self.outdir)
+        h0 = 1
+        sqrtSX = 1
+        F0 = 30
+        F1 = -1e-10
+        F2 = 0
+        minStartTime = 700000000
+        duration = 2 * 86400
+        Alpha = 5e-3
+        Delta = 1.2
+        tref = minStartTime
+        Writer = pyfstat.Writer(F0=F0, F1=F1, F2=F2, label='test',
+                                h0=h0, sqrtSX=sqrtSX,
+                                outdir=self.outdir, tstart=minStartTime,
+                                Alpha=Alpha, Delta=Delta, tref=tref,
+                                duration=duration,
+                                Band=4)
+        Writer.make_data()
+        self.sftfilepath = Writer.sftfilepath
+        self.minStartTime = minStartTime
+        self.maxStartTime = minStartTime + duration
+        self.duration = duration
 
     @classmethod
     def tearDownClass(self):
@@ -20,7 +41,7 @@ class Test(unittest.TestCase):
             shutil.rmtree(self.outdir)
 
 
-class TestWriter(Test):
+class Writer(Test):
     label = "TestWriter"
 
     def test_make_cff(self):
@@ -53,13 +74,13 @@ class TestWriter(Test):
         self.assertFalse(time_first == time_third)
 
 
-class TestBunch(Test):
+class Bunch(Test):
     def test_bunch(self):
         b = pyfstat.core.Bunch(dict(x=10))
         self.assertTrue(b.x == 10)
 
 
-class Test_par(Test):
+class par(Test):
     label = 'TestPar'
 
     def test(self):
@@ -79,7 +100,7 @@ class Test_par(Test):
         os.system('rm -r {}'.format(self.outdir))
 
 
-class TestBaseSearchClass(Test):
+class BaseSearchClass(Test):
     def test_shift_matrix(self):
         BSC = pyfstat.BaseSearchClass()
         dT = 10
@@ -118,7 +139,7 @@ class TestBaseSearchClass(Test):
                 rtol=1e-9, atol=1e-9))
 
 
-class TestComputeFstat(Test):
+class ComputeFstat(Test):
     label = "TestComputeFstat"
 
     def test_run_computefstatistic_single_point(self):
@@ -196,7 +217,7 @@ class TestComputeFstat(Test):
         self.assertTrue(FS_from_dict == FS_from_file)
 
 
-class TestSemiCoherentSearch(Test):
+class SemiCoherentSearch(Test):
     label = "TestSemiCoherentSearch"
 
     def test_get_semicoherent_twoF(self):
@@ -249,7 +270,7 @@ class TestSemiCoherentSearch(Test):
         self.assertTrue(BSGL > 0)
 
 
-class TestSemiCoherentGlitchSearch(Test):
+class SemiCoherentGlitchSearch(Test):
     label = "TestSemiCoherentGlitchSearch"
 
     def test_get_semicoherent_nglitch_twoF(self):
@@ -292,7 +313,7 @@ class TestSemiCoherentGlitchSearch(Test):
         self.assertTrue(np.abs((FS - predicted_FS))/predicted_FS < 0.3)
 
 
-class TestMCMCSearch(Test):
+class MCMCSearch(Test):
     label = "TestMCMCSearch"
 
     def test_fully_coherent(self):
@@ -335,5 +356,49 @@ class TestMCMCSearch(Test):
             FS > predicted_FS or np.abs((FS-predicted_FS))/predicted_FS < 0.3)
 
 
+class GridSearch(Test):
+    F0s = [29, 31, 0.1]
+    F1s = [-1e-10, 0, 1e-11]
+    tref = 700000000
+
+    def test_grid_search(self):
+        search = pyfstat.GridSearch(
+            'grid_search', self.outdir, self.sftfilepath, F0s=self.F0s,
+            F1s=[0], F2s=[0], Alphas=[0], Deltas=[0], tref=self.tref)
+        search.run()
+        self.assertTrue(os.path.isfile(search.out_file))
+
+    def test_semicoherent_grid_search(self):
+        search = pyfstat.GridSearch(
+            'sc_grid_search', self.outdir, self.sftfilepath, F0s=self.F0s,
+            F1s=[0], F2s=[0], Alphas=[0], Deltas=[0], tref=self.tref, nsegs=2)
+        search.run()
+        self.assertTrue(os.path.isfile(search.out_file))
+
+    def test_slice_grid_search(self):
+        search = pyfstat.SliceGridSearch(
+            'slice_grid_search', self.outdir, self.sftfilepath, F0s=self.F0s,
+            F1s=self.F1s, F2s=[0], Alphas=[0], Deltas=[0], tref=self.tref,
+            Lambda0=[30, 0, 0, 0])
+        search.run()
+        self.assertTrue(os.path.isfile('{}/{}_slice_projection.png'
+                                       .format(search.outdir, search.label)))
+
+    def test_glitch_grid_search(self):
+        search = pyfstat.GridGlitchSearch(
+            'grid_grid_search', self.outdir, self.sftfilepath, F0s=self.F0s,
+            F1s=self.F1s, F2s=[0], Alphas=[0], Deltas=[0], tref=self.tref,
+            tglitchs=[self.tref])
+        search.run()
+        self.assertTrue(os.path.isfile(search.out_file))
+
+    def test_sliding_window(self):
+        search = pyfstat.FrequencySlidingWindow(
+            'grid_grid_search', self.outdir, self.sftfilepath, F0s=self.F0s,
+            F1=0, F2=0, Alpha=0, Delta=0, tref=self.tref,
+            minStartTime=self.minStartTime, maxStartTime=self.maxStartTime)
+        search.run()
+        self.assertTrue(os.path.isfile(search.out_file))
+
 if __name__ == '__main__':
     unittest.main()
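
The renamed test classes all share the base `Test` fixture, which builds a single SFT data set in `setUpClass` and removes it again in `tearDownClass`, so each class can be run on its own via unittest's standard command-line selection. A generic sketch of that shared-fixture pattern (class name and paths are illustrative, not from the repository):

```python
import os
import shutil
import unittest

class SharedDataTest(unittest.TestCase):
    outdir = 'TestData'

    @classmethod
    def setUpClass(cls):
        # Build expensive shared fixtures once for all tests in the class.
        if os.path.isdir(cls.outdir):
            shutil.rmtree(cls.outdir)
        os.mkdir(cls.outdir)

    @classmethod
    def tearDownClass(cls):
        # Clean up the shared data after the whole class has run.
        if os.path.isdir(cls.outdir):
            shutil.rmtree(cls.outdir)
```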