Commit 80e6cfe3 authored by David Keitel

use os.path.join instead of manual string joining with "/"

parent e6d7b19d
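
The change applied throughout this commit is mechanical: every manually formatted path of the
form "{}/{}".format(outdir, name) becomes an os.path.join() call, which uses the platform's
path separator and avoids a doubled slash when the directory already ends in "/". A minimal
sketch of the before/after pattern, using placeholder outdir and label values that are not
taken from the commit:

    import os

    outdir = "output"   # hypothetical directory, for illustration only
    label = "TestRun"   # hypothetical label

    # before: hard-codes the "/" separator
    old_path = "{}/{}.par".format(outdir, label)

    # after: joins with the platform separator and tolerates a trailing "/" on outdir
    new_path = os.path.join(outdir, "{}.par".format(label))

    print(old_path)  # output/TestRun.par
    print(new_path)  # output/TestRun.par

Both forms give the same string on POSIX systems in this simple case; the os.path.join version
is simply the portable and less error-prone spelling.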
@@ -94,7 +94,7 @@ def read_par(
     """
     if filename is None:
-        filename = "{}/{}.{}".format(outdir, label, suffix)
+        filename = os.path.join(outdir, "{}.{}".format(label, suffix))
     if os.path.isfile(filename) is False:
         raise ValueError("No file {} found".format(filename))
     d = {}
@@ -220,7 +220,7 @@ class BaseSearchClass(object):
     def _add_log_file(self):
         """ Log output to a file, requires class to have outdir and label """
-        logfilename = "{}/{}.log".format(self.outdir, self.label)
+        logfilename = os.path.join(self.outdir, self.label + ".log")
         fh = logging.FileHandler(logfilename)
         fh.setLevel(logging.INFO)
         fh.setFormatter(
@@ -756,7 +756,10 @@ class ComputeFstat(BaseSearchClass):
         self.windowRange.tau = self.tauMin
         logging.info("Initialising transient FstatMap features...")
-        self.tCWFstatMapFeatures, self.gpu_context = tcw.init_transient_fstat_map_features(
+        (
+            self.tCWFstatMapFeatures,
+            self.gpu_context,
+        ) = tcw.init_transient_fstat_map_features(
             self.tCWFstatMapVersion == "pycuda", self.cudaDeviceName
         )
@@ -942,7 +945,7 @@ class ComputeFstat(BaseSearchClass):
         """
         if pfs_input is None:
-            if os.path.isfile("{}/{}.loudest".format(outdir, label)) is False:
+            if os.path.isfile(os.path.join(outdir, label + ".loudest")) is False:
                 raise ValueError("Need a loudest file to add the predicted Fstat")
             loudest = read_par(label=label, outdir=outdir, suffix="loudest")
             pfs_input = {
@@ -1089,7 +1092,7 @@ class ComputeFstat(BaseSearchClass):
         ax.set_title(title)
         if savefig:
             plt.tight_layout()
-            plt.savefig("{}/{}_twoFcumulative.png".format(outdir, label))
+            plt.savefig(os.path.join(outdir, label + "_twoFcumulative.png"))
             return taus, twoFs
         else:
             return ax
@@ -1179,7 +1182,7 @@ class SemiCoherentSearch(ComputeFstat):
         For all other parameters, see pyfstat.ComputeFStat.
         """
-        self.fs_file_name = "{}/{}_FS.dat".format(self.outdir, self.label)
+        self.fs_file_name = os.path.join(self.outdir, self.label + "_FS.dat")
         self.set_ephemeris_files()
         self.transientWindowType = "rect"
         self.t0Band = None
@@ -1356,7 +1359,7 @@ class SemiCoherentGlitchSearch(ComputeFstat):
         For all other parameters, see pyfstat.ComputeFStat.
         """
-        self.fs_file_name = "{}/{}_FS.dat".format(self.outdir, self.label)
+        self.fs_file_name = os.path.join(self.outdir, self.label + "_FS.dat")
         self.set_ephemeris_files()
         self.transientWindowType = "rect"
         self.t0Band = None
...
@@ -826,7 +826,7 @@ class SliceGridSearch(GridSearch):
             axes[i, i].set_ylabel("$2\mathcal{F}$")
         if save:
-            fig.savefig("{}/{}_slice_projection.png".format(self.outdir, self.label))
+            fig.savefig(os.path.join(self.outdir, self.label + "_slice_projection.png"))
         else:
             return fig, axes
@@ -1049,7 +1049,9 @@ class SlidingWindow(GridSearch):
     def run(self, key="h0", errkey="dh0"):
         self.key = key
         self.errkey = errkey
-        out_file = "{}/{}_{}-sliding-window.txt".format(self.outdir, self.label, key)
+        out_file = os.path.join(
+            self.outdir, "{}_{}-sliding-window.txt".format(self.label, key)
+        )
         if self.check_old_data_is_okay_to_use(out_file) is False:
             self.inititate_search_object()
@@ -1090,7 +1092,9 @@ class SlidingWindow(GridSearch):
         if fig:
             fig.savefig(
-                "{}/{}_{}-sliding-window.png".format(self.outdir, self.label, self.key)
+                os.path.join(
+                    self.outdir, "{}_{}-sliding-window.png".format(self.label, self.key)
+                )
             )
         else:
             return ax
@@ -1239,7 +1243,7 @@ class FrequencySlidingWindow(GridSearch):
         ax.set_title(ax.get_title(), y=1.18)
         if savefig:
             plt.tight_layout()
-            plt.savefig("{}/{}_sliding_window.png".format(self.outdir, self.label))
+            plt.savefig(os.path.join(self.outdir, self.label + "_sliding_window.png"))
         else:
             return ax
@@ -1454,7 +1458,7 @@ class EarthTest(GridSearch):
             y=0.99,
             size=14,
         )
-        fig.savefig("{}/{}_projection_matrix.png".format(self.outdir, self.label))
+        fig.savefig(os.path.join(self.outdir, self.label + "_projection_matrix.png"))

     def plot(self, key, prior_widths=None):
         Bsa, FmaxMismatch = self.marginalised_bayes_factor(prior_widths)
@@ -1479,7 +1483,7 @@ class EarthTest(GridSearch):
             )
         )
         fig.tight_layout()
-        fig.savefig("{}/{}_1D.png".format(self.outdir, self.label))
+        fig.savefig(os.path.join(self.outdir, self.label + "_1D.png"))


 class DMoff_NO_SPIN(GridSearch):
...
@@ -124,7 +124,7 @@ def set_up_command_line_arguments():
 def get_ephemeris_files():
     """ Returns the earth_ephem and sun_ephem """
-    config_file = os.path.expanduser("~") + "/.pyfstat.conf"
+    config_file = os.path.join(os.path.expanduser("~"), ".pyfstat.conf")
     env_var = "LALPULSAR_DATADIR"
     please = "Please provide the ephemerides paths when initialising searches."
     if os.path.isfile(config_file):
...
@@ -96,7 +96,7 @@ class Writer(BaseSearchClass):
             os.makedirs(self.outdir)
         if self.tref is None:
             self.tref = self.tstart
-        self.config_file_name = "{}/{}.cff".format(self.outdir, self.label)
+        self.config_file_name = os.path.join(self.outdir, self.label + ".cff")
         self.sftfilenames = [
             lalpulsar.OfficialSFTFilename(
                 dets[0],
@@ -110,7 +110,7 @@ class Writer(BaseSearchClass):
             for dets in self.detectors.split(",")
         ]
         self.sftfilepath = ";".join(
-            ["{}/{}".format(self.outdir, fn) for fn in self.sftfilenames]
+            [os.path.join(self.outdir, fn) for fn in self.sftfilenames]
         )
         self.IFOs = ",".join(['"{}"'.format(d) for d in self.detectors.split(",")])
@@ -354,7 +354,7 @@ transientTau = {:10.0f}\n"""
         # Remove old data:
         try:
-            os.unlink("{}/*{}*.sft".format(self.outdir, self.label))
+            os.unlink(os.path.join(self.outdir, "*" + self.label + "*.sft"))
         except OSError:
             pass
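
One subtlety in the hunk above: os.path.join is plain string joining, so it builds a wildcard
pattern just as readily as a concrete path, but neither os.path.join nor os.unlink expands the
wildcard; that is glob's job. A small sketch of the pattern-plus-glob combination, with
hypothetical outdir and label values not taken from the commit:

    import glob
    import os

    outdir = "output"      # hypothetical directory
    label = "TestWriter"   # hypothetical label

    # os.path.join happily joins a wildcard component into a pattern string
    pattern = os.path.join(outdir, "*" + label + "*.sft")

    # expand the pattern first, then remove each matching file
    for sft in glob.glob(pattern):
        os.unlink(sft)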
@@ -671,24 +671,21 @@ class FrequencyModulatedArtifactWriter(Writer):
             int(self.duration),
             self.label,
         )
+        SFTFile_fullpath = os.path.join(self.outdir, SFTFilename)

         # If the file already exists, simply remove it for now (no caching
         # implemented)
         helper_functions.run_commandline(
-            "rm {}/{}".format(self.outdir, SFTFilename), raise_error=False, log_level=10
+            "rm {}".format(SFTFile_fullpath), raise_error=False, log_level=10
         )

-        cl_splitSFTS = "lalapps_splitSFTs -fs {} -fb {} -fe {} -o {}/{} -i {}/*sft".format(
-            self.fmin,
-            self.Band,
-            self.fmin + self.Band,
-            self.outdir,
-            SFTFilename,
-            self.tmp_outdir,
+        inpattern = os.path.join(self.tmp_outdir, "*sft")
+        cl_splitSFTS = "lalapps_splitSFTs -fs {} -fb {} -fe {} -o {} -i {}".format(
+            self.fmin, self.Band, self.fmin + self.Band, SFTFile_fullpath, inpattern
         )
         helper_functions.run_commandline(cl_splitSFTS)
         helper_functions.run_commandline("rm {} -r".format(self.tmp_outdir))
-        files = glob.glob("{}/{}*".format(self.outdir, SFTFilename))
+        files = glob.glob(SFTFile_fullpath + "*")
         if len(files) == 1:
             fn = files[0]
             fn_new = fn.split(".")[0] + ".sft"
@@ -736,7 +733,7 @@ class FrequencyModulatedArtifactWriter(Writer):
         self.maxStartTime = None
         self.duration = self.Tsft
-        self.tmp_outdir = "{}/{}_tmp".format(self.outdir, self.label)
+        self.tmp_outdir = os.path.join(self.outdir, self.label + "_tmp")
         if os.path.isdir(self.tmp_outdir) is True:
             raise ValueError(
                 "Temporary directory {} already exists, please rename".format(
...
@@ -188,7 +188,7 @@ class MCMCSearch(core.BaseSearchClass):
             logging.info("No sftfilepattern given")
         if injectSources:
             logging.info("Inject sources: {}".format(injectSources))
-        self.pickle_path = "{}/{}_saved_data.p".format(self.outdir, self.label)
+        self.pickle_path = os.path.join(self.outdir, self.label + "_saved_data.p")
         self._unpack_input_theta()
         self.ndim = len(self.theta_keys)
         if self.log10beta_min:
@@ -609,7 +609,9 @@ class MCMCSearch(core.BaseSearchClass):
             fig, axes = self._plot_walkers(sampler, **kwargs)
             fig.tight_layout()
             fig.savefig(
-                "{}/{}_init_{}_walkers.png".format(self.outdir, self.label, j)
+                os.path.join(
+                    self.outdir, "{}_init_{}_walkers.png".format(self.label, j)
+                )
             )
             p0 = self._get_new_p0(sampler)
@@ -629,7 +631,7 @@ class MCMCSearch(core.BaseSearchClass):
         try:
             fig, axes = self._plot_walkers(sampler, nprod=nprod, **kwargs)
             fig.tight_layout()
-            fig.savefig("{}/{}_walkers.png".format(self.outdir, self.label))
+            fig.savefig(os.path.join(self.outdir, self.label + "_walkers.png"))
         except RuntimeError as e:
             logging.warning("Failed to save walker plots due to Erro {}".format(e))
@@ -802,7 +804,7 @@ class MCMCSearch(core.BaseSearchClass):
             ax.hist(self.samples, bins=50, histtype="stepfilled")
             ax.set_xlabel(self.theta_symbols[0])
-            fig.savefig("{}/{}_corner.png".format(self.outdir, self.label), dpi=dpi)
+            fig.savefig(os.path.join(self.outdir, self.label + "_corner.png"), dpi=dpi)
             return
         with plt.rc_context(rc_context):
@@ -881,7 +883,7 @@ class MCMCSearch(core.BaseSearchClass):
             if save_fig:
                 fig_triangle.savefig(
-                    "{}/{}_corner.png".format(self.outdir, self.label), dpi=dpi
+                    os.path.join(self.outdir, self.label + "_corner.png"), dpi=dpi
                 )
             else:
                 return fig, axes
@@ -936,7 +938,7 @@ class MCMCSearch(core.BaseSearchClass):
         fig.subplots_adjust(hspace=0.05, wspace=0.05)
         if save_fig:
-            fig.savefig("{}/{}_corner.png".format(self.outdir, self.label), dpi=dpi)
+            fig.savefig(os.path.join(self.outdir, self.label + "_corner.png"), dpi=dpi)
         else:
             return fig
@@ -1023,7 +1025,7 @@ class MCMCSearch(core.BaseSearchClass):
         labs = [l.get_label() for l in lns]
         axes[0].legend(lns, labs, loc=1, framealpha=0.8)
-        fig.savefig("{}/{}_prior_posterior.png".format(self.outdir, self.label))
+        fig.savefig(os.path.join(self.outdir, self.label + "_prior_posterior.png"))

     def plot_cumulative_max(self, **kwargs):
         """ Plot the cumulative twoF for the maximum posterior estimate
@@ -1643,17 +1645,13 @@ class MCMCSearch(core.BaseSearchClass):
     def write_par(self, method="med"):
         """ Writes a .par of the best-fit params with an estimated std """
-        logging.info(
-            "Writing {}/{}.par using the {} method".format(
-                self.outdir, self.label, method
-            )
-        )
+        filename = os.path.join(self.outdir, self.label + ".par")
+        logging.info("Writing {} using the {} method".format(filename, method))
         median_std_d = self.get_median_stds()
         max_twoF_d, max_twoF = self.get_max_twoF()
         logging.info("Writing par file with max twoF = {}".format(max_twoF))
-        filename = "{}/{}.par".format(self.outdir, self.label)
         with open(filename, "w+") as f:
             f.write("MaxtwoF = {}\n".format(max_twoF))
             f.write("tref = {}\n".format(self.tref))
@@ -1673,9 +1671,10 @@ class MCMCSearch(core.BaseSearchClass):
         for key in ["Alpha", "Delta", "F0", "F1"]:
             if key not in params:
                 params[key] = self.theta_prior[key]
+        filename = os.path.join(self.outdir, self.label + ".loudest")
         cmd = (
             'lalapps_ComputeFstatistic_v2 -a {} -d {} -f {} -s {} -D "{}"'
-            ' --refTime={} --outputLoudest="{}/{}.loudest" '
+            ' --refTime={} --outputLoudest="{}" '
             "--minStartTime={} --maxStartTime={}"
         ).format(
             params["Alpha"],
@@ -1684,8 +1683,7 @@ class MCMCSearch(core.BaseSearchClass):
             params["F1"],
             self.sftfilepattern,
             params["tref"],
-            self.outdir,
-            self.label,
+            filename,
             self.minStartTime,
             self.maxStartTime,
         )
@@ -1693,7 +1691,7 @@ class MCMCSearch(core.BaseSearchClass):
     def write_prior_table(self):
         """ Generate a .tex file of the prior """
-        with open("{}/{}_prior.tex".format(self.outdir, self.label), "w") as f:
+        with open(os.path.join(self.outdir, self.label + "_prior.tex"), "w") as f:
            f.write(
                r"\begin{tabular}{c l c} \hline" + "\n"
                r"Parameter & & & \\ \hhline{====}"
@@ -1860,7 +1858,7 @@ class MCMCSearch(core.BaseSearchClass):
         )
         ax2.set_xlabel(r"$\beta_{\textrm{min}}$")
         plt.tight_layout()
-        fig.savefig("{}/{}_beta_lnl.png".format(self.outdir, self.label))
+        fig.savefig(os.path.join(self.outdir, self.label + "_beta_lnl.png"))

         return log10evidence, log10evidence_err
@@ -1966,7 +1964,7 @@ class MCMCGlitchSearch(MCMCSearch):
                 "Set-up MCMC glitch search with {} glitches for model {}" " on data {}"
             ).format(self.nglitch, self.label, self.sftfilepattern)
         )
-        self.pickle_path = "{}/{}_saved_data.p".format(self.outdir, self.label)
+        self.pickle_path = os.path.join(self.outdir, self.label + "_saved_data.p")
         self._unpack_input_theta()
         self.ndim = len(self.theta_keys)
         if self.log10beta_min:
@@ -2189,7 +2187,7 @@ class MCMCGlitchSearch(MCMCSearch):
             ax.plot(ts + taus, twoFs)
         ax.set_xlabel("GPS time")
-        fig.savefig("{}/{}_twoFcumulative.png".format(self.outdir, self.label))
+        fig.savefig(os.path.join(self.outdir, self.label + "_twoFcumulative.png"))


 class MCMCSemiCoherentSearch(MCMCSearch):
@@ -2309,7 +2307,7 @@ class MCMCSemiCoherentSearch(MCMCSearch):
                 self.label, self.sftfilepattern
             )
         )
-        self.pickle_path = "{}/{}_saved_data.p".format(self.outdir, self.label)
+        self.pickle_path = os.path.join(self.outdir, self.label + "_saved_data.p")
         self._unpack_input_theta()
         self.ndim = len(self.theta_keys)
         if self.log10beta_min:
@@ -2508,7 +2506,7 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
                 self.label, self.sftfilepattern
             )
         )
-        self.pickle_path = "{}/{}_saved_data.p".format(self.outdir, self.label)
+        self.pickle_path = os.path.join(self.outdir, self.label + "_saved_data.p")
         self._unpack_input_theta()
         self.ndim = len(self.theta_keys)
         if self.log10beta_min:
@@ -2603,7 +2601,9 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
         if run_setup is None:
             logging.info("No run_setup provided")
-            run_setup_input_file = "{}/{}_run_setup.p".format(self.outdir, self.label)
+            run_setup_input_file = os.path.join(
+                self.outdir, self.label + "_run_setup.p"
+            )
             if os.path.isfile(run_setup_input_file):
                 logging.info(
@@ -2699,7 +2699,7 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
             )
         if gen_tex_table:
-            filename = "{}/{}_run_setup.tex".format(self.outdir, self.label)
+            filename = os.path.join(self.outdir, self.label + "_run_setup.tex")
             with open(filename, "w+") as f:
                 f.write(r"\begin{tabular}{c|ccc}" + "\n")
                 f.write(
@@ -2884,7 +2884,7 @@ class MCMCFollowUpSearch(MCMCSemiCoherentSearch):
         if return_fig:
             return fig, axes
         else:
-            fig.savefig("{}/{}_walkers.png".format(self.outdir, self.label))
+            fig.savefig(os.path.join(self.outdir, self.label + "_walkers.png"))


 class MCMCTransientSearch(MCMCSearch):
...
@@ -64,7 +64,9 @@ class Writer(Test):
     def test_make_cff(self):
         Writer = pyfstat.Writer(self.label, outdir=self.outdir)
         Writer.make_cff()
-        self.assertTrue(os.path.isfile("./{}/{}.cff".format(self.outdir, self.label)))
+        self.assertTrue(
+            os.path.isfile(os.path.join(".", self.outdir, self.label + ".cff"))
+        )

     def test_run_makefakedata(self):
         Writer = pyfstat.Writer(self.label, outdir=self.outdir, duration=3600)
@@ -72,7 +74,9 @@ class Writer(Test):
         Writer.run_makefakedata()
         self.assertTrue(
             os.path.isfile(
-                "./{}/H-2_H1_1800SFT_TestWriter-700000000-3600.sft".format(self.outdir)
+                os.path.join(
+                    ".", self.outdir, "H-2_H1_1800SFT_TestWriter-700000000-3600.sft"
+                )
             )
         )
@@ -107,11 +111,10 @@ class par(Test):
     label = "TestPar"

     def test(self):
-        os.system('echo "x=100\ny=10" > {}/{}.par'.format(self.outdir, self.label))
-        par = pyfstat.core.read_par(
-            "{}/{}.par".format(self.outdir, self.label), return_type="Bunch"
-        )
+        parfile = os.path.join(self.outdir, self.label + ".par")
+        os.system('echo "x=100\ny=10" > ' + parfile)
+        par = pyfstat.core.read_par(parfile, return_type="Bunch")
         self.assertTrue(par.x == 100)