From ea69739b7ed987ace7e7d89e9d3e258e109c630a Mon Sep 17 00:00:00 2001
From: Yifan Wang <yifan.wang@aei.mpg.de>
Date: Mon, 19 Jul 2021 19:21:37 +0200
Subject: [PATCH] transplant samples generation to python3

---
 python3_samples/.DS_Store                     | Bin 0 -> 8196 bytes
 python3_samples/config_files/.DS_Store        | Bin 0 -> 6148 bytes
 python3_samples/config_files/default.json     |  12 +
 .../config_files/waveform_params.ini          | 116 ++++
 python3_samples/generate_sample.py            | 306 +++++++++
 python3_samples/output/.DS_Store              | Bin 0 -> 6148 bytes
 python3_samples/utils/__init__.py             |   0
 .../utils/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 198 bytes
 .../__pycache__/configfiles.cpython-39.pyc    | Bin 0 -> 2877 bytes
 .../utils/__pycache__/hdffiles.cpython-39.pyc | Bin 0 -> 14557 bytes
 .../__pycache__/samplefiles.cpython-39.pyc    | Bin 0 -> 8647 bytes
 .../samplegeneration.cpython-39.pyc           | Bin 0 -> 4594 bytes
 .../__pycache__/staticargs.cpython-39.pyc     | Bin 0 -> 2397 bytes
 .../__pycache__/waveforms.cpython-39.pyc      | Bin 0 -> 8830 bytes
 python3_samples/utils/configfiles.py          |  98 +++
 python3_samples/utils/hdffiles.py             | 602 ++++++++++++++++++
 python3_samples/utils/progressbar.py          | 230 +++++++
 python3_samples/utils/samplefiles.py          | 368 +++++++++++
 python3_samples/utils/samplegeneration.py     | 288 +++++++++
 python3_samples/utils/staticargs.py           |  96 +++
 python3_samples/utils/waveforms.py            | 312 +++++++++
 21 files changed, 2428 insertions(+)
 create mode 100644 python3_samples/.DS_Store
 create mode 100644 python3_samples/config_files/.DS_Store
 create mode 100755 python3_samples/config_files/default.json
 create mode 100755 python3_samples/config_files/waveform_params.ini
 create mode 100644 python3_samples/generate_sample.py
 create mode 100644 python3_samples/output/.DS_Store
 create mode 100755 python3_samples/utils/__init__.py
 create mode 100644 python3_samples/utils/__pycache__/__init__.cpython-39.pyc
 create mode 100644 python3_samples/utils/__pycache__/configfiles.cpython-39.pyc
 create mode 100644 python3_samples/utils/__pycache__/hdffiles.cpython-39.pyc
 create mode 100644 python3_samples/utils/__pycache__/samplefiles.cpython-39.pyc
 create mode 100644 python3_samples/utils/__pycache__/samplegeneration.cpython-39.pyc
 create mode 100644 python3_samples/utils/__pycache__/staticargs.cpython-39.pyc
 create mode 100644 python3_samples/utils/__pycache__/waveforms.cpython-39.pyc
 create mode 100755 python3_samples/utils/configfiles.py
 create mode 100755 python3_samples/utils/hdffiles.py
 create mode 100755 python3_samples/utils/progressbar.py
 create mode 100755 python3_samples/utils/samplefiles.py
 create mode 100755 python3_samples/utils/samplegeneration.py
 create mode 100755 python3_samples/utils/staticargs.py
 create mode 100755 python3_samples/utils/waveforms.py

diff --git a/python3_samples/.DS_Store b/python3_samples/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..749e222af6d2280dd99e24f4c7942a3c7586b65e
GIT binary patch
literal 8196
zcmZQzU|@7AO)+F(kYHe7;9!8z0^AH(0Z1N%F(jFwB5WY@7#IW?7%~~s8R8kz7%~}h
zpmL+sXb6mkz-S1JhQMeDjE2DA3IRrlb2xC+`w-btax?@+LtsRP0H}OWfVAxy9H4Xq
zga%15FfuTJy8w&~3@oq!Vg&aC7(j9$tsokt6-0xyGBAQzU^Bp485p5j8NuBUkUjyh
zQH%^=?F@`yn?d4W?F@`yn;94wA=()jp*Ax@dnk+$?F@_%?F@`y+rdVT5~Cq78Ukn`
z0P4H*FeEeNGvtB$?TG$+DnoHnd2vBfPJR*t1H*}=f}G6a5(9(lj7-cdtZeKYAi&EJ
z8=R3}9$b=GQd;bkSQHK71tgYaBth9B`T03;c4AUkW@>r7fQWN`UP)qRUTP6Yd2nV*
zYEoiROn7EqN`ARheraAxG1wfa1Sbf@3y4=&n;PgS7#r5=C{!Don3(G*SeTgA)^c))
zs~XyRCgfIDRoB$k&4Bn1R!lJPL-=rVCIbTlPXFc<mpMcH%d`RJ-&-*Ma&mF=@Je}j
zdU$#B#0v<OWu}(<7nEe?=M{&g78Pga=fw-~7iZ*`dnV`S1r()%<inHmb4qjbisJ<Y
zlM{22OLG!SQXO-0f-|d9i$SWBi}G`F0`iM9LB>Qzl%!UafaEZwBBF}(i%OiqhDBs1
z=jTCG36vJ62BqdCmSmQtx+IpQ77H+PuI3Y$QB>8^H?g#HzQQjcC?qVx!xNR6lA2eN
znU<Ma6k48{Qj!swSe%@imy(&69$uDNoSBr98j_con<~ic%*hGXB_Sy#6;Ya$Sy-A`
zQYoO!!2uSPm6PI$Nl7e8_03NKnU*53l>?$)Nlr>C8YJUfl$uzQngZ3JrY^-3Qk+?p
zDv$_OsI9}p6Ox^omm*LMwaUPdM=CTYF)1}iAcYfRkEt1tRCpOAR0X`Df>zc%QW44d
zxw)x%CB=e_I#6MI2PvsIkj*}c#U=g)sd=d>0w<u>yU0mNMS|oUQ&K=~;!uXA17!yd
z2ovmYWd{QYkCTCeL6*UY!G*zxA%UTYp_-wWVFtrOhAj-+7`8JUW;nxeo#8ISbB6Z}
zUl_hK{9yRW$i&Fa$jivb$j>OjsLH6$sKKboXvAp8XwT@t=*j5E7{D0F7{nOOn829C
zn9P{Mn9EqqSjt$&SkBnM*umJv*v;6(IF)f8<3h$ojEfo9Fz#mD%eaqmKjR6;ON^Hp
zuP|O^yvO*Q@eSiwhy$6R)gOZ|gbyPbzY$2+(0t4^6??jtarbcd^cuwJ`X(PFF^h_c
zE2wJf8kyTT!V?2p^rNIu<U|QhiIAiQPJ*x`22KN4BqXJzWn_8eVL1Xlr-WwarKDDH
zfKr|SJ4CgTj0%Uq2@U}cFqcPNhMQAhD}*DXE~CT2A;1KY(U&pg6iDO{U<Pw#OnA&V
z1X4H!SixK#OCD=ZfnrVp7BE-Fj>myRz?)Nm4b0_n=5gf&WlRAE3X|_ChRY0(7~V7d
zW@KPwW8`KOW|Ux5WYlETXS4(-S7$~SMmI)(#sJ13#$d)!#yG}!#ze*>#%#tM#$3jH
z#tOzt#%jhI#yZA!XwvOvoW?kvaVFy|#yN~j8J97xU|h+#nsE!`R>mESI~jK~?qNL2
zc#iQh;~hvcg(qGg2p>i=-o=-A%YuvYa`N-i!Hq@+1~!I#hEj$Sh5`_Yw}Rs2<lyAx
zq<sa&0kQ~OK^YtBC>U89fodpABLf`;6Jzo!C=@@U`5zW&RG^t5W=Meg|E>%SxW@k>
zsz%At5Eu=CVHpC9EH1$=PSEldn-4*KZBTzY0V)Y<gn+u^jG%rwL?1{AsA~`Ijx$0g
r6~U?*86Z7*Mg~Yvo{@n8q;=p10>Dkp(fWV1{s(t5sn9_kn)N>bXF9Oa

literal 0
HcmV?d00001

diff --git a/python3_samples/config_files/.DS_Store b/python3_samples/config_files/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..2db5130bf4e2d5f5c211a5667f8578af23fa86c3
GIT binary patch
literal 6148
zcmZQzU|@7AO)+F(5MW?n;9!8zEL;p&0Z1N%F(jFwB5WY@z-E*(<S>*l=rLq56hq}k
zsnHM^4S~TM0-(Ih!;r#|%8<s8$bg)G^BM9ybMljua`KZP1q9<n1_lPl|4_idz<^U<
zK}nKNX>myr0|SE&Loh=nLoq`MLn=cqgFZtTLq0<eLn%WpLn=ctgFb@`Ln1>7Ln4Dd
zLnuQrLn=cNShRv6lOd5I1FR>LA&(&)tP-Rno1ugupP_(3pMd>-Q2TMZk%56hn4z2@
zk)aIi_k4yThFpeth608}h9ZVUhFpeX20aFl-I-WIVIwpU{u6Hsox+4jKNXZD!9s{g
z9fLOskGhP^5MYGp<KTwSqvU7^jE2By2*5)CR6Zy`+V%_%P`Uv^gQOT385qD_07eD|
z7MLbRa6f<nBnQ$8qCr|gG)OB0BZvhy1FV&S5vr9D+zkQg19eG2G*~+WBiLpTAFQ2$
z5o|L910zH`10&RCMraR(5u%-e5u%-e5o|lmb))oX2#kgRECiS#3;|I6@5;b{tN#yC
zHA;?#z-R~z%Mf5>aS3*D0$0k|{RgURLG@_@R2ozVgQ{ajP`wP%2T}r-Wr7SSN`N^K
eIgnORwGXa}85tNrT1OiKumBpRM?+u$LI40WAcpk-

literal 0
HcmV?d00001

diff --git a/python3_samples/config_files/default.json b/python3_samples/config_files/default.json
new file mode 100755
index 0000000..0b83109
--- /dev/null
+++ b/python3_samples/config_files/default.json
@@ -0,0 +1,12 @@
+{
+  "random_seed": 42,
+  "background_data_directory": null,
+  "dq_bits": [0, 1, 2, 3],
+  "inj_bits": [0, 1, 2, 4],
+  "waveform_params_file_name": "waveform_params.ini",
+  "max_runtime": 60,
+  "n_injection_samples": 32,
+  "n_noise_samples": 16,
+  "n_processes": 4,
+  "output_file_name": "default.hdf"
+}
diff --git a/python3_samples/config_files/waveform_params.ini b/python3_samples/config_files/waveform_params.ini
new file mode 100755
index 0000000..c6a3477
--- /dev/null
+++ b/python3_samples/config_files/waveform_params.ini
@@ -0,0 +1,116 @@
+; -----------------------------------------------------------------------------
+; DECLARE ARGUMENTS
+; -----------------------------------------------------------------------------
+
+[variable_args]
+; Waveform parameters that will vary in MCMC
+mass1 =
+mass2 =
+spin1z =
+spin2z =
+ra =
+dec =
+coa_phase =
+inclination =
+polarization =
+injection_snr =
+
+
+[static_args]
+; Waveform parameters that will not change in MCMC
+approximant = SEOBNRv4
+domain = time
+f_lower = 18
+distance = 100
+waveform_length = 128
+
+; Width of the background noise interval (in seconds) around the event_time,
+; which is used to make the injection. Should be larger than (see below):
+;   sample_length = seconds_before_event + seconds_after_event
+; because we need to crop off the edges that are corrupted by the whitening.
+noise_interval_width = 16
+
+; original_sampling_rate = Sampling rate of raw HDF files (usually 4096 Hz)
+; target_sampling_rate = Desired sampling rate for sample generation output
+original_sampling_rate = 4096
+target_sampling_rate = 2048
+
+; Define parameters for the whitening procedure. See documentation of the
+; pycbc.types.TimeSeries.whiten() method for an explanation of what these
+; values exactly mean.
+whitening_segment_duration = 4
+whitening_max_filter_duration = 4
+
+; Define the lower and upper bound for the bandpass filter (in Hertz)
+bandpass_lower = 20
+bandpass_upper = 2048
+
+; Define how to align the sample around the event time. By convention, the
+; event time is the H1 time!
+; The sum of these values will be the sample_length!
+seconds_before_event = 5.5
+seconds_after_event = 2.5
+
+; alpha for the Tukey window that is used to "fade on" the waveforms
+; It represents the fraction of the window inside the cosine tapered region.
+; To turn off the "fade on", simply choose tukey_alpha = 0.
+tukey_alpha = 0.25
+
+
+; -----------------------------------------------------------------------------
+; DEFINE DISTRIBUTIONS FOR PARAMETERS
+; -----------------------------------------------------------------------------
+
+[prior-mass1]
+; Prior for mass1
+name = uniform
+min-mass1 = 10.
+max-mass1 = 80.
+
+
+[prior-mass2]
+; Prior for mass2
+name = uniform
+min-mass2 = 10.
+max-mass2 = 80.
+
+
+[prior-spin1z]
+; Prior for spin1z
+name = uniform
+min-spin1z = 0
+max-spin1z = 0.998
+
+
+[prior-spin2z]
+; Prior for spin2z
+name = uniform
+min-spin2z = 0
+max-spin2z = 0.998
+
+
+[prior-injection_snr]
+; Prior for the injection SNR
+name = uniform
+min-injection_snr = 5
+max-injection_snr = 20
+
+
+[prior-coa_phase]
+; Coalescence phase prior
+name = uniform_angle
+
+
+[prior-inclination]
+; Inclination prior
+name = sin_angle
+
+
+[prior-ra+dec]
+; Sky position prior
+name = uniform_sky
+
+
+[prior-polarization]
+; Polarization prior
+name = uniform_angle
diff --git a/python3_samples/generate_sample.py b/python3_samples/generate_sample.py
new file mode 100644
index 0000000..d98ff98
--- /dev/null
+++ b/python3_samples/generate_sample.py
@@ -0,0 +1,306 @@
+"""
+The "main script" of this repository: Read in a configuration file and
+generate synthetic GW data according to the provided specifications.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+from __future__ import print_function
+
+import argparse
+import numpy as np
+import os
+import sys
+import time
+
+from itertools import count
+from multiprocessing import Process, Queue
+from tqdm import tqdm
+
+from utils.configfiles import read_ini_config, read_json_config
+from utils.hdffiles import NoiseTimeline
+from utils.samplefiles import SampleFile
+from utils.samplegeneration import generate_sample
+from utils.waveforms import WaveformParameterGenerator
+
+# -----------------------------------------------------------------------------
+# MAIN CODE
+# -----------------------------------------------------------------------------
+
+if __name__ == '__main__':
+
+    # -------------------------------------------------------------------------
+    # Preliminaries
+    # -------------------------------------------------------------------------
+
+    # Disable output buffering ('flush' option is not available for Python 2)
+    #sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+
+    # Start the stopwatch
+    script_start = time.time()
+
+    print('')
+    print('GENERATE A GW DATA SAMPLE FILE')
+    print('')
+    
+    # -------------------------------------------------------------------------
+    # Parse the command line arguments
+    # -------------------------------------------------------------------------
+
+    # Set up the parser and add arguments
+    parser = argparse.ArgumentParser(description='Generate a GW data sample.')
+    parser.add_argument('--config-file',
+                        help='Name of the JSON configuration file which '
+                             'controls the sample generation process.',
+                        default='default.json')
+
+    # Parse the arguments that were passed when calling this script
+    print('Parsing command line arguments...', end=' ')
+    command_line_arguments = vars(parser.parse_args())
+    print('Done!')
+
+    # -------------------------------------------------------------------------
+    # Read in JSON config file specifying the sample generation process
+    # -------------------------------------------------------------------------
+
+    # Build the full path to the config file
+    json_config_name = command_line_arguments['config_file']
+    json_config_path = os.path.join('.', 'config_files', json_config_name)
+    
+    # Read the JSON configuration into a dict
+    print('Reading and validating in JSON configuration file...', end=' ')
+    config = read_json_config(json_config_path)
+    print('Done!')
+
+    # -------------------------------------------------------------------------
+    # Read in INI config file specifying the static_args and variable_args
+    # -------------------------------------------------------------------------
+
+    # Build the full path to the waveform params file
+    ini_config_name = config['waveform_params_file_name']
+    ini_config_path = os.path.join('.', 'config_files', ini_config_name)
+
+    # Read in the variable_arguments and static_arguments
+    print('Reading and validating in INI configuration file...', end=' ')
+    variable_arguments, static_arguments = read_ini_config(ini_config_path)
+    print('Done!\n')
+
+    # -------------------------------------------------------------------------
+    # Shortcuts and random seed
+    # -------------------------------------------------------------------------
+
+    # Set the random seed for this script
+    np.random.seed(config['random_seed'])
+
+    # Define some useful shortcuts
+    random_seed = config['random_seed']
+    max_runtime = config['max_runtime']
+    bkg_data_dir = config['background_data_directory']
+
+    # -------------------------------------------------------------------------
+    # Construct a generator for sampling waveform parameters
+    # -------------------------------------------------------------------------
+
+    # Initialize a waveform parameter generator that can sample injection
+    # parameters from the distributions specified in the config file
+    waveform_parameter_generator = \
+        WaveformParameterGenerator(config_file=ini_config_path,
+                                   random_seed=random_seed)
+
+    # Wrap it in a generator expression so that we can easily sample
+    # from it by calling next(waveform_parameters)
+    waveform_parameters = \
+        (waveform_parameter_generator.draw() for _ in iter(int, 1))
+
+    # -------------------------------------------------------------------------
+    # Construct a generator for sampling valid noise times
+    # -------------------------------------------------------------------------
+
+    # If the 'background_data_directory' is None, we will use synthetic noise
+    if config['background_data_directory'] is None:
+
+        print('Using synthetic noise! (background_data_directory = None)\n')
+
+        # Create an iterator that returns a fake "event time", which we will
+        # use as a seed for the RNG to ensure the reproducibility of the
+        # generated synthetic noise.
+        # For the HDF file path that contains that time, we always yield
+        # None, so that we know that we need to generate synthetic noise.
+        noise_times = ((1000000000 + _, None) for _ in count())
+
+    # Otherwise, we set up a timeline object for the background noise, that
+    # is, we read in all HDF files in the raw_data_directory and figure out
+    # which parts of it are usable (i.e., have the right data quality and
+    # injection bits set as specified in the config file).
+    else:
+
+        print('Using real noise from LIGO recordings! '
+              '(background_data_directory = {})'.format(bkg_data_dir))
+        print('Reading in raw data. This may take several minutes...', end=' ')
+
+        # Create a timeline object by running over all HDF files once
+        noise_timeline = NoiseTimeline(background_data_directory=bkg_data_dir,
+                                       random_seed=random_seed)
+
+        # Create a noise time generator so that we can sample valid noise times
+        # simply by calling next(noise_time_generator)
+        delta_t = int(static_arguments['noise_interval_width'] / 2)
+        noise_times = (noise_timeline.sample(delta_t=delta_t,
+                                             dq_bits=config['dq_bits'],
+                                             inj_bits=config['inj_bits'],
+                                             return_paths=True)
+                       for _ in iter(int, 1))
+        
+        print('Done!\n')
+
+    # -------------------------------------------------------------------------
+    # Define a convenience function to generate arguments for the simulation
+    # -------------------------------------------------------------------------
+
+    def generate_arguments(injection=True):
+
+        # Only sample waveform parameters if we are making an injection
+        waveform_params = next(waveform_parameters) if injection else None
+
+        # Return all necessary arguments as a dictionary
+        return dict(static_arguments=static_arguments,
+                    event_tuple=next(noise_times),
+                    waveform_params=waveform_params)
+
+    # -------------------------------------------------------------------------
+    # Finally: Create our samples!
+    # -------------------------------------------------------------------------
+
+    # Keep track of all the samples (and parameters) we have generated
+    samples = dict(injection_samples=[], noise_samples=[])
+    injection_parameters = dict(injection_samples=[], noise_samples=[])
+
+
+    print('Generating samples containing an injection...')
+    n_samples = config['n_injection_samples']
+    arguments_generator = \
+                (generate_arguments(injection=True) for _ in iter(int, 1))
+    print('Number of samples:',n_samples)
+
+    sample_type = 'injection_samples'
+    for i in range(n_samples):
+        print(i)
+
+        results_list = []
+        arguments = next(arguments_generator)
+        print(arguments)
+        result = generate_sample(**arguments)
+        results_list.append(result)
+        
+        # ---------------------------------------------------------------------
+        # Process results in the results_list
+        # ---------------------------------------------------------------------
+
+        # Separate the samples and the injection parameters
+        samples[sample_type], injection_parameters[sample_type] = \
+            zip(*results_list)
+
+        # Sort all results by the event_time
+        idx = np.argsort([_['event_time'] for _ in list(samples[sample_type])])
+        samples[sample_type] = \
+            list([samples[sample_type][i] for i in idx])
+        injection_parameters[sample_type] = \
+            list([injection_parameters[sample_type][i] for i in idx])
+
+        print('Sample generation completed!\n')
+
+    # -------------------------------------------------------------------------
+    # Compute the normalization parameters for this file
+    # -------------------------------------------------------------------------
+
+    print('Computing normalization parameters for sample...', end=' ')
+
+    # Gather all samples (with and without injection) in one list
+    all_samples = list(samples['injection_samples'] + samples['noise_samples'])
+
+    # Group all samples by detector
+    h1_samples = [_['h1_strain'] for _ in all_samples]
+    l1_samples = [_['l1_strain'] for _ in all_samples]
+
+    # Stack recordings along first axis
+    h1_samples = np.vstack(h1_samples)
+    l1_samples = np.vstack(l1_samples)
+    
+    # Compute the mean and standard deviation for both detectors as the median
+    # of the means / standard deviations for each sample. This is more robust
+    # towards outliers than computing "global" parameters by concatenating all
+    # samples and treating them as a single, long time series.
+    normalization_parameters = \
+        dict(h1_mean=np.median(np.mean(h1_samples, axis=1), axis=0),
+             l1_mean=np.median(np.mean(l1_samples, axis=1), axis=0),
+             h1_std=np.median(np.std(h1_samples, axis=1), axis=0),
+             l1_std=np.median(np.std(l1_samples, axis=1), axis=0))
+    
+    print('Done!\n')
+
+    # -------------------------------------------------------------------------
+    # Create a SampleFile dict from list of samples and save it as an HDF file
+    # -------------------------------------------------------------------------
+
+    print('Saving the results to HDF file ...', end=' ')
+
+    # Initialize the dictionary that we use to create a SampleFile object
+    sample_file_dict = dict(command_line_arguments=command_line_arguments,
+                            injection_parameters=dict(),
+                            injection_samples=dict(),
+                            noise_samples=dict(),
+                            normalization_parameters=normalization_parameters,
+                            static_arguments=static_arguments)
+
+    # Collect and add samples (with and without injection)
+    for sample_type in ('injection_samples', 'noise_samples'):
+        for key in ('event_time', 'h1_strain', 'l1_strain'):
+            if samples[sample_type]:
+                value = np.array([_[key] for _ in list(samples[sample_type])])
+            else:
+                value = None
+            sample_file_dict[sample_type][key] = value
+
+    # Collect and add injection_parameters (ignore noise samples here, because
+    # for those, the injection_parameters are always None)
+    other_keys = ['h1_signal', 'h1_output_signal','h1_snr', 'l1_signal','l1_output_signal', 'l1_snr', 'scale_factor']
+    for key in list(variable_arguments + other_keys):
+        if injection_parameters['injection_samples']:
+            value = np.array([_[key] for _ in
+                              injection_parameters['injection_samples']])
+        else:
+            value = None
+        sample_file_dict['injection_parameters'][key] = value
+
+    # Construct the path for the output HDF file
+    output_dir = os.path.join('.', 'output')
+    if not os.path.exists(output_dir):
+        os.mkdir(output_dir)
+    sample_file_path = os.path.join(output_dir, config['output_file_name'])
+
+    # Create the SampleFile object and save it to the specified output file
+    sample_file = SampleFile(data=sample_file_dict)
+    sample_file.to_hdf(file_path=sample_file_path)
+
+    print('Done!')
+
+    # Get file size in MB and print the result
+    sample_file_size = os.path.getsize(sample_file_path) / 1024**2
+    print('Size of resulting HDF file: {:.2f}MB'.format(sample_file_size))
+    print('')
+
+    # -------------------------------------------------------------------------
+    # Postliminaries
+    # -------------------------------------------------------------------------
+
+    # PyCBC always creates a copy of the waveform parameters file, which we
+    # can delete at the end of the sample generation process
+    duplicate_path = os.path.join('.', config['waveform_params_file_name'])
+    if os.path.exists(duplicate_path):
+        os.remove(duplicate_path)
+
+    # Print the total run time
+    print('Total runtime: {:.1f} seconds!'.format(time.time() - script_start))
+    print('')
diff --git a/python3_samples/output/.DS_Store b/python3_samples/output/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..88760d6b4f82f62024c05bb0a0f4b8d34d378264
GIT binary patch
literal 6148
zcmZQzU|@7AO)+F(5MW?n;9!8z3~dZp0Z1N%F(jFwB8(vOz-AOM<S?W%=rLq4q(J2e
zQ6RnC45<ti42cZ649NP^Jah7slXCKtpw4q-U|`Vu4+ew`9hDyqfzc2c4S~@R7!85Z
z5WpD%pt=`S|8wJ1F)A?{0;3@?QbPb#J}5xi_6!bCx&cCiq!<_(7{FZsMg|5JSQs&a
z`vD9fIgnNm4blptL0TCYK`gKtV66;{P_2yMZU{&ps7nH(!P*%Z!8U{VVC@WyV4E2j
z7$Mpj7@;;ZLVGBT5bX?%5bX?%VB2A?8>L4>U^E0^A;1h_2!QH;R|W=L{eOt6QF1f{
zMnhm&h5#drOR$R*xKhUMKTur@s!tQ3Y7Rh^F@ox4h!{u-sA>mS$4rnxMG2@fkT^&?
ahz3{3j0_Cm+I+Mz01Kf}dNc&+8v+28VjI5z

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/__init__.py b/python3_samples/utils/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/python3_samples/utils/__pycache__/__init__.cpython-39.pyc b/python3_samples/utils/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..26517d9b974eea945630a2a15ba2903dfaf7f135
GIT binary patch
literal 198
zcmYe~<>g{vU|^_8`<eivAA<;F%*epN;K0DZP|U)>z>vZa%%I8Wx00a<B#a<_W$T9)
zrxq3KS7xRq=9MSrrR)2ZmK2xd7o{q=W#*(7!@2r~y8e0T`I&j?x&cM`S*gh-`bI{&
zzKO{hnR%%`sfk5-nR)5@MtaWqDXIGD>E$W<r6rj;#rpB_nR%Hd@$q^EmA5!-a`RJ4
Nb5iX<j`$36EC3HlH1Gfb

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/__pycache__/configfiles.cpython-39.pyc b/python3_samples/utils/__pycache__/configfiles.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0a56010086881851a061239a47d3d40083bdb64b
GIT binary patch
literal 2877
zcmYe~<>g{vU|^_8`<k$dkAdMah=Yuo7#J8F7#J9e0~i<>QW&BbQW#U1au}l+!8B78
zQwn1Wa}IMZOB4$uNRB0kHJ2@ljgcXRHH9sQJ(nYjBbPIZlaV2XJ%yu%A&M)7Gli>#
zA&NVNDVRZ%yUKwppeVmAGbL3atu!yWBr`v+SRpOHNTDb-F(orET_G_qMWG<Es2Idf
z&d*EBOfM};1SwNU%gjkF*5i5!a;={x<1LZ!{G#l%ocwZUu(E)}qT<vdO~zZ|Afw_7
z5{nXZi{sOZ@^j-M=DcKNU|`T>x+R#Ho0^vrUtE${l9?QzSd?CTOQfW-AT>F$xCB`=
z8KfR$83?m5Fff3yGbm`h7#J8z7_t}_Fx4<DWGt2{VNPLOzye}7Gd3}nu+}i7Go~>0
zGS)Jdu+=a&GsZC0GSxDdu-7m*GuASfa6sr9rW)onP@pg;_AxUsa49G#C<LV@rYL0Q
zDI_W+Xz69<WhN*<+z$4<jzW1xW^#r?X0bwPacYV}Zhmn|PNhOgzCv+9YI0^;r9w$Y
zD%hCv#In@1{Gwci;>_IA97ynKfI>O7A~ClhCshZeQUR_iH$Npc2O<Uzw$zf;BCt8d
z1&PV2;OHsINL5J6%u6h)R7lE6OwLxw$j?bF)=@|;N!HT@M_N&8Noi4@LS{)Z$jv3G
zc_qbqU_-!!V^Mms6_^F`CMYW73ld8*6f}xUiZrbh0zmANe6X{y_#EWNq*QPkQOL~G
z;{qEL1hx>#R7Zu9(t?~+g#?YV#G=f^q?}Yx?3U(&oT8(Eh;N99W`aU_Mrsixz(9@y
z8wz4*DI{PTkf2bUkzbmV0t(WCoJxhHRE0!^oXp}9h5R&y#GD+E)8Qs4fRb=xE+`y}
zp^=uJTB1;tn3s~Dn^UP!4332q1yJe)X@o{#W@-wO5h<C)B}JJ@rJ%%LtfNp`T$-4Z
zQ>jo2&Kik%3YmGuC5d^-sS5dNNV=?&a}tY-trAL0GINUcU{PGG7Y<Dl0ib*Va+rH+
zUTRTdNq$iRB$}W;M-L{rzrh|($pmLvP%=o)&nrpH%md|9P~yl=tt>`z1}GbsCFYc-
z7K6eEqyV0>iWN#S5=#^ki&7z8Pt1iRvDC!m3?vhvNfn|vvsfV=;@;F01&vI-R6R&?
z&CAczEy$=W&P-0sK?-vuV~R_YGZYd*cBY~wcyOu(xeOc|Ab%7kmZWMzLgSZyFgQ<T
zrlx?BGg>jEkdmKTtdN&qqL5mVSzKbJP+hCZ7{v|At8SojN|W;zQ-1L+7EmI;#RgVY
ze2d-F-?gYHzvvblD03#36oCrcB4!2#h9Wi)!3rv%SZ;B`vqzLzIkb#}mSLb`tvI#l
zmJntFy(NGibt@T)1Q-|?e&y+h7N-^!>sMx`CFYeU=B4ZVm6jBj<QJtXfZYn`>Kp3%
z=cVUo=B4Wf6y;~7CYR_N8R_~aCTC>krTU~M7J*`0-$>6nKP6Q^J-s|dADqhcA^rxr
zx>&EE@)ke10M5+IggRObRA!2RN>OnhMm9#K|7<KgENuVTSU4Ctn2LlL7#L8BT2Lr(
zFfcHHi`ou?MQsXGGgH45sHk1QUc<PMv6d;EA%&TRfrX)&nUNt+z>uNX#ejhk3?msB
z88R4BSQazZGS@IK;3(m&VOqeokf9l*kEMpCsHKEEg>?Z-3flsn65bjXkoZDIP@!G}
zDyZ2Lr!rzIsI!Xm^YE6`Sc+(HqDzIP>dgE+g@U5|<kaF~Pzjovlcs=KUgwqOCZ!g^
zvOC0pVo=ct%No!;0aFG_xXGnOMX7luFvC;9>Pw4Lb--DKHl;U4aDd!MO!1unDqg?=
ziCnUP3K~!m4l78~Gs{x*6ug4{{lK*%B)=B1GcYjRVl48r12u1Mv8NQqCuNot-{OXp
zx$(uRsVTQ4lM<7&(~I&;^HSnd5=#=}Q!<NElS}f8DsOS;CRW53mFAUX=BD0~gcX~h
z>;`T{WagyC=OyN*-r~=T&&$s&PK}2K|1E+1(vpJGl6X++hbZC5%*%pWo)-^|)YRfz
z!g=wTd7wO%nV$#KU1jE*Sqv`Ei&6_qGmBDF6tYt*HHtMsZ2+(jlfi8p<R%cPu&H9w
zQPAWo0_Bn-PEg4K@^le5h{Xd62bTPT)Vy0PpoqA|l9Qj9a*Hb^Gc7H(C^auR^%iq+
zYRN4YP=S4m!#lMSQY5it<!9#IVgc1?nyf{<3=9lWoFM&RkKJN}xZ@Tt%$@Nd?NL0r
z5a)w=;KBw%2!b3T04@f=W<Ye?gUSIZP&vR4F9#Skm^s*l7+DzE7<vA)F&Alqia?%Q
z?D6p_`N{F|Me-m&f&3Z8S5TRpl&l9U5W!_<lrTgPrK$uMs8L*y(h$_VD*}}$kP;SD
t*nsVTxD!-7LRerIbJ*nOr<CTT+JRbr#eAS99|x$6;$h@r5@6@h1OSxca!LRI

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/__pycache__/hdffiles.cpython-39.pyc b/python3_samples/utils/__pycache__/hdffiles.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..297bd1cb160ef013ac80326a409ba565f6071ef5
GIT binary patch
literal 14557
zcmYe~<>g{vU|^_8`<ie&mVx0hh=Yuo85kHG7#J9es~8v<QW&BbQW#U1au{=&qL>&#
zY~~#1D3%n)6y_Y(C{{4d7R3gp*`wG~7*kksIC43oIKgUJbGUN3qqxCrwj7>Z-Y8y1
zh7|r3_7sj5#wflNffVi(o)*R^es_ix-W0wTh7`VLrYM0F=3oX*!73-NfTH}e%#>7x
z<ebFf;?!b=#Jm)Rw9>rflFa<PVuiH)B88&V#FWgubg)c$QDzB<?cw64kd~Q~TCB(Q
z667;Y##?*^MVWae@i4u9nvA#DofAtEbMn(Q8E<igWag#@rxs<V7Hcxz;`Z@$_Xml%
z2LvZGfee6Rb_NCp4h9AWPzV)=F)%QsGt@GbFf3rKVOYrM!Vs$w!(7W;%Tmi)!|K8i
zD^bf<!?b{@gdvN00ZR>Y4ckJdg^Y|0g))VF9SkXqrHn;E?F?y*DNHHMEgUsWE)30#
zwM;ck!3>%#iBlOF7`PM^6cj=dvr`ok6;d*bQj<&aiz>nXEJ`gYEy`0U$w*ZwNG!=H
zRw&6=NX*HB_*o$nB$HVT){d$#v!qy|xHJhSn3-Cvr{G@z3d_WtoJx@PlGGxF{L&JI
z{Jfk>umRv;R46V;P0mcqOjby(C`rvL1_izz7g*l0D81MU%mTR<W^$#1MsZ1zrj<fK
zVo3(rbqF8m!Bry+Q_#rCEH2TsQt-(vE>Xx&1NkPuJT(R6q*NqJb>MnIwt=EQwIVUM
zAScyIAt72lBPC5;M?pQqR6RBU>bxLuNFlipW&tGJAU1(*DOSi$EJ@A)B_B|@CMM;V
zr7EPP7AF^Ff<ven;%>iWMrdLHQ6M%O0|SFFD7&bE6GRO|tXwBU2?IE48B!QqI64?X
zQQOQ|#L~eqk*Sa+m_d^X#S~ByV}+R_1~Y}Pma&6j0YeSrLPm(Ll?<AUw>VPsQi{tn
zOENT>ZZYW@++t3xC|SvPi!r_k<c?q2`k}?CMaBA+nQ4i6<%xOe`hKM)#U=SgsS0kO
z3<2lr8|wP!rRQhnrRxS1<!7ZPm*^WA>G~#uLLk*AHL)l!GcR4=NY6PxB~?E?y*x#~
zv?Md9SU)2r4V)zO3My}L*??j#IX}0+&JYv;EDQ_`Y>Z6*t8~*-OX5Mw;=#eJXOokk
zoS0K=r-#sh#METG#hjQ|smWLb%3CWLZ*d_KMiHo}D&k>aU?}2cU|@h-BLcEUiLf=A
zthbo*i*K=%C+1||VoNM2NX<*R#R78GEtahO%sfr@Tb%GjTO`21z!1fSaCngj0|Ub?
zmXyq*;#;gB;o@72Y2bh^;s-ei<gQzMsIHI%g#;fcaqzP+i!gF9N-%RU3NaT+F)%Q^
zWDsCra7zZo6i5LG3o<Y;a56A3ID@q1FfuTdFw`)XFg7z43wbf5Fg7zbF_tjYFo8u>
zycklLpd!p*5i>7_6lSOh3s}U%iy?&tBvQj%!y?9z!kW!g9OK21!Uhs6VXR?FVV7oT
zW~^l^VFalFv)N0Sni*<2QaD@~Vwcr&)-cp?N;1@Pm4J#Zt`yFNO#L;r+$D@P+$mg=
z3@O}_3@JR4ATxMs7;AVW7;4xg7;2bB7;1T8d~pb$uY`30TMc&#uOveXpCrRV##;Uw
z{u<sIzE&nlh8m6<P8%>O391nIYuH<vVWQy-Df}!9EDX)ej0}Z!h71!Ji=_-07{M@-
z0VGz#I)Sm+poSxu0qk}Gu-i3iIZN0<=1MZ82-L7hGn8=DFoImy%$OqB%UmM>vT*`q
zp;e7QFhdG|FoUL0VhSs?pmobHQb<%t&n!#LQ%FcJD2^}5%uP+uQ7BH$Nlh*Rm$jLB
zC8<Sai8-JGDkn8By(9x%93{ZS<I6KsN-`1@G?G(6DpOMw5{vRn^HPw^(^N<+%FhLx
z2Ct;RhCoZ$)D%$hosf}|hLnF2bfBevF}SP)>COces-SWT>|n472_=a|pri;^l$n<v
zUzAvq3M&Uui)mEj6*N*ZlR<^AqXLMLnV*+fRH*<eV-&JeD~lBps?|LVK?SRip?Yls
zY7tzXk(r#KkddF0QVenyqB;V(4empQ<ovvn#7t0;0#XHbE~o$pVOZc{h83j11*w5W
zhk{0C9>k#<Wr;bNDVhpNiOJdNkU-AM&n!+=0L7YuhI>FTm{nYom|LKUFbHNDB&J}-
zg=C~EK;jb=vmlojmlP#t<|%-KG8Lo@#mU8~$@zIH#hRcvNJ@pK5Kv`>5w!@L^NTXm
zGxHL2;=yLZq6cI&$U`6(z*InN1=T~K2n4A>vD_oIsH930=1@csgUwCVQ*cR5ODxSP
zQOGP-Ffp(+L%0B2@&K6*vma~#vb73Ot4i`w8~}-O<WS1UFU?6&NJ>>m$uG~-1)Gza
zqM(tP2C6?Zi&9fG^%Q(di%U?9NrE(M$})@di(tM<fQ12SC?y~~kyspGP??;R49=a!
z;0Bw5MpAx$j;57Dct&bTMrsi#R*)J<(5Or-R!Bs#AC%NV_U4u57E~%E78NB{7At_t
zfMRfSD;eAj$xi~+jKv^zC6xuKC<Y{e9ij&kFV+JkJBV}iKn+JoQxcZ>Fd895Wi&kn
zP?40Hn4AI1hB_!22jqBYK80p2kgK6yg+zdY2FS|_NZx}b8n`dOWd}6n>nPwZv~(0S
zKryQck6NS@0ZIocsl}jJR=`yPfwFLwI4FofsWLCWL?JOx0hATfQ;QU<z${R)3eA+6
z#R{lOsx+`C3rxeR)UYOU3=@k$?Y1g6&pdD`qJT}4m4YA0oeC+L@Dz&O6^buG&9{|I
zx0pN(Z!!58-eN4e#gdy^l6Z^5Js`NaB(bOj++w)J28o_1wqS7K;+GDp{uvlP*lRN1
z;?2y<EXhpFi3g{{TkI*RIVFklCAU~p3-XgQG&yf^Wfo`V6_+IDC8yqE&de(THA<38
zZn0#T7F6D10X6MzG36E9Vg>6j0yV^nKrKp5!CQQ&h5aoKSYaB)2Z{njz~2(X?!j9k
z*up1D7)w^X#g?CzR-9TA#Scz{pi(X^5!7wD#Re8gy~UA|S_1CK+~Near@-yQTO2Sa
z-r@p>7dUg@;)Jj>Qz~w;r{<-^XQosXDTCT$T%eQ$$!y?|g%IGjm;|^21cz@tsG^OB
znQR4WUipDST%CuJ=|2k-Cdmg9=i*`JVP<3IVCG=vV-{iJ`_IJ8_n(PHh*5yqf}M?#
zg$*pn$i~R^kL5oXYmqer14A;%SY!+eesBl6jDdk6l_82Tg&~S5g)xdbMJ|OUg|&q-
ziY0|Dg}sF#iZz8Jg|meriY<jJg}a3ziakX>g+E20g)xdFMIl8nMW}@_iZewxMWlrx
ziYrAlMXZG(iaSL-MWTfviYJvfMKVRIg`t@-iZ7KnMLI<W%;ry#4Q9|(yv6GWst`e?
zQBG!FYGM~Nw4vjukdS~Pm5`tSDd!Zx9fe|r%)GSxqTEDKzfd7D3Dg|~=kml7NC{e4
znwXPW0_llnBo?KVCl;kDWafcN%FO&cg|wW+bV!MrkqYfufqSH>kj53b^hrR}+wm!h
zC5iFyHh6-bLP$ntF(_{%J0r6g+|dU$j^JI>{Ji8;g+xfxEGacTGY{Op$WH^6l#og^
zRUth!FSQ8jle`p#<U~;4I5kfp2~w7W`ZSqUsVSg3qp&nHIXkBkrc()AHYh0|nia*M
zGB^=bv+5|o8yH~klxL(CrGl*mC2CLrD&X;<LIHG81LSiBaCHhcr694mSRpyJ2-H%7
z)tm~jz)mj8EJ-cOOoUX0VD+eNI-DVj-W3POC44vmVLhmn24Q%G0#aC%SgrterUtyF
zp$Uqi)M8lN<|bAuB<2+7!-5oK1PG^O7MB!dCY6??rYPi>r4}g^r<SD_CFY<HK!Cjh
z(omF`my(|wUz`eVJAm7H!5}uMmk)6YNFG$7C8ZX@A~wGWDF8vHfN*B9LTNFmJ(gbt
ziA_+e1{|Hpu?Y!DkovOJqNM!dRE&m1ei67PDgmcQXz8m^3~B9vhm+uL2bq$SpPmk~
z2szo7<bzX1a(-?uXygi%5h2#T)M5bj1VJSQtmFW(K^WW<oB$f2W2|9_XQ*MWVTfle
zVaj68VyR(DVeAEoveqzUv4IATQdoMK7#T{~YZ$T^Qdm>i(wS1&dzoq&;yG#<7O<so
zfCikvYB^JwA!=(F7H}?Ps9}ibLQ}zoq=Fl)f(M}@g&~+hlN&T@#sZ3<DoOX$5|p7f
zNXuVOPwy9_!b-+l%%FZYb4E(qEv5`paA&!S)g?bKRk2Db2s&Vg98RFJS^=KIs-&Gt
zGjqVBdZ3~n6l%~CN>5KullPV+&a{4u9U7NKI-r_@4U(pAaR-6ZR&YsTN$M@;qP+B5
zY-QjG1oz5|^gwF);z4x>tiC9|#ff0@$0vc@1Fn`pj=9AFbCxE{EtcZcoU|erkP+Mv
z>%d94$Q3j|0P5)@RZkr8@t}%6KK?MMI+6j!JR2J`8;bxlA0rzh+kXxwE@nPPE=CAh
zC5BQ=>%lam3`v4g3<&#!Oavt=P)aCLD`Bi*OlPcNTErN{kjYTPyo9ljk&ywEiWaae
zWB?7vf<~bd17O39padKS%?99*$;?Z)QV1^sjhGa{yJlb+4b22laS1Ah6_P=tj+rG2
znQ8E>kN^q~J!mu}fXeaW)Dn<}lA_X7=%^gXHq_D!Bmycg^3q|&Rd7iWC^waW3Kj*h
z#h|`%ngXi%aBD#ZfN*B9LPAJUDX8NM;=-_2Vo`drm4ahYdTDNIUP-Y6sIY~U`w3vf
zH8hcW86aI+TG{0gP2Q=M<@rUR&OA|?{WMv@k$;O7Z164S;>u!B<1Zz@wB#0RT25(k
zh9*l9s4W5#1$puo3&^5dY!C|}=>?o}Ky8{LPX-2tC!p8`2{G`nF^Vx(iJ+zoNMc4A
zQ{-k~U;t?cmnI2};Gt^JND@O9BWRepgsBG1a$%Uj7%LaU1RAymkN2>oFfL^3|4|Dn
zVOUd`7O>SY*Dx()f|m`fDXfwVDQuDqDeRIAwQMyIJ_m%qfIWqCA!98&R1`E?1X9IO
z!jZ*U!wMTY0*wciaAa|%@bxm)aw5s`gT`$n8Q^kUHS8&ZWemlqyckl17O>T@F9eT=
z)o_$B)NnR4vM?|+WHS{#D`BYNYG%x4D*lki#K_>qkRl8k>#AW~z+J*q!&1Xm10EBq
z;YtzdWrLQ7tf00Oq&y5~NMV4FHA#TSn)Fb{nj~r%vLGSBgd7qjyfuuFmRO2pFKEzS
z%CAU)fq|h)K%?5q&ZJf!Ols=s=@r?7iULrBut*WaQUVd6$h^hj0&NXfv6hyk=~_f_
zxRgR#E4SF875^<Rc+09dirq6WD>tz?dnMy7))Y|3^A=liW^O@FY80DGAV{J}7UTjE
zSjPs|*jA{nRjjh`0S{S$`m&yRS-y$I*`V&OOCXq)lAj9cc7X@M!If4D)G3OZY(=IZ
zC!2!^6Hr0N0h7DMk(rkj53>0ddrBdgQN<x0ZD{Hu9qoOK0VPF(`dpx{QZOhjgGPF5
z7*ZG|LG=ogUlG`BP?$gle8DU*0ZPNSIBaqfbCXgM?c^947(RoF%_=?A9DrJQBM&%3
zG=j#Mvr{WI`ED`iq~<|Ne9qLo(p*sMFtrF&a29EU{0GWyx7Z-<-&<@csmb{%so=__
zNEhUD*37&TW1}L_cz=;5NPw+4zo;ZNMUxwxql>&jBgpJ|;K94%Ta1}eys&Bn+|?-p
zjSfRR3kt}hIFOa`AR+-ofPD-mK;AC$VqjqS1_~`3ke8WRShyHP82OmQm<1Tc7zG%G
z7+L<aF@Z@IE=G?3EKCZZE({9?BNGh%=3qBq7GtcE!53O6u>=~621O3I+_(X1g)n3>
zfXWI9hFZoFrV>yqgt3G<i#3I@mjqr4Q!h&`sLWw=VTf(0Wv*dPVU}d5WvO9KVUc90
zWvyWbl`FMuHO!#01!^CA4SNkUsGJdJsO6|(N#V$5D%w=TR>PFSDaioJ)vZi63^lAZ
zOrUXT7{4$Gtb!X&1rLH>7zCbV<Mk_wW?*1g$#RQ3!!RD6j=6J?I9wTq@z8XAiz^4t
zE&@j$ILg402Tr}<xRe7$V=jou2N8K7qJV*cL6iL!52)h}>d1l`54ZS0JV*~0#0MuP
za5}riT9sOqUtE+4(h5pWnw;RI6~&nspPE+!nxZW71<9F#2vmpM;>=8`hzFZ`iyg#F
z%}YVAPJ9>`7?c<p7>ZTEiHe6=fRPUb|FW@hF>^3-Fmf>SG4e2TfD#y!0HY9dl`LA~
zL2V4Z1WlT(WC0JUyaeU(B2c^LC1{EiDq2**z`y_z1+`R?K?4&YyFnN<jRKnD0Vyw5
zVggNnbU-FM;L{%+j44dnti?+3sSc(Z=4QqP%q2`EETE~5X2yk##SUH!DXgGkzl5oV
z1uo0h%m|i^^I}M02g|ZHGju|x=0F{9jt+(_Hqhi;Eo%vTJ7YUj8WXr9&H<Xg1?erC
zlEM=VO8%g!k%(Hh8a7bnRSQa(Y@jI(aKdEoU;<Y~3`NIkII<avX63OkG8E3LVeeoR
zXMoE+tKsNilwinaD1Ha3rWhGOCNnY=ekq&<YLa&_Eo21AgQi~=a6)+Eph+068ip+H
z6n;sD8qOLn@Wcs-FM!4uMB@t~_$k7@EVbMvJSifep@te}kbj#QYk6w8Q$V_SvRObZ
z8-^0z1$-bDXrg8UV{sqIeKp)EAd?_!gy3p~plUXO)RgdM@u!G(Fp4mg@Ma05i1jkn
z^474pFvJGc@`21O;awnD1NK1)?*bvv^v^;j&`kIQ#^NuKP(V^8API5@s;W~ERTCHs
zPo;<lGiXXA-hfTofEsF`QBrIZH^~{P$=M2-X`m)(q5`D*2g+)owkTvY3f_2x^zp%+
zkfcflutreZGZQ=y4e9?D<Y$7KjnI({*vJ{&goJ2Neg+$_s{kFn(@}tnX~V=~6BIHM
z%OGO~@F^dJDJg~VNtq?Z3E=sj1W=g-;TETsK>Cd!55tw~=_wSarh+=(R#xD_;#|-?
zFLJ*MF?y4LY&6^ikZ}lWbRb>_F90aXNL46LO)AbTNd>hSKwS551^<H7JcZ!o%+$PO
z$N&n+01$Qt&v@xTI^znd6$LqonR(zTA<#fUei67uk(^izHq74;v0?zE8O1pufsB%p
zf?_Lu{qpj1z4UV3{DRcHVu+=B`9<maiABj7nPsW^pn-|v)DnGvL&#bLkXFoY1w0a<
zlSiPr7TEBOLU{(L;}1?8pdqdDR0Ysr7AyrI!W?8YG+L48O!7)|L4y(bX$sIeA$V5_
zW@`e5=fEQ_3Lu+6qbB8vdEnkrQYtiwY9ys5CzgU{vNBR3qZg?P<(WA-80LT`P$0wT
zpfOyeNiMKkAqh!AqolMTCsor*0px3!#F9jXK<Mb56KF9A#4Y)GsS4%bIh>r#Y}9xK
znN^anP?TC&npu>ppi!Kf3K|nh1&s@qrJ`91Qmh5aj<7`;S_-gaXRBeLqhP3`V5Fm9
zteK#Yo0<qt9gxvG46lJA7C!QomztVVtN_ZjpgtRDWr#vaYH>-iLTLd=Fu~a|#2Bsb
z2Fp08-uHwLQPRopFlQin-UQY2piVS~??D4_nR%u8rNs*6pt3^4Ib0JlzFG`Up#`Z$
zxtS#;sVUZ=0KqV#C^b31C?zv5y%>4wH8HOel$Y}%vt*S8;LHf`USJpkN_C)Fy-esd
z31}b=TySRQr6YCZF=n?w$pF#^2bI2vxpYuL49;mf3ScLcgXU`!+!Aw&k$dY^nn9__
zshMS|Deys7g_4X!(6B2g<snZUf!iEaYH;=7StYOypuAL?3SJ)u&9#azL5sJlY+zj@
zl)R9XSpr|;1Q{#_B{j(KKxT0~*hmdc#h0L#OO++e2t;}!Qa?&V8`Kp7HHSc5iDC)Z
zq8|a!qMs~=8b(QmEa<Wya05=0DT+NewJ1HcsQ4BOsG3>H1YRvu1nTF5*0Lo-*0O;W
zBi75*F)}a|f#xTwv``yyFkd0}lroD;z-xK1I@1``4*|7qYZzk1Y8guyIv8shB^a6+
zI~cMV!$4~?n80I%jG!?>=ES7pqIyuXm#Ih<)cDf}5%7-9ElwNIxO_!HkzFI)ftmyz
z2-D*R=|F+o+u)`)xRniVZ-biJMLD238Bj|U)W8H`SWts|o}eW?=?patu{^bmDGW7?
z!mu@2jNm3JH~=-Fftw9?Hr(ytWk~B`u7+rdW?*0l(G)IH2PxM8^$MAbQ%i1fWEK~f
zCV^P2MTvRosYReh?k(25#N5>4BG3vUaBuMzdwM~!5omG?)Sm=5?Qby`m*zq`1I&pz
zIk#By^HL!lf?M24nI)in6Q7uua*Hb`HLWDRI3qKyM3W!fNhksbG$_cy{c%v^0n$YW
zv7!WuQWJCHQ_E8GO2A{8#YNyg1gQ4{URMSdEGhx{q7>u{?$o@p%%c1}(1=+STW)4v
zd`e+a7f2c&Fq+Io4IlxKl@QB8Jy(#|A-Tp4)YC`;MS><|#*GWqMP-sekQ{7`BFqwu
zES#X;2M@CZiyR{x6B|1pBOhqijZuJ6;4jO6CT<~S0Y(8v76{2H!UF20aQ)|CVgn7B
zf(K5~hf85e9HqYjiboIzcW*tQ-CNN5%`B!GMsZL}pBa>_7czkdz*uTnYZz;oYM4_P
zds&JYYFLWwYFKL+7O>W^E@Vt&3TCKb4Q420NC6H1FefU)X4pUlAY{ZDF`bkMUXP*x
zZev0ki{Ne=ILILlMOfD`7SUvYRL|hD2;7!ZumY`LwL+b~(}Q^wJTRMDT!K_KW44B%
z#UZQ_1FEeP^B@hjJaEwmTJeP01|VcHT1yGN^+6kJVD&4anx~CDkkusM0i0r-lSH6K
z62_`rj5-z6j)u>3AlJ<x2}D(i6opu;S4U6<mx)wEf@YWU!Ru7PD|r(Y5<p8t5+JL%
zKxTs`CQ)1iS_27QI1G}-;iSY8L>;UFUJC|l6QCHAoL^LwT3nEy2cAO(nU#{6mX=zS
znpdI#8-6TCYA0&4K#B=)fzb;pI{FwG82mKZia@#X7CUGPB{Q$2s0bug2qGqeiXw5)
z<P69UkXVRM%P#_F9Zfb!xrb`pEgs}3gIEa4_29xCQagg`v+JP3NeEOp$?<{4&saFX
zBWf&+d`##g?BI4!D$2|Y$Usno09=%SoLj=s%m5za0xbnk0FQAog2%W(qg+gq4C0`1
zE|wa|xHUtOMGb2<Ly-??upmzWG9CslYFH#0Y#35lK}*LMGL$hC@uYy4kwe%uOiT=n
z48aVAJSptK44NE?)yOkA;N%aPW><i%kb`w~bQIDuL6MY^s!&p%kC<#z&;YgTN-_|6
z9wZHqIt33yaA(xVP!rm@12s=Ui{n6Xjfj0ve;KmC1u5-f7C*3V5v=^dNXXz7dq_s$
zNdBPI4P5|*YoQ`~?gA%xl(Y+-orNcU9ngTdAE-B!nWm7JuTWf?oB>_IkPPk~C4rih
zrFkic(hjL90v;m=kH>+UNk#JDWX%j2*aMB@-C_adAXtJ0Rn)iGic*V9b4n1^a8W-f
z#e-57Qc|7Fz`(%7#K2GtPO5xt%;1`mMS@X)S%Q&^xk?qa<%A=rprl<;iwK0lX%{s9
zR>KIIHxvhr#xo|`A*U(d!~#g0J0-OO)Za=(6bYcQ5>WO}2G0aTlaT_rUW3$j`G{#j
zP-@9fDh92e1+Ob9D$UbTNGt}I5k<L~d7$ZfEXyb1@r9X`GE*wxsR+~$2DuNko*!a0
zWPK21U7vzRGH3+?D2XQK!A%6!(jbiHDosL(57d)JP4}=YhDa7G8NmgZCKI@Hy2T7?
z-l4`Sczo6<y`Wef6tkeXVBoD1MUBbKlnRgvVq#7L5_3sdVh-jyaOA-PYG9)-y`UH#
zbx6r6H77q0yo3U@okamW;s#pq1M2Q5B&C*=r>0`Y9w=QwQXHrXOwCIH^>7pO6wqRo
z^hgEU3Qa|sc`2zCh}bQf4(Zx}=Wv<R3yL9`6&%5!=5J9u0|SEv?s$bZ_)+><p#BOd
z6@YUjsB-|ykD&Rn6edYf+%i|$z*8<L1%hG~;%~4+p{t%Sd<{zAAPnv*gA1#bjNop^
zN=9(fM0Yu;9`l5|oDF^A2vmH7Z3S1p7#;w1aKIh_O#vW#0M2FxRfC`%PF08@K$v$x
zkw=tgUjG08|3BC>kO{Qeph9yFhyX_cm;lEEsQnNO@(oCcfsKce=YN$HY90gm2-foU
z(_|{L1+B}`WG(_N-76{wnF-njr^$jQ;HD{Xiz7ZB)R&8ozr__FpPQdjnv)tIe~TwR
z9yE0em0^#MPsvY?k1q-U83meL1kdUffo5TfKrNdhP(!2$RKyp}1StU}om(6QMfnA(
zMJ1KsQltpv4Wx(yc?Qym0nH1p2X)L1KpWE-SwtBa8UAu9atLwgaHxTIOZ?%gXAlOn
z7#aR>X>+J@2yzHAF>-M-F&2SzYcl$2G8ajMYz7f-nu0|OLG3NB`1rKalG38o`1m4F
zvb@C#UPuh?ltN}pK^s?!f<V)qypV;^un`DQB8U=$h=Jy)krzjUSEk-#&PmJxFHeOm
xzX7eG1Um+sA8&EkK!O%jDKQj-5*7;w4-<HzijjkfhmnVohn-6W?8V<)r2sAAYQO*h

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/__pycache__/samplefiles.cpython-39.pyc b/python3_samples/utils/__pycache__/samplefiles.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4a7afc6bbaf3df9c8b931984aef4200fcba35ad8
GIT binary patch
literal 8647
zcmYe~<>g{vU|^_8`<igUhJoQRh=Yuo85kHG7#J9egBTbXQW&BbQW#U1au}nSz%+9d
z3z%k&VohO8Va{R8WshQK1gT-k;mGBT;smo<bGUN3qqrFv+!<2XQrKG<QrMfBqIgo6
zf*CY9s^Yl<it@`cQ&JU5^7C_w71HvH6v~S-OEUA)6%zAO6pB(4Q$XyJj8ui<#N2|M
zR0R(gH-)s!oYZ24f};GC(&W?>g``R@Bn9cId8tK-C7JnodR#9--tp69yv3PWl3J8m
zlA2qr$#{#sAT7TrH?c&M@fJ&YVo_c)NEI?>fq6WEfq@~FA&N1D0p#f@<`l6M<`k9|
z#weCl))dwhwibqF#wfNF_7sj5hA8$F&J?Z|hA55{?i8LDhA7Sy-W0wThA6HS{uF^0
zhA8e7!4#nuhA5sC@f491(H6!iUT`o-+~Nub`^_ygCpEE?iGhJjK|w*mDmf>yxY#NI
zA(x<#pOlrFTvDu1oLW?ts*qT$keH{Cm{eR+l$Z?iFgW}`K~a=iT$)n?HU<(_`DqFu
zacCGK2N772V^Mms6_^FGIVG_qQ9&amGr2_5O2JV9#K_FgODw8XNY2kINzBZHMM+wI
zPELL~h?kvOiDUrC5V&a|j-H-Ea(+sxZc<Kua<-L~LP2FoMt&ZO5)_Vtf<m==a(-@Z
zVqQvoPG(+ed}2|0X>Mv>NwK<)f_iaDVo7E)vKYE<1@+84P*7#&=fy)j4c3^KpIMxW
z5WuIaAh9ShH?<_S2&%CtH!&x(3Y@Ouk>qMo%s}CUWTYyTCFYc-f}<t1EVZaoAv?8F
zA-A-+L?JOfF*8pgDODj6Ied#!a}q&`AH^I{#N{RCrWPyYr@?Xo!UYN%se0*pItuC;
zhVjLDMe3RgCHV@83VEfu1(hhqBo-AVV#FZWeUN-tl95=V05?|;tk*A@2`OKI*c=QD
z49=i30aP&5Ff3rGVO+?_$WX(W!cfXsBvr#$!w}Dyr&PjJ!qm)w4W}^nvVqj3Fa$Gb
zGWlt;-4eqd(YFN9qxqH~Mx@^2MTyc|A_x_5uN2>s$OGkfEYYXQd`mnYk-_5QlQUA2
zv*SS-;1&y%1Z4xJTP($?IcY__3=9k_8HxlL7#M!#>4z4l78UDPW~L?Pl_%z<>-&|K
z6qn=|r7D0jaxt8%Z>Z~^m!6-Qm#!O7ln+Xk`bI{&zKO{hnR%%`sfk6P{G@NB=bWFC
zs-K=-o}yn`l9^Mi4@tM+{Hs?`d5a@HJ~J<~BtBjX6#OEfcwymU=3wMtVq;{1!QWNF
z2=C~@bR;u^R6{X{4Jz5eu{e)`fuV$<hM|P9nW<Q+h9QNql%ZJ4iy?)nnX!ox%x5f`
zmBL)Zuz+bHLm5M{nHNI}3rH@76)xuG#gM`V7GsBt#d$HLaDc@);bKKz3@Kb-F>bh6
zn-@b04_J&BE;h@HA%zbt#u&_?$)D&0D-uCPNl<B?LUDdBs6NQcEU8og$5^pKex5=C
zD8dr-6oL{ni&G&*V{vh65x8JXEh@?{QpijLr)rQfpiBi)uaK6QnWK=HR|(2oB^jy3
zXa?(X!L^_kk|0CC24F2H6N?p!3sRFa(=t<26f*O`g>OOvEItwv;D&;X0by_foms2^
zF7$O2z?m_#SO>LuKrMbT%)nP1gY3Z2Kt!>ORUb%@!txnp1et|2x>iI+2h|O@vbs8=
zr1q;)1jih>AWlkENX;zCNG$@F!wUID3V!){sftBx3=9la`UrIe(1r@A1XsddVks#W
zv4eDJ;?RX&o+&97fr`^AEgb3*Wf`O>C{|J|;sjZshC>5NS*WB~1Zv(@>Ecj~FdY$q
zN{U6?APWp}=)o1ZN{X7yx41HkGxLf|67!N%i})BA7;dqES_LZ^!KEg+%!Ckv3=9lW
zJji9OEvQ7(0hLceB8*J`S(rJP_&_vXvPvAKqyv`&;F24ov;#Hgz@;6?RiM%?ixJdv
z?PUsPSjp`75+tq3c#ADFFC{gvWF=D(sB{E7dL?5KKLZ1#yhpelR1z1Z78J$DgBp&-
zpjwfE39a0P%7e;Ukhee>#0FV~!zD#xAe$sXgcOJX)iFqRvd704mlVavXM%zXYNZfL
z0Dz@2>T8fvu#KR!P{WYLP{Sz65X`WW(XU99fq_Aj3GBmLETFbA#5%CWpg6k49Uq^b
zS^|=ZkIzT4R}95oxRPX0xPg2M!XP#XgY5=|dkquBb{hsz&0i!7@*~&<%n%naXQx))
zVg<KHu(+cb=8jUF?tm*n2?J1wgS-xQ#}7sZhD?SUhIob&#sy3@j46x@nI<sypQ~jm
zVXk3HVUlFXVrgasxif{yh5_8vXDDHX@Ff|V8C@6<vS1yoNIF<;Ks_9m7MKoJ6dkNK
z3?<CX3}Ca^Z5UEGTo__~YMHAPO4t^#*D$59OET0jOEN5k*be5|FoZJ{cCj#&a8zDl
zWB@^s+7wPyHIWR3T?Pz=U2xO6pr-pFna+h`I+zDG{RPPM35=B{Ak!HcP|Sy_iexB!
zVZc!M0&YIH4MPo63Xdei0?vgDE)1~&NcM1}*aPN)?8(!B+A{}>Jy2DV40%ij3=<d&
z1Hd86ixjfFHVi3zDTt8eMG0A6kPRuEEDS6R&CHApc|wK^6BvtM8Gu8Kks*?Sks*Z>
z+>Y>Igm$e!HAqluBDiKw%u`6M$Sel6y1{)*NKp&!V#1o%CHV@a#i^ja7OZ@O*QEJr
zSo>+9o*AgKRF;~TnVOfJs!(2(SWu8!1nvnWCMTyB7iZ?B!%YD-MnH8tsDA@$ha~19
zIi(ov7EQbj5l|yBz96wALqVgsq)5|B0o3;Z^Gou<Z4X$l8PqvQN(J|;6*BWm^5ISZ
z1qld)oMHuTgkbA=LV5)tb+;Icz*QtlVGGLepn5YHlu2b6Aek*zB!;P$v6iWZv4&{@
zLkhz}#>q^DETAIj7HeX0a%SdA##?MDsi6MhO6FTkdIm)bpt3;`M1ad8Faa)*xNJau
z(d7Kx0=s?&28Pd|f~HCxsWbwO7sO|zr0Ln@<R>TQ6x-<`bb#w-@||0x0&<WVh|mBL
zU{`?&kkg7ljq+(AacvL*s@6d@Vrp4xUP*jOW^U>&PEh}>q$n{n?-pl{A*e7<%*^ux
z*XNLWT2vofjSQ}jF<i~cz`y_wWKdNBD$!zvV1Zo1P{Y&=4PtOXs>yhZH7zGUv7`u>
zyLW(`405+77r5HH#gbuKP<e|5)Qy4E<gAG$B}K(WpvvSHQ(nO>R&ZzX7H3FhL8>dL
zX<Y>Bk7%+$igHePa=gWnk&+e<;zA-79A01o6lk|NVDVKAN-&^NCI%ihCIJ?n{~XM0
zj6DBY{&TP)u~~T-IT)Fk1pW&!@i59TBY1pZo&Z#ik5LZE1eA&yW*<rw0cx{>A`Dzb
z++hTDh!!xyD~BDmOf^gkm{OP)GS)JeFxN0-v4AQHP-PG+7Q<A_Qp;MxTEp7RSi_vf
zR>NY$P^bY`&y1v=8B$>3P#*zS&w`|$1*+Z+Vtx%v3Trk~(VP^vJf;-(TDBTiaA$j8
z4RaQI4NDDc7Do+8jH8!@k)f~yBAUWb!<NF?%M2C)xt|Nkd@iW@9w_GjK{bB@W03;H
zEHoQlKtz#jcmehSH<At9P#dhEHh^6`fw8Cvq8DUh3f#?k0ua#@Pz4Hh^9061FR+<B
zNM`at&6I$cS;C1N>Ju0X=Y=z*FrZZf=TNEv(2xVK-z~;+aM1-W5Wp?)B2e4&7CShX
zn3+Hs#zvYfw^%>}FSl5WGZG6@Z?UG7R2HOyvxp{hkqs!<fLg9ab|4n0VGhp7;2f*T
zRb&7XH3YSTd6J7#6H8L#(~I&;3&5EOY{o6-;*z2wBajL{s0vWCyEwI^2yCoAsC~u`
z&Qe8YAj8Z-N?9SMAj(Lv?ch8GCcqhstt1~*4DJPGE6^Aw1Csy?7o!*>3dzX92pTzH
zVr2Tu!OF(S#K`oIgAJ^Tkqd_mrZ^W^N0lH_Aq=tGEg4iufl6o)2Du89$3QhiaSRg!
zLpnn(q+!J<$pFevS&S*n*~~=+C7`AlLl#2{RJ@rHoYS;A8QK}r7*kkMSX($sSisFF
zMo4;SW(4Jk1*|n}pn<4DJy0WNAww2x4QmP;Xvn#jxt2YT10q_>p3YFq0oKidV($yM
zy-;y9dpVKp<v_BR18(m!hz(h+H5_nzIpOwl)-X<BEUHUqsO19N&Lzo^!Y#=F@%048
zq6D!0>`+^EpuXinvK5+V!M^2!**bx-PzG)<7u;TMxV_vZY*`#N>^0o!Oewr6Y$<%b
z%(XmuEH&)4JT*)c7>o8m!;&B3r&yI3rdr-wz8bz7o*G^oh8o@!fii{(j78IG_-Z&o
zA`=)3r`7OevDR?HZR4q7p1@f21!|HY)Fe=agk;hL#=-|xVl_OVxL&|n!nKeQ#0E9p
z@?=;TO1LL5RjNT7hde1l&5WRi8>m58!!m)XD5Qp`h9`@60pCIfNrqZ}1dkuY7pUQ{
z5lCYSW~ku_W=IhZX3!K#%w<Mt>XnohLB?Q^TUof8I^f1zLP9}eUP@vy(ohX3b1I~P
zMuqXV#jw@l3K~iI`8k?a3gH>4;IW4MB89yC5>RWbC>7*5tnIJ_xKW64+2RCn8xk^_
z3~8$(8fBn{8TudtnO374L&#d}BO+v4j@>XqPAD$O$t;P7hgoqvcsK;^1W(WuNM^A@
zCTRKu9P}YYrKvg!i5OvDoC+EdDM?j8#JECvW=;-hxE>N+sVU%CNXjqC05z{MO#sj5
z6f2ZxmSlh@vOtXd(h^7?0yG2+o}Pm13j!wsv>6otyi^5^{33;t@_ZeI%ru1r>>f-&
z@=*fFO9`5Ab3q9Ll8%x60?GtPvpA4cqyQee26u@{GEz}YAf`Wns7Jw#POL3fVNi91
z*<!6>>R^y$fOHDLn!&_M<|0i{eaD!6i?M7a<1NNauy7HmCIPp$<rx_mia@Q_Ds807
zC$Shj1_a5bSb6~=noLnVndy1?MXB+i0iTLnEXDanCE#{A<&G|L0oBv)AOc<)gB_~_
zb1b-taEmoJvAEcfhW6RQ?0X3scG6_J#gbT&SzP1^av3|gU2%)MC^0W3KR3QOH8tgy
zNJ(N*dTI$|YAiD^J-#ThB=r_wQes|8L1J-nd`^COY7r8@6x5r&B~qN4oS&Cc9G{e$
z1`1GccjuNcOf)gAB(*3WBFGC#W$`(wdFdq?;PzLQF_&LHct%VKtOndqQnFHTNi0c(
zl(V3eT~L&tl$exLsgRmmP*SPrr^yd)Jb{}?w^)l3^U_mqG3TV_6@fb^Y@lv-Udk<|
zf|OgFAcNdM<@GJL<ovwk#FATViN&BM+%4wJypmfS-l>(4&I@Re2+~2zOwUWqiDJ*o
zFUc#-$<Y*qw7x(CNRZYdsCOGBhCL&Kv%oFJ_$ZN-G(<533DVSJa0-hONJ)bfOEBSE
z0`LM0GObt~C5)mSR21bF-(pHhgLI)l{o*KIWF^H>957QMSp=N^K>6er4{~;3WMp6{
zE(5j3q*)k67^N7w7<vA)G4U{RLE3&S9LzYxS=bm^n8g?cpgQ;vI+(Z^S^jgdu&}W)
ziZOC9a<T9*3$TFLJj_gtVvKx@e9UZ&e2fB&94si~waD($Wc1TyadUHX)8qpUt%6$I
z@$t8~;^TAkQ%ZAE<Ku7f#K#wwCgwn8*yH0<@{{A^i-bVs0}rI8%1tfF$WJNa0VQy0
z5CIx~D3S-Ult6?9hyeBQiV{IAaPCA9puj2uwO7SKMHHxQ&B4eb%<!K}n2C`~m4gim
zoj6>fqe_3ksx^6wQbBHD1<$q?8H3ntkn*-D9Apis4{(dQII{wpItvPlGV@Bn1I)KL
v${{m<#YLdx2R089eW=d5#bE;p2Rl$sF6LukU|`|oU<A$k@i6i*@~{H{J4cX)

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/__pycache__/samplegeneration.cpython-39.pyc b/python3_samples/utils/__pycache__/samplegeneration.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b2b966df5f1085f2f8503556c1df4fddaccf1c89
GIT binary patch
literal 4594
zcmYe~<>g{vU|=}A`D?-tBL;@YAPzESW?*1&U|?V<u47<eNMVR#NMTH2%3;i9ieh2}
zv6*w2qgYZHQ&@6XbJ?QUz+$X9?719K9AGwE4reY`6c?Dyp2MBX6U75&bL8;m@<s76
zGNf>(aOLpl3PcHj+1xpTxk6Dwxx!Jxj0`D!DLg5>EsRkjDa^qPn*3GiTmeP-Wtl0d
z3MCn-3RY>QdC67@>8W|CMTsS;@x_U`1v#l2nh6TIsU;ctDLM+}8JWo$3Yo<Ui6vki
z8L5dyB?|dzTp;Zb<qA;knfZAN1x5MEsl~;5TrWXEpvicPub?P1uOuF1Pf2Eeo}VV;
zEp8uAcmI&g+*J2~U=TMmuOzjoAU`LuBvq5~mS`eKIx4j&-zBxgBQrfCAiq4d2xL-T
zer9oMd|FX{ZhS#;iYDVN*5b_c+{BlR3=9mKjJG7xQ%mBDONtUR^B{^cQqtnnGILTj
znQn=I<Wf>gQj<&ai{gtj)AJH@if{3NM9UM)Qq%H_a+5*v1adS8t1~b#a4;}1ID_KN
zhM9q(h9QMflA)Hlh9QMXlA)HRh9QMnlA)Hhh9QMTlA)HZh9QMjlA)Hph9QMblA)HP
zh9QMrlA)Hfh9QMRlA)HXhB1bzmb;dxh9`xyl%Z%>2}2Eg4O<Os3Rf?4EpG{9Gea$3
z2~!GpGouSbtXB+EEq|>*4Mz=&2t%!433H8L3QrA-G(!z<3U3X!2ty6OI713wFG~$y
zjere94R;Mk3a>PS1VgP*2}_MoGb2b`k|Cb8Fegu`gt3MPq@sqoh9!kRMWC0hR=9*M
zjUkhvMtBKhA7d?F4HMW+Us5=0MN&9xMQiwK#U?Q3iIuR|Fw`)m2=+47ikEPJ*frcW
z;weJCEVUw;47C!O47HM(47E}z+%62UF|`5^TNiNFh|~y3GAv}QkpR0+0>oRul_I>5
zu|^UkQo;?QB^heiQbc;0Kx`X^5*~;y5Koe!hBZaBm$^m?q<R8lp+X7o0=^ol6fsGL
z6kcJ58mSa<2uqw{A!Ds{4O5DQBtxxC3QvuU2t$puG()XyCPS?l*j)-p?vjPrAp&xn
z7}V??HL?(sp{7dCVXhUeVOqeykimr^Rw#z4R<2gQM!rV0M$U#|0%PGfu&GV80yPXN
zQj!d{3N;KV(vl3diZx;&e`X2PDAXvX$fU^jGJ`~HKq*H~lA%_qh9N~>lA%_)Mkz%h
zn|T6LQ4UC6k|9g5Mkz%mMKMJQu0y$o1+0P(szRtn8CAsurb0fjJ926TYPf4SB^YW|
zYPf5-#2IQ;A$k@F*QnH}E@T4nY(TNdS;{zpsc0G`1mLnFpin5`T_92;0t(%Qj5VSu
z$~Fv85pi%Vil(U8Fic=7oL0jP78ir4Tp(J)n<ci85h?~&3lmFGox@zKR-;xUn#L5&
zpsAL4jg^6cOF=<F!5v!CC?qNrXXd5nq$)tm5RLpIh13dg@uR7bRH;ysn4OuIu8^ou
zkeQmCs*s<i04f&1W+WvhXQvnCm*%A?fQtkLjiS`V9EJQMh2qM*l8n@n%w!#fl+=RM
zycCcT`FRQnsb#5oCGjPt1v#k+nmS-p@(V!aQDRO`r9x(2R%&tyNG-@!xuu|zC`AEQ
zFeqpsn+4XJ0F#R^NGwXsEl$u>NX$!7D9A4^(S??PAk#oaWKn8yX-<iPMtMeNNornZ
zUOL3~q{O@w-GanoP#|dPfei%{jz#IkR$vw=w2Dg-OEQz=6N}PIb5rw5iWM|cGLuU*
ztrQ#;K#a`%yu_kPh2;FalEh4q*$U}7`ALa6aQz@NN>Y<E@-mYXa}>aCO)W_+Dpp9#
zF9Mh5(7;C&;5uL_XhfizlaZ=Wl2`;PGQqks^U~u%bp|MEKpxF6Eh#81fp`UEFH(>z
zXn;c=>~1hiAwdHwotc}eqW~(T;|mf?GBgvQ6%LBc#RaL!nQ57+#b7792LvmWWag%V
zDkMndRFbcdo0y%dkeCOH$^1M}e4rQtaw5nIm~%Z`+`#riD>!Hv7Aq7dW~L~>>_GSi
z<Ri~Cg#@@$6F_xhf?s}KYJv_T-9Zy%d1g+ILQ<+iX>n=_iYqenic3-xQ$Vf*dj^&;
zGjmfD6!P<ODiw-Ti^@`q6%va<IV3eT1r#4BMu0q$oL^LwT3nD1N#GhqiFqmcxtdVR
zp^=$igm67tawV9IKz2Y3&8bAO931nhDac8wBqOmzA+ab`0hF3kQ$Q}y%qu7@DOM=S
z2l)|2Kg=Ntx%nxnISLx7dg*#PAXN&viN(dK#X1Vb1(|upI-snS3Cf>33VM2antEJd
zQ-V@UN{jN4@`s}WEIC0Orh`ZTkemrl?eJ6u%PHW51U3;At<dmFO;JdIn3AB7SyG&u
zlcu1N2+n2-<(VZJ3fZZZ#R>`4>WGA^4#~<OGt@H-p-m5U9R>9qIHxv2QwI^*&~hd}
z56N)}2ty#L85ESMnI##iMGBDQrJ#|Sr;wajoT`wQkMN&Dd1A3bZemKRrVgm&ON4s|
zR4~9i3M+&#$}6y&it|CaC?y3P&`6~QQaJ&QAW$illd1q~Kq&<K1r<X}ty^OGMVaZD
zd5JlY#v`m)xFv$Eu(-vZ7M}xZ&)s5ANzExqj8D78nU)ftlbV-al5tA}T%5;)+k<6^
zIq~J0DIn3})a3lUl;Zd#P&+L(9vlI;gkhqIX(g#e5J5jp=38tKvx@i`7#O1XbMnjM
z(~43H<C9BE^3&2ZnQk$87~W#?F-!-w@)#IC*uP`~wH{fDL>L$tV2&!e#avLFa*L%n
zH8mwflkFBCtR)F9q>68Gq!$#&gUX;=e5h_M;$dK5DB=YfC7f6sUr?Exlnho_oLZEb
zTCB-*O8~t{zr_#B6OcqvtjSU&4>CmoWC|D9vtT1`aljP5WC1yu5$r{fk26X#b5c@^
zBtaTPU_LHQ%}cI?db>yzWS|&Go)?h@i}Q*!nQ!qFCnx5l#-}BMTE@57K<PNI=oT9&
z3Fj5vl7d%T@x`g>pgJf%rL-s!lrwM1BII%tD?lx6P(;9_-87kR31H|E#-j3;Kv8OL
zepzZfs8lO0C`nDZ#RsY$LFEZFu^{-R1qG=^MW7acCIb@#gC_GWPEh=UeOsgl@*!(}
zQA%pjEl!Y6!0r57oFLDDnVQT+iXb&$NpMdAA_-<{iWjkhBw0W?=oV*cUTH44Q&hwT
z3O!Ko;1*kaYC(Q-MiD1Sgd0SFx_-Af5>rw@sWu-R2SuRhDPjj1#gtcYi>0`*sN@!F
zW=U#p@hvt;nBC&Z$V|@wdoV4t=oWWQemR`O1xi@)#W|VDskc}Yi;5B}Z?Pp7msA#{
z-eOP7$xkdXHqulFCy`s+(9{8H))ax#3nKj$@qt_i$^=CMAeJDA5CRdxAOaK$MbaP^
zIMuL%Gu|!6%v;PUsU=Z7U@k0Iaut^(7L`EKAX|2MVo`c=lmM(KfcK?D5D5!We%xY%
z^v8;nK)I0}!ikS!DJV`UzQt0US5zFuk(ZyF7GIoKRHO!S4yfaGi#a>B@)m1ZVoqsl
zkq$^y7ewfT2m?^CGA7<)OuEGeDXmvB6a_OdF#PJ(4=qkDD%P*eOiRowPs~f#_bV+a
zF3B%SRdCD9NiBwR^$m6X^V0J(^U`$#it<4%KYb%3UEjpyjLf`LpVY)6P)kbRNY6Px
zB~`z;G&eW3NH-TEn*)=DSY51NT9TPltPkNq>yga-JiUU-Tl`QitWPZi>PcP#l{#u1
zj8MqJ!OX$P!OX$V!NS7G#K`oIjg^g2fSH4tg^`VsiIM9s7dsD=5Gz;?B+tn7n~jZ+
zlY^0kk%f_in}w0-KO0Q;FUx->Zjd+&GY7L6vk;>Q3kS%Qe|&6m>>P|dOe~BXtXzz2
z|M@tW7}*%v{;{zNF!Hd0UBJc#GW{RRe>OG_Mh+G(MhKsa4aVZ*0IOpbVdi2MVd44D
z#?14di;Ia-gi+)l8%t3rxIFUH6ureBAD@z+93Ov+D?UE0w4}5sH9o!wRKnh3%`43<
zs4TJu#UFD{Vonh_LvezONWFsMlp;{Bi4uYH^%C<Eb1F+f4OeisjN*n!fs-aUOWxvv
zh@@ral%y6xG8{)nN*bt%Rt(Ofw>V)%W^oZHqZWa)EXcLsB!fhRfSko)lbfGXnv-e=
bDqe~~#U%?T4-*Fy4<i&Z@h}Q-GW-Jom0wjQ

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/__pycache__/staticargs.cpython-39.pyc b/python3_samples/utils/__pycache__/staticargs.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9f7b313531af13020cade7da87a7899e4553a5a7
GIT binary patch
literal 2397
zcmYe~<>g{vU|^_8`<ifxmx19ih=Yuo7#J8F7#J9e6&M&8QW&BbQW#U1au}l+Q<zg&
zS{S03Qdm>iS{S03Q<#DoG})_`as?FSmu04;DwO2s=M*cHWF(d-Bo?JA<fW#jq^2mO
z<rgU==BDPQWagzSB<7_klvEa^CMOn`fcPaDsS3p<i6xoIT!}^LrManjCB+J9Mftf3
ziFpbMT6&pznF$KX`FUxX>7_-9C7Jno3Tc@+sX7Yf8JWo$AeoXPh^47q<%wmfY57ID
z3dNbZr8yAA1x5MEsl~;5TrWXh^h;(0QBVx>C<_AvgEPpp>lhdqN*ER})-Wt&tYu7L
ztYKWhw2+~Uu}G_iF@;Hzp@uPqS&~5<L^Ikjq_Du{-BMU<7*p6J8KgmSEH(@&>~Ogx
zup9?ej@^bKg%d8<R>EAvn8E=znN5;GgrS+ShB1ZHh9QLuu4WZj4W}eS3b!;v3XeEM
z4VMi=4P!8aCT}7O69WU6f`WpABPe_o5P=z=Sd<P8N01=yfK*7TR7gw#MGe>-km7>G
zqQu<PlGGw-R3<0pDI}#TB<JTAl$NBXD5PW-r6!l;R6?RDza#^sQX@CNC>3l(W?o5Y
zW=UpQszQEVYO$uCf?s|~DkM>WOov1^#C^pX`K3823K@xIsR~J{sd-@i1x5J<sYN-J
z@MNNpmRVF>Vy#e|nyO%xR+^V=l>kdY@kpj=XeQ`!flY8MN-wqovp_+Eq(ng@B{R81
z(@G%(p6VbD0SPN4rj?`?DP)!?WF!_NoC7ifgke^K0*okE1f`ag7Udy11Y{{VAEc%z
zBp|se0hE|ii&7Oba|?1ZlQTg<o06KAnU|WPP?lJfnV6K5S`7CNJg`8~mYSjmmWbjl
zPRuRHNsZ4*%}X!IxFu4Ynw+1PQXHQI$`GmXsb#5oCAWlOqF|pw1=&+lb4n89OVS~k
z;)DGyk&?us^wg4gu#uU0>G4I0C8@Wd>e6oU!?GFFvMA1y6gV>t#)KNj;P4WZ<1|@r
zu_WggRNmr9Nlh&PvskiID~oS2XXcfxWV*$T6dbo$Kxua+Ly-Uj1H-Re{m|mnqGJ8Z
z%(TS3^2EG!eZSI@;*$KLR0X%poYZ1CSKm<AKQBE$GcR2?peR2pHMvCJ$Vk^WF*zeM
zFV!bCu_!MyFJ0eA&pAIORX;twJVn2>Br~U2A7V4eO1*;0TY}&SLQYmfpu#~06gYxh
z%sfnd|Jhi07zLQYm`MoClVB<Wslq5*m|<nhIh3-cgsFxxg;5fcml<ssz<eesp9#um
zhVq%Ad=@C51<GfI@>!vLHYlIXh5=k=utWLmP<ak0p93n-3FUJ_`CL#w7nIKp<#R*%
zJWxIlyzE)U2rYX;60<@1I~6riD}eG;W`15`QKdp+F(_MQ7Aq8`CZ;H4=0QphP-zfQ
z>FneTPQsXFyn;riUaFpsLSjyiLRn%?X=*XJRw*ti%FIhI)&y52plSqE?17DdDk{kb
zi6|uJ7lFzta4}M>pplT4lb=|UppajrkdT>IlAuXaVNsM=PS|^3M}W!-5C(e{T#<qN
ziMs%XWMGgE5C$88wd6o4F^~hdJhLQ20a9o}{R*yd;eiIRAc`+3F)yVcvA8%sC%-(k
z2#H@>P>@<w#LmFLa7!dFKeISBJ~OW*wWusHC%!y0r6eOtEWao-Ju@#c2en8jVr5`p
zh>|ML$Sg?(Wv=+*)O1k&9iIZNsiR~Oa=D2W@oAYkC8<Sl>02BrnZ+fEdC94_*wY}c
zDgsr>MWB46$x_70z`#%hYN{0RfZWCkj=&;bkSM5V0GAI%f*>goSmBJEaqU1E7u1Ad
z;NxNzV1z>^uK#SzT>sfvpdw5~;tUK7nk={2<Kt8EljGwd7K3s$SO#JbnvEPbx%nxj
VIjMG_(yf?{fq{XAi-Vm*2LS)yx_bZs

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/__pycache__/waveforms.cpython-39.pyc b/python3_samples/utils/__pycache__/waveforms.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e660db95e1a7a22bd1f8dd4d6e39b9cd2ceaed57
GIT binary patch
literal 8830
zcmYe~<>g{vU|^_8`<ftW%)sy%#6iX^3=9ko3=9m#>lhdqQW&BbQW#U1au{=&qL>&#
zY~~#1D3%n)6qX#;T(&4Suo!C&doD*5M=oa+XD(M1S1xxHH&~o4hbNaeiZ_=piVw_Z
z&*9G%h!Oy^IdTMZg`$LVg`<SQe9jz^T+t{|Fq<n!ELS{AoRPtuA%#1Ir-dPfCsm@E
zIZ84`B!w@9zlAYMDn%egu!SK?Iz=c&xP>7~CWSeeK~uCUoGYLxzbrE)RUtREBqKki
zSRpOHNFhBnFSRJKBr`8vAu%sSp`a)~IkmVL#4XOuEzL<RNloEOFG?)SEJ-ZM%+E{A
z(JfCbOH}|7Y57ID#d=&XK|!R+c#E%~C^N4lKCLt_8Klxrlkpa7NojU!r6$WQL9hJG
zyb_np;*z4wq*9RdE%Bn%#FY4g#G=I9;`p?p{M`8D{JgZx^jk7uiRApe;*z4o%)Am*
zSvjz53X(}E@|sMy1QH7haw_9XiW2jR!A`s-4c3DYK(<bk@s>z<eo=N>PJX#F*Z~2F
zMa8K_noPI&(^E_0OH$%teuW66Ap|rTZ*jP!mZT<^<QHi&-r@?$%uNkWEy_$SP6mYu
zGG>A$9R&skhE#?q#uSDqrWD2~<`kwB<`#x1mK2s0))s~+))cm222J)`QsIeZsUX(}
zfC4DBB(=yL5>fd@iC35y7`PM^6cntIa}tY-trBpnO;E^B%1TWxDON}<N>xZLE>6uW
z$xO`2sZ_`+EiO?=1RGPFnOl&Ps*s$YSC*QWnVOfJs!(2(SWu8!q)?I%5-u(&Dorj?
zD9K1w$O6S9D3v57CS~SimQ;dGK#mRt4QM(7C$&UqVgs3!Qj}P(P@D*|tXKgQ;|iH2
znhN=8AcMiifD;4Auf+<*1*yrIX_=`h3Q3h9d4+`Nvc#gy#H5_m_{5_0;@AX*;#6=t
z0~wH*2iBjUrI(qPnE-NfT4s7FC|T#{DWqlQq=M6SNk(dBkpi+?ixm=!6;e{uGV@YX
z6fz-(fP9&pUsRM@T#%oa0?G&p(FH}B`9-=~2uq9gz!riD$D;IND=-TbCy+!O54KdJ
zxTHwaN+BSzBm?Askjc26r~@|wWK4NRW^x9|BPEHMdBxxm$jk$!2XHP_NKDEvg+vHa
zn3d!!B&DL50?pj1DY*O(@dn7kqQtzE{M`8B)YKFOjm*3fO)G_95F1oRfE*5ySI8^P
zO-hBA8u>*C13-p2CzhoqmRKo-rz#{T<|*Xo<y0yZr$QVGHW?Dx3h6odNr^ci!wM=D
zg8bYerX({X<rr251_lsz29>vI3=9k<3|Wi|m};07GBPrhFxN1qGo~>1GS)Jdu+%U%
zGsZC0FvPRgGMBJ{cr^_1>?Ir^RxL{jCy1_Li03Ndu3^YxO=qlOS;QE`kjaq3RKt+P
zp2@g`v5&EaA)Y6NA(%mv8B}g@Lt@b_Gbgq97Kj5YiZpp|G36E9VuOUnEf$ccia?b{
z5y-8#ILZ=>z{#_Sje&uoNDxG@g9uIr28LU#;1Z|^REykVDJV<K(PRagkdp?sA}uo=
zoN8`yLktH;#VyA8TP&ayyON<umVtrcSB`#YacWVqer0A_VqSS-Ub?<tX-RQOeo?9d
z*tKx3zM-yvUV46JUb=2TQ9h^)&^I#D^-WC9$jnRiNlh%u%gjsHH_~&?Pf68JPcKi=
zFD=Q;Db|M-cEx%HmA5$J<1_OzOXA~oL9rkPiXK)TRvs2UMlNO!W-ew9MyCI4OdS8J
zlyMh9dN4DR8A1A?7!=i@fB;9Y7C3rqz!AKVp@t!gxtS@2QIY`^xh1SM3|XK^Wol-u
zWvXEcX3%6x%w&X?fS^R@0xDJ#p#=-1TmdCmNJ#|>J6M$i&IFKp1yr0t3O|quDVgAs
zB(Vro)FhTD<R%tCvK%~fDdZ*QrWV68aam$cX=*V%ry+6~G_&ODals{nQcFsU@{kG$
zkcEy42_QScB^4-xgG!l1a3+V?11dowmViq)a30D}gPRF50+L8T`3qd2rod}t1-PS&
z6*N-y()Dx{aubV-Q;T&JiVHIHic!m%g8br4P<5=3nFqGNIJ;6uK~GOl6OtvtHi6<z
zlf8%ol#s<i37NU5thfl28bE0?x#SjeRc1kvC<6n7CUcPlNC6wf@weEDQj1G-O2A1M
zLdb!nSwJ;`Hz-MgvIzsX7^47Fl{}s#1XAy($p%V~pa6=Gzr__FpPQdjnv)tIe~TwR
zzOXbg2P(rJAD@z+93NjK4|1<6hydjri1lC=$PGpO3=9l1Feh*@vIua<fyJIf66gi{
zWKfk2;V>|O{0?#}IJqW(+K3EU47H&0gRz#mh9QfomIah-YgtQJN?2=HYFI@Wni*?A
zB@|OFTMAPRTM26lsBB_uW(3Kku(UGSFx0S>urJ`KVOYp0!cfaz!db&!!&JkZ!rBXJ
zudyX|!xDK&Vs@%RqC!BWvy-zzNoH=ULNU0J0Zz}Q#UQ!-yj0!d%#_p=g%D7SOQAe7
zFD1VmlwOt65>p_RLVlhSIErC8TtTBGU!f>9r8GGeR4FFs=appUm1dS?rWV6e11M=_
zmXxNXYC=;qYF!6%3)CqZ1(nH3$$BM~1*ye)2ys0`lStD_AtWOe;tQ0Nj8Fn?tbm-I
zl&S!7c#1-P9wH?s<`iTkDrltT<R^maCy;RvA$XDpH)%n+A2}GHZpX-pApHplw<ah+
zD&364VuhsC)I2l?XfoYm^u5Ijw(un=OQb^*#{+vmP3Bv?pmH)ZF(*E;s3@`W7JEu+
zPDx^X$t~8@g8bwRO|Bw+P{QXbE=epZi3izviz5ZvK)J;YX%od4C6=TXfeOZ3%$a#5
zx0rKM^NNH(+E~Gc7V&{{7{@IxkakG07wLeKH^}HB0}#syL>Pm#ut8G5EvAxUw3L5~
z9Tc$f`FWsZTnx&^3<7M7V8|@OD8bCf!otYJ$n>wsih+RvB@=*ZGF}D-25=_01I`4X
zQaXhxo2f{ygrSzHgt3Mpg;|oJnX#5RPpyUlEX$J3RAf`a1eIll%5s!2r?53MHi4>d
zh7@*5h8o5c4oQX@h7?XP$pt34!6Xk@mKRL&fk}QaDF7w~B^gqLdbw*^YM9a(G8t-E
zK-GB+Q!Q&PTM26oYYjt+up~o@NH0?@YYA%&8xo(bhIIj34Kt`gQNspjvDUDqfod7i
z#BvU3EfWmw2`MBZN?DCka3=}Yl`4RAt-#GyNJiCH@Nx7BF3l`S)dVN^q{QM>P;v(6
z!GvN^cPbgwRw&I)%_}KRP|$#+K1gd+M*&iDgOV~Vzk*H4%}+_qfo71>;?xvPa3KIv
zmH<-+?NcS_K+`j%DGjRHAmu<oMrCnka$*j0MFDM>!Hj@a93XQcl?T`waQy+T8Nja5
z(^JUHFG;n6H-AA{DlxgFG%-g3+I`H-&jZzAAd}#xXBO)yq@)(4LYmk4c?y;JrA1%^
z6%-VbGx9T&Q(<}%Qu1>XGxHL36be#{K<!^dj{p+P&<KF(wgPoOtrF0BZW@{iU=JY4
zB6Z_5G!x*JBB<F9>Q|Jd=2XH9o{)^x;#7qaa4`((Nu@*LOd(MrIX^ckGY`@xhlU|$
z)WX~bs?|Z^fF4tN2?~x18oBvJshS`I3yM-RQuB&4%Tg6mD++QF;l{(v0hs|W@jwlg
z#1e4#uDG<IAit;t9zw;4o<68G4k}XfOY>4-#z1TQ1V}ih<R^oD1@VhbMoCFQv6a3)
zq*%`{N{6uYK?4HCCHfg9xjFhUgCJ^Pv8xA?Q$`bmOQJ=LT|zuOl$}e9ic<4Raw>Hc
zGV;q)%TkMUAUz0>f79}Fa`MYT?WF9~N>Ci6DnQa%PNhNtsCfhTR!KfMhan}uqWsd5
z%)C@Y8w}=fJw1iw{FGGPq@4WZY%42;g36MNd`K@4)bLSIfGJc^P^eZ<EGQ_-ugJ_z
z%qvmXQBY6LPmC|fNGwhTaUjKg8iHA(4o^}bqtrp|(8RoCu;R4%oc!|CB9P+DyyP5c
z`T=o4dCd^UFhVl|l=}>;K(Zi~QI&cvlJg)=)zecbhqUYwS<*8Nl)V(n6Z1+y<p!j^
zkdm2}mI?}OM1ck=S@Me%5*6UGFbm+RO`*6TF*#KSY+`O=r9vL4i&&DckeZSS>g8n?
zD}eGJxIvkioSa_->OaE_Dai+ED^f_UNK7sP>nv6T%OMEV<^_5gsh|OBG(+1F5J`o^
zB3NdGwslJ~Af+5A)Ik`Q@)VGw4_p?2QWd!31tls_)vJj#5&%j<U}u4h0AW1+cJyL4
zH8~>>T)rlPO5PGgKLKP8w2(kbozM|Ccs78?2(%nW@ip$U8^yJ7PlJa9U`26iijG2Y
zX>tZ=&;#UqP-;spR>&{H@GX{Jfu2HWaVcozNC%|4I8_1C#D^3-pmt&^JO_gOsQ{@S
z6Nwt5%FHV+0c8%9ECUVJ%wh#d%?T~8V52}tF%KIEf~Y_V2;9*I32(4sjE143LP;rT
zL@YrgBfcP~v{*+WBR;t(zqnX4L7_YoG?<f-s!&p%uTYSmlUS5l1t~K@8K_tRshb80
zV`xODBKl`K3Pq{KnN_JNpxg*jl9F1SS(KWhkdvC1UXlTE*DW?ky?2WR)Y!YlS&|YD
zk-o*5R+L&;nwpnfd5bd*Ci+XyGp{T$Co@F>qD@i37c{(=l&YWv(ys)HV<osoCB<9Z
zNV(w_C%h<wwOi95%~cLqsdbATT4vqiMk=vxv4T>NA%rrz#Re*t467itQI#flkt3)%
z>I5P{O-8VpMJ^z&D~NCd5$+(u14MX&2rmW(hA3|EsBt{FrLD;bZsTgQ7jc8+O+W+>
zsDaBB2I_>n78T_e6)A#5LCsZghcmM(Rg?Xe0HlCGw6JdR!*T^QV?_zVA_p<T%LWPM
zC{d_XW_}(-HF#9=78@iz++v5O2lS>j52$*BS<?*aU4mNV3?@8`ER1}NV3LW0g#*&s
zMkT+o@d_~uFbXhpF>)~#1%cY$pz#q%AA^Ab)DHl)zd-?6{0r3fW{zR1WvOKabuuLx
zYT0TSQ<x+fYT0WTK`roFj!cGH&Ju<cmS)Bhh7{Ij#uT<WOtoAoY%UD3yK1>>xN5j1
z8EScIc(NF4*lO5oI8xYCIC@!Pm}+@z`D%Ewm}<CESR@%haw+U7oV_fy{58BaEFugw
ze4x&)I76*K4MPf-Btr^!HdE226!sc^afTX!EM{?rTEQBD1uQj!3mIz#YM4^kB^YW1
z;+gZTpz3+DnTno3)MHm!Bd~yVAw#V|9!CjV3NNUSP$N*oS;K9^P-s=d8O)%`msrIL
zZJ9%dwm?Oa0%#Zr-n2?k(8$zF)dLq+xN|c&`xk*nXdta7qy{1^8Z|T%G!^pmpv{WJ
zypq(syhMe9#FCQKqC9whhh${PQ~<d9?2!l>u7wP|`ec@6=A{>x<mc%rgk)qED}b_a
zW)WyWDI-+@R@j0a1FIkMON)|IVcl@>pbVsYkXZ~H6b6r2E5JPrNv6dK(9Z5qtVtn7
zAE@?#g+0_$s8t|rLIm3KEl({<RRDE`K-F4iUO_2ZjSC%<gIB8xh^(!nkeE}Fkzbmg
zp-=*f7D!)0p*%Au2i)=m#|X$hAPkK&a8g6GM&T_Z@ZexlszO0gDrk}g-XnrE%s>Xf
zu&zQvLQ!Huf`Y9=P-c2Yi9%v=GN`?e=qMF~V;{wKkY<p^l+<LfPM1`8!x7e1!lMhh
zo&swRK(4qF^U`zR9WETEz#WrXmYN3{!GKK@fckvy0l^BOng%}p0#*j?f$AtEL8gg7
zo&aIUOcSV;sH2bxipErk=EM?cr4PzY&}d3cQ7Fz#&r8gKy8vWDQEFmx256kBq*x(0
zu_80KG*<!C2mudP!Lm()hhc(3az<iaUTO|z+EUN}SMt#2WI<_BDlBEdD(j;BTu@~V
zjZts`f#Qfn+yx8N=OEveLYhsmiB_~`Qc|S?_A&>WkFm5(K#gF~Ol(mlq%i{S5G7Qr
zdw_cmK8EVG2?`|{i6yXt0usO=55ip!2~lu{DN!iTFU?6&NJ>@6Pby9=Dg#Xxl_=mS
zzQ88jVk%0!#hj9we2WJ;VclZ#FucX&W2nhg6b7o0!x<PDUZ#M?tXOXG7lBfGJUpe}
z;!Z&%;G$4ah0a<MpI%U`$#RP?za%5IC_V)`v04-fGAs&2M1u%x5Mcu%>_CJA$O85v
z(74nserV|&4=tT<Nr2L1d`fCgVkKnG7jCE_$V$QF%H*8PWbk-gd~rr*TFEWml6){1
zI=%)Pq}CKF@&(!C2hzd?PS{{2w>V(MR8as(J`hA891Hf80L;R8NRSub;)ID5-{OTa
z<3U5$w>V(jTWo3IW+$lC1#WgmaY6c*CGi=CQT#AvP$O@#<)@_;r(z7Ui-6i9Xlje^
zgW4pZ78!#w2QvpF2O|eF3$qY22Qvo?8)(#zk?AkXe>M&-IGc%GfR%@lkCBU!hmnVo
zgOLjanK+oan2NGMA<tL@n!M4JEz$&sPJDb?X-R2OYJ7YVX!!gVYhGz?K_z$;GfJd5
zIkTWr4-yc1knzc4$atqPWVis-+=NZm6oco<z|(4w$uv;w=av9OA$TGUG#XS4nU&xK
zS<VNMD~C?zgQw+g!33eR>5x_=xB&^3frSt_1x1NL%s?N;1`oa7VuQ@3gQp3=qod$v
z8aNz~8iMdChFcsqkSPi~&}eZnXtIHYlZT0ek%viwNq~`u5rlcz1zZ{ab18Gg008yP
BMTGzW

literal 0
HcmV?d00001

diff --git a/python3_samples/utils/configfiles.py b/python3_samples/utils/configfiles.py
new file mode 100755
index 0000000..cf99e11
--- /dev/null
+++ b/python3_samples/utils/configfiles.py
@@ -0,0 +1,98 @@
+"""
+Provide functions for reading and parsing configuration files.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+import json
+import os
+
+from pycbc.workflow import WorkflowConfigParser
+from pycbc.distributions import read_params_from_config
+
+from .staticargs import amend_static_args, typecast_static_args
+
+
+# -----------------------------------------------------------------------------
+# FUNCTION DEFINITIONS
+# -----------------------------------------------------------------------------
+
def read_ini_config(file_path):
    """
    Read in a `*.ini` config file, which is used mostly to specify the
    waveform simulation (for example, the waveform model, the parameter
    space for the binary black holes, etc.) and return its contents.

    Args:
        file_path (str): Path to the `*.ini` config file to be read in.

    Returns:
        A tuple `(variable_arguments, static_arguments)` where

        * `variable_arguments` is a list of all the parameters which
          get randomly sampled from the specified distributions,
          usually using an instance of
          :class:`utils.waveforms.WaveformParameterGenerator`.
        * `static_arguments` is a dictionary containing the keys and
          values of the parameters that are the same for each example
          that is generated (i.e., the non-physical parameters such as
          the waveform model and the sampling rate).

    Raises:
        FileNotFoundError: If `file_path` does not point to an existing
            file. (`FileNotFoundError` subclasses `OSError`, which is
            the same class as `IOError` in Python 3, so any existing
            `except IOError` handlers keep working.)
    """

    # Fail early with a precise, Python 3-idiomatic exception type
    if not os.path.exists(file_path):
        raise FileNotFoundError('Specified configuration file does not '
                                'exist: {}'.format(file_path))

    # Set up a parser for the PyCBC config file
    workflow_config_parser = WorkflowConfigParser(configFiles=[file_path])

    # Read the variable_arguments and static_arguments using the parser
    variable_arguments, static_arguments = \
        read_params_from_config(workflow_config_parser)

    # Typecast and amend the static arguments
    static_arguments = typecast_static_args(static_arguments)
    static_arguments = amend_static_args(static_arguments)

    return variable_arguments, static_arguments
+
+
def read_json_config(file_path):
    """
    Read in a `*.json` config file, which is used to specify the
    sample generation process itself (for example, the number of
    samples to generate, the number of concurrent processes to use,
    etc.) and return its contents.

    Args:
        file_path (str): Path to the `*.json` config file to be read in.

    Returns:
        A `dict` containing the contents of the given JSON file.

    Raises:
        FileNotFoundError: If `file_path` does not point to an existing
            file. (`FileNotFoundError` subclasses `OSError` / `IOError`,
            so any existing `except IOError` handlers keep working.)
        KeyError: If one or more required keys are missing from the
            configuration file.
    """

    # Fail early with a precise, Python 3-idiomatic exception type
    if not os.path.exists(file_path):
        raise FileNotFoundError('Specified configuration file does not '
                                'exist: {}'.format(file_path))

    # Open the config file and load the JSON contents as a dict
    with open(file_path, 'r') as json_file:
        config = json.load(json_file)

    # Define the required keys for the config file in a set
    required_keys = {'background_data_directory', 'dq_bits', 'inj_bits',
                     'waveform_params_file_name', 'max_runtime',
                     'n_injection_samples', 'n_noise_samples', 'n_processes',
                     'random_seed', 'output_file_name'}

    # Make sure no required keys are missing. Dict views support set
    # difference directly; sorting makes the error message deterministic
    # instead of depending on set iteration order.
    missing_keys = required_keys - config.keys()
    if missing_keys:
        raise KeyError('Missing required key(s) in JSON configuration file: '
                       '{}'.format(', '.join(sorted(missing_keys))))

    return config
diff --git a/python3_samples/utils/hdffiles.py b/python3_samples/utils/hdffiles.py
new file mode 100755
index 0000000..47310ef
--- /dev/null
+++ b/python3_samples/utils/hdffiles.py
@@ -0,0 +1,602 @@
+"""
+Provide classes and functions for reading and writing HDF files.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+from __future__ import print_function
+
+import numpy as np
+import h5py
+import os
+import sys
+
+from pycbc.catalog import Catalog
+from pycbc.types.timeseries import TimeSeries
+from lal import LIGOTimeGPS
+
+
+# -----------------------------------------------------------------------------
+# FUNCTION DEFINITIONS
+# -----------------------------------------------------------------------------
+
def get_file_paths(directory, extensions=None):
    """
    Take a directory and return the paths to all files in this
    directory and its subdirectories. Optionally filter out only
    files with specific extensions.

    Args:
        directory (str): Path to a directory.
        extensions (list): List of allowed file extensions,
            for example: `['hdf', 'h5']`.

    Returns:
        List of paths of all files matching the above descriptions.
    """

    # Recursively collect the path of every file below `directory`
    file_paths = [os.path.join(root, file_name)
                  for root, _, file_names in os.walk(directory)
                  for file_name in file_names]

    # Without an extension filter, we are already done
    if extensions is None:
        return file_paths

    # Otherwise, keep only the paths that end in one of the given
    # extensions (str.endswith accepts a tuple of suffixes)
    return [path for path in file_paths if path.endswith(tuple(extensions))]
+
+
+# -----------------------------------------------------------------------------
+
+
def get_strain_from_hdf_file(hdf_file_paths,
                             gps_time,
                             interval_width,
                             original_sampling_rate=4096,
                             target_sampling_rate=4096,
                             as_pycbc_timeseries=False):
    """
    For a given `gps_time`, select the interval of length
    `interval_width` (centered around `gps_time`) from the HDF files
    specified in `hdf_file_paths`, and resample them to the given
    `target_sampling_rate`.

    Args:
        hdf_file_paths (dict): A dictionary with keys `{'H1', 'L1'}`,
            which holds the paths to the HDF files containing the
            interval around `gps_time`.
        gps_time (int): A (valid) background noise time (GPS timestamp).
        interval_width (int): The length of the strain sample (in
            seconds) to be selected from the HDF files.
        original_sampling_rate (int): The original sampling rate (in
            Hertz) of the HDF files sample. Default is 4096.
        target_sampling_rate (int): The sampling rate (in Hertz) to
            which the strain should be down-sampled (if desired). Must
            be a divisor of the `original_sampling_rate`.
        as_pycbc_timeseries (bool): Whether to return the strain as a
            dict of numpy arrays or as a dict of objects of type
            `pycbc.types.timeseries.TimeSeries`.

    Returns:
        A dictionary with keys `{'H1', 'L1'}`. For each key, the
        dictionary contains a strain sample (as a numpy array) of the
        given length, centered around `gps_time`, (down)-sampled to
        the desired `target_sampling_rate`.
    """

    # -------------------------------------------------------------------------
    # Perform some basic sanity checks on the arguments
    # -------------------------------------------------------------------------

    assert isinstance(gps_time, int), \
        'time is not an integer!'
    assert isinstance(interval_width, int), \
        'interval_width is not an integer'
    assert isinstance(original_sampling_rate, int), \
        'original_sampling_rate is not an integer'
    assert isinstance(target_sampling_rate, int), \
        'target_sampling_rate is not an integer'
    assert original_sampling_rate % target_sampling_rate == 0, \
        'Invalid target_sampling_rate: Not a divisor of ' \
        'original_sampling_rate!'

    # -------------------------------------------------------------------------
    # Read out the strain from the HDF files
    # -------------------------------------------------------------------------

    # Compute the offset = half the interval width (intervals are centered
    # around the given gps_time)
    offset = int(interval_width / 2)

    # Compute the resampling factor (integer, by the divisibility assert
    # above; e.g. 4096 Hz -> 2048 Hz gives a factor of 2)
    sampling_factor = int(original_sampling_rate / target_sampling_rate)

    # Store the sample we have selected from the HDF files
    sample = dict()

    # Loop over both detectors
    for detector in ('H1', 'L1'):

        # Extract the path to the HDF file
        file_path = hdf_file_paths[detector]

        # Read in the HDF file and select the noise sample
        with h5py.File(file_path, 'r') as hdf_file:

            # Get the start_time and compute array indices.
            # NOTE(review): the indices are not clipped — this assumes the
            # full interval [gps_time - offset, gps_time + offset] lies
            # inside this single HDF file; otherwise the slice below is
            # silently truncated. Callers are expected to pass only times
            # validated by NoiseTimeline.is_valid(); confirm.
            start_time = int(hdf_file['meta']['GPSstart'][()])
            start_idx = \
                (gps_time - start_time - offset) * original_sampling_rate
            end_idx = \
                (gps_time - start_time + offset) * original_sampling_rate

            # Select the sample from the strain
            strain = np.array(hdf_file['strain']['Strain'])
            sample[detector] = strain[start_idx:end_idx]

            # Down-sample the selected sample to the target_sampling_rate.
            # NOTE(review): this is plain decimation (keep every k-th
            # sample) with no anti-aliasing low-pass filter beforehand —
            # verify this is intentional before reusing for analyses that
            # are sensitive to aliasing.
            sample[detector] = sample[detector][::sampling_factor]

    # -------------------------------------------------------------------------
    # Convert to PyCBC time series, if necessary
    # -------------------------------------------------------------------------

    # If we just want a plain numpy array, we can return it right away
    if not as_pycbc_timeseries:
        return sample

    # Otherwise we need to convert the numpy array to a time series first
    else:

        # Initialize an empty dict for the time series results
        timeseries = dict()

        # Convert strain of both detectors to a TimeSeries object
        for detector in ('H1', 'L1'):

            # The epoch is the GPS time of the first sample, i.e., the
            # left edge of the selected interval
            timeseries[detector] = \
                TimeSeries(initial_array=sample[detector],
                           delta_t=1.0/target_sampling_rate,
                           epoch=LIGOTimeGPS(gps_time - offset))

        return timeseries
+
+
+# -----------------------------------------------------------------------------
+# CLASS DEFINITIONS
+# -----------------------------------------------------------------------------
+
class NoiseTimeline:
    """
    A ``NoiseTimeline`` object stores information about the data
    quality and hardware injection flags of the files in the given
    `background_data_directory`. This information is read in only
    once at the beginning of the sample generation and can then be
    utilized to quickly sample "valid" noise times, that is, GPS times
    where the files in `background_data_directory` provide data which
    pass certain desired quality criteria.

    Args:
        background_data_directory (str): Path to the directory which
            contains the raw data (HDF files). These files may also be
            distributed over several subdirectories.
        random_seed (int): Seed for the random number generator which
            is used for sampling valid noise times.
        verbose (bool): Whether or not this instance should print
            logging information to the command line.
    """

    def __init__(self,
                 background_data_directory,
                 random_seed=42,
                 verbose=False):

        # Store the directory and sampling rate of the raw HDF files
        self.background_data_directory = background_data_directory

        # Print debug messages or not?
        self.verbose = verbose

        # Create a new random number generator with the given seed to
        # decouple this from the global numpy RNG (for reproducibility)
        self.rng = np.random.RandomState(seed=random_seed)

        # Get the list of all HDF files in the specified directory
        self.vprint('Getting HDF file paths...', end=' ')
        self.hdf_file_paths = get_file_paths(self.background_data_directory,
                                             extensions=['hdf', 'h5'])
        self.vprint('Done!')

        # Read in the meta information and masks from HDF files
        self.vprint('Reading information from HDF files', end=' ')
        self.hdf_files = self._get_hdf_files()
        self.vprint('Done!')

        # Build the timeline for these HDF files
        self.vprint('Building timeline object...', end=' ')
        self.timeline = self._build_timeline()
        self.vprint('Done!')

    # -------------------------------------------------------------------------

    def vprint(self, string, *args, **kwargs):
        """
        Verbose printing: Wrapper around `print()` to only call it if
        `self.verbose` is set to true.

        Args:
            string (str): String to be printed if `self.verbose`
                is `True`.
            *args: Arguments passed to `print()`.
            **kwargs: Keyword arguments passed to `print()`.
        """

        if self.verbose:
            print(string, *args, **kwargs)
            # Flush so progress messages show up immediately even when
            # stdout is block-buffered (e.g., redirected to a file)
            sys.stdout.flush()

    # -------------------------------------------------------------------------

    def _get_hdf_files(self):
        """
        Open every HDF file in `self.hdf_file_paths` once and collect
        its meta information (start time, detector, duration) together
        with the injection and data-quality masks.

        Returns:
            A list of dicts (one per file), sorted by start time.
        """

        # Keep track of all the files whose information we need to store
        hdf_files = []

        # Open every HDF file once to read in the meta information as well
        # as the injection and data quality (DQ) masks
        n_files = len(self.hdf_file_paths)
        for i, hdf_file_path in enumerate(self.hdf_file_paths):
            with h5py.File(hdf_file_path, 'r') as f:

                # Progress counter (i is 0-based, so this displays
                # "(0/n)" for the first file)
                self.vprint('({:>4}/{:>4})...'.format(i, n_files), end=' ')

                # Select necessary information from the HDF file
                start_time = f['meta']['GPSstart'][()]
                detector = f['meta']['Detector'][()].decode('utf-8')
                duration = f['meta']['Duration'][()]
                inj_mask = np.array(f['quality']['injections']['Injmask'],
                                    dtype=np.int32)
                dq_mask = np.array(f['quality']['simple']['DQmask'],
                                   dtype=np.int32)

                # Perform some basic sanity checks
                assert detector in ['H1', 'L1'], \
                    'Invalid detector {}!'.format(detector)
                assert duration == len(inj_mask) == len(dq_mask), \
                    'Length of InjMask or DQMask does not match the duration!'

                # Collect this information in a dict
                hdf_files.append(dict(file_path=hdf_file_path,
                                      start_time=start_time,
                                      detector=detector,
                                      duration=duration,
                                      inj_mask=inj_mask,
                                      dq_mask=dq_mask))

                # ANSI escape: move the cursor 15 columns left and clear to
                # the end of the line, so the next counter overwrites this one
                self.vprint('\033[15D\033[K', end='')

        # Sort the read in HDF files by start time and return them
        self.vprint('({:>4}/{:>4})...'.format(n_files, n_files), end=' ')
        return sorted(hdf_files, key=lambda _: _['start_time'])

    # -------------------------------------------------------------------------

    def _build_timeline(self):
        """
        Merge the per-file injection and data-quality masks into four
        observation-run-long arrays (one inj and one DQ mask per
        detector), indexed by seconds since `self.gps_start_time`.

        Returns:
            A dict with keys `{'h1_inj_mask', 'l1_inj_mask',
            'h1_dq_mask', 'l1_dq_mask'}`.
        """

        # Get the size of the arrays that we need to initialize
        n_entries = self.gps_end_time - self.gps_start_time

        # Initialize the empty timeline (zeros = no quality information,
        # which will never pass the bit checks in is_valid())
        timeline = dict(h1_inj_mask=np.zeros(n_entries, dtype=np.int32),
                        l1_inj_mask=np.zeros(n_entries, dtype=np.int32),
                        h1_dq_mask=np.zeros(n_entries, dtype=np.int32),
                        l1_dq_mask=np.zeros(n_entries, dtype=np.int32))

        # Add information from HDF files to timeline
        for hdf_file in self.hdf_files:

            # Define some shortcuts
            detector = hdf_file['detector']
            dq_mask = hdf_file['dq_mask']
            inj_mask = hdf_file['inj_mask']

            # Map start/end from GPS time to array indices
            idx_start = hdf_file['start_time'] - self.gps_start_time
            idx_end = idx_start + hdf_file['duration']

            # Add the mask information to the correct detector
            # (detector is guaranteed to be 'H1' or 'L1' by the assert
            # in _get_hdf_files)
            if detector == 'H1':
                timeline['h1_inj_mask'][idx_start:idx_end] = inj_mask
                timeline['h1_dq_mask'][idx_start:idx_end] = dq_mask
            else:
                timeline['l1_inj_mask'][idx_start:idx_end] = inj_mask
                timeline['l1_dq_mask'][idx_start:idx_end] = dq_mask

        # Return the completed timeline
        return timeline

    # -------------------------------------------------------------------------

    def is_valid(self,
                 gps_time,
                 delta_t=16,
                 dq_bits=(0, 1, 2, 3),
                 inj_bits=(0, 1, 2, 4)):
        """
        For a given `gps_time`, check if is a valid time to sample
        noise from by checking if all data points in the interval
        `[gps_time - delta_t, gps_time + delta_t]` have the specified
        `dq_bits` and `inj_bits` set.

        .. seealso:: For more information about the `dq_bits` and
            `inj_bits`, check out the website of the GW Open Science
            Center, which explains these for the case of O1:

                https://www.gw-openscience.org/archive/dataset/O1

        Args:
            gps_time (int): The GPS time whose validity we are checking.
            delta_t (int): The number of seconds around `gps_time`
                which we also want to be valid (because the sample will
                be an interval).
            dq_bits (tuple): The Data Quality Bits which one would like
                to require (see note above).
                *For example:* `dq_bits=(0, 1, 2, 3)` means that the
                data quality needs  to pass all tests up to `CAT3`.
            inj_bits (tuple): The Injection Bits which one would like
                to require (see note above).
                *For example:* `inj_bits=(0, 1, 2, 4)` means that only
                continuous wave (CW) injections are permitted; all
                recordings containing any of other type of injection
                will be invalid for sampling.

        Returns:
            `True` if `gps_time` is valid, otherwise `False`.
        """

        # ---------------------------------------------------------------------
        # Perform some basic sanity checks
        # ---------------------------------------------------------------------

        assert isinstance(gps_time, int), \
            'Received GPS time that is not an integer!'
        assert delta_t >= 0, \
            'Received an invalid value for delta_t!'
        assert set(dq_bits).issubset(set(range(7))), \
            'Invalid Data Quality bit specification passed to is_valid()!'
        assert set(inj_bits).issubset(set(range(5))), \
            'Invalid Injection bit specification passed to is_valid()!'

        # ---------------------------------------------------------------------
        # Check if given time is too close to a real event
        # ---------------------------------------------------------------------

        # Get GPS times of all confirmed mergers
        # NOTE(review): Catalog() is re-instantiated on every call; if
        # this turns out to be slow, the event times could be cached.
        catalog = Catalog()
        real_event_times = [catalog.mergers[_].time for _ in catalog.names]

        # Check if gps_time is too close to any of these times
        if any(abs(gps_time - _) <= delta_t for _ in real_event_times):
            return False

        # ---------------------------------------------------------------------
        # Check if the given time is too close to the edge within its HDF file
        # ---------------------------------------------------------------------

        # Loop over all HDF files to find the one that contains the given
        # gps_time. Here, we do not distinguish between H1 and L1, because
        # we assume that the files for the detectors are aligned on a grid.
        for hdf_file in self.hdf_files:

            # Get the start and end time for the current HDF file
            start_time = hdf_file['start_time']
            end_time = start_time + hdf_file['duration']

            # Find the file that contains the given gps_time
            if start_time < gps_time < end_time:

                # Check if it is far away enough from the edges: If not, it
                # is not a valid time; otherwise we can still stop searching
                if not start_time + delta_t < gps_time < end_time - delta_t:
                    return False
                else:
                    break

        # ---------------------------------------------------------------------
        # Select the environment around the specified time
        # ---------------------------------------------------------------------

        # Map time to indices
        idx_start = self.gps2idx(gps_time) - delta_t
        idx_end = self.gps2idx(gps_time) + delta_t

        # Select the mask intervals
        environment = \
            dict(h1_inj_mask=self.timeline['h1_inj_mask'][idx_start:idx_end],
                 l1_inj_mask=self.timeline['l1_inj_mask'][idx_start:idx_end],
                 h1_dq_mask=self.timeline['h1_dq_mask'][idx_start:idx_end],
                 l1_dq_mask=self.timeline['l1_dq_mask'][idx_start:idx_end])

        # ---------------------------------------------------------------------
        # Data Quality Check
        # ---------------------------------------------------------------------

        # Compute the minimum data quality
        min_dq = sum([2**i for i in dq_bits])

        # Perform the DQ check for H1.
        # NOTE(review): this numeric `> min_dq` comparison looks suspect:
        # a mask of exactly min_dq (all requested bits set, nothing more)
        # is rejected, while e.g. a single *higher* bit (16 > 15) passes
        # without any requested bit set. A bitwise test such as
        # `(mask & min_dq) == min_dq` appears to be the intended check —
        # confirm against the upstream sample-generation code before
        # changing, since generated datasets depend on this behavior.
        environment['h1_dq_mask'] = environment['h1_dq_mask'] > min_dq
        if not np.all(environment['h1_dq_mask']):
            return False

        # Perform the DQ check for L1 (same caveat as for H1 above)
        environment['l1_dq_mask'] = environment['l1_dq_mask'] > min_dq
        if not np.all(environment['l1_dq_mask']):
            return False

        # ---------------------------------------------------------------------
        # Injection Check
        # ---------------------------------------------------------------------

        # Define an array of ones that matches the length of the environment.
        # This  is needed because for a given number N, we  can check if the
        # K-th bit is set by evaluating the expression: N & (1 << K)
        ones = np.ones(2 * delta_t, dtype=np.int32)

        # For each requested injection bit, check if it is set for the whole
        # environment (for both H1 and L1).
        # NOTE(review): this assumes a *set* Injmask bit means the data is
        # free of that injection type (GWOSC convention) — verify against
        # the GWOSC documentation for the run being used.
        for i in inj_bits:

            # Perform the injection check for H1
            if not np.all(np.bitwise_and(environment['h1_inj_mask'],
                                         np.left_shift(ones, i))):
                return False

            # Perform the injection check for L1
            if not np.all(np.bitwise_and(environment['l1_inj_mask'],
                                         np.left_shift(ones, i))):
                return False

        # If we have not returned False yet, the time must be valid!
        return True

    # -------------------------------------------------------------------------

    def sample(self,
               delta_t=16,
               dq_bits=(0, 1, 2, 3),
               inj_bits=(0, 1, 2, 4),
               return_paths=False):

        """
        Randomly sample a time from `[gps_start_time, gps_end_time]`
        which passes the :func:`NoiseTimeline.is_valid()` test.

        .. note:: This loops until a valid time is found; if the given
            quality constraints admit no valid time at all, it will
            never return.

        Args:
            delta_t (int): For an explanation, see
                :func:`NoiseTimeline.is_valid()`.
            dq_bits (tuple): For an explanation, see
                :func:`NoiseTimeline.is_valid()`.
            inj_bits (tuple): For an explanation, see
                :func:`NoiseTimeline.is_valid()`.
            return_paths (bool): Whether or not to return the paths to
                the HDF files containing the `gps_time`.

        Returns:
            A valid GPS time and optionally a `dict` with the file
            paths to the HDF files containing that GPS time (keys will
            correspond to the different detectors).
        """

        # Keep sampling random times until we find a valid one...
        while True:

            # Randomly choose a GPS time between the start and end
            gps_time = self.rng.randint(self.gps_start_time + delta_t,
                                        self.gps_end_time - delta_t)

            # If it is a valid time, return it
            if self.is_valid(gps_time=gps_time, delta_t=delta_t,
                             dq_bits=dq_bits, inj_bits=inj_bits):
                if return_paths:
                    return gps_time, self.get_file_paths_for_time(gps_time)
                else:
                    return gps_time

    # -------------------------------------------------------------------------

    def get_file_paths_for_time(self, gps_time):
        """
        For a given (valid) GPS time, find the two HDF files (for the
        two detectors H1 and L1) which contain the corresponding strain.

        Args:
            gps_time (int): A valid GPS time stamp.

        Returns:
            A dictionary with keys `{'H1', 'L1'}` containing the paths
            to the HDF files, or None if no such files could be found.
        """

        # Keep track of the results, i.e., the paths to the HDF files
        result = dict()

        # Loop over all HDF files to find the ones containing the given time
        for hdf_file in self.hdf_files:

            # Get the start and end time for the current HDF file
            start_time = hdf_file['start_time']
            end_time = start_time + hdf_file['duration']

            # Check if the given GPS time falls into the interval of the
            # current HDF file, and if so, store the file path for it
            if start_time < gps_time < end_time:
                result[hdf_file['detector']] = hdf_file['file_path']

            # If both files were found, we are done!
            if 'H1' in result.keys() and 'L1' in result.keys():
                return result

        # If we didn't find both files, return None
        return None

    # -------------------------------------------------------------------------

    def idx2gps(self, idx):
        """
        Map an index to a GPS time by correcting for the start time of
        the observation run, as determined from the HDF files.

        Args:
            idx (int): An index of a time series array (covering an
                observation run).

        Returns:
            The corresponding GPS time.
        """

        return idx + self.gps_start_time

    # -------------------------------------------------------------------------

    def gps2idx(self, gps):
        """
        Map an GPS time to an index by correcting for the start time of
        the observation run, as determined from the HDF files.

        Args:
            gps (int): A GPS time belonging to a point in time between
                the start and end of an observation run.

        Returns:
            The corresponding time series index.
        """

        return gps - self.gps_start_time

    # -------------------------------------------------------------------------

    @property
    def gps_start_time(self):
        """
        The GPS start time of the observation run.
        """

        # self.hdf_files is sorted by start_time, so the first entry
        # marks the beginning of the run
        return self.hdf_files[0]['start_time']

    # -------------------------------------------------------------------------

    @property
    def gps_end_time(self):
        """
        The GPS end time of the observation run.
        """

        # The last file (sorted by start_time) ends the run
        return self.hdf_files[-1]['start_time'] + \
            self.hdf_files[-1]['duration']
diff --git a/python3_samples/utils/progressbar.py b/python3_samples/utils/progressbar.py
new file mode 100755
index 0000000..e7c644a
--- /dev/null
+++ b/python3_samples/utils/progressbar.py
@@ -0,0 +1,230 @@
+"""
+Provide a custom ProgressBar class, which provides a wrapper around
+an iterable that automatically produces a progressbar when iterating
+over it.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+import sys
+import time
+from threading import Event, Thread
+
+
+# -----------------------------------------------------------------------------
+# CLASS DEFINITIONS
+# -----------------------------------------------------------------------------
+
class RepeatedTimer:
    """
    Invoke `func(*args, **kwargs)` once every `interval` seconds in a
    background thread, until `stop()` is called.
    Source: https://stackoverflow.com/a/33054922/4100721.
    """

    def __init__(self, interval, func, *args, **kwargs):

        # Store the callback and its (fixed) arguments
        self.func = func
        self.args = args
        self.kwargs = kwargs

        # Remember the schedule parameters: the tick length and the
        # reference point in time that ticks are aligned to
        self.interval = interval
        self.start = time.time()

        # The event doubles as both the "sleep" mechanism and the
        # cancellation flag; the worker thread starts immediately
        self.event = Event()
        self.thread = Thread(target=self._target)
        self.thread.start()

    def _target(self):
        # Wait until the next scheduled tick. A truthy return value from
        # wait() means the event was set, i.e., stop() was called.
        while True:
            if self.event.wait(self._time):
                break
            self.func(*self.args, **self.kwargs)

    @property
    def _time(self):
        # Seconds remaining until the next multiple of `interval` since
        # `start` (keeps ticks aligned to the original schedule even if
        # the callback itself takes time)
        elapsed = time.time() - self.start
        return self.interval - (elapsed % self.interval)

    def stop(self):
        # Signal the worker thread to exit and wait for it to terminate
        self.event.set()
        self.thread.join()
+
+
+# -----------------------------------------------------------------------------
+
+
class ProgressBar:
    """
    :class:`ProgressBar` objects are a custom way to "decorate"
    a given iterable to produce a progress bar when looping over it.
    This class allows to also produce some output with the progress
    bar, such as information about the element of the iterable that is
    currently being processed.

    Args:
        iterable (iterable): The iterable to be "decorated" with
            a progressbar. Must support `len()`.
        bar_length (int): Length of the bar itself (in characters).
        auto_update (bool): Whether or not to automatically write
            the updated progressbar to the command line.
    """

    def __init__(self,
                 iterable,
                 bar_length=50,
                 auto_update=False):

        self.iterable = iterable
        self.max_value = len(iterable)
        self.bar_length = bar_length
        self.auto_update = auto_update

        # Set by __iter__() once iteration actually starts
        self.start_time = None
        self.last_timediff = None

        # Initial bar string; index -1 renders an empty (0%) bar
        self.progressbar = self.get_progressbar(-1)

        # Extra output lines printed below the bar, and the background
        # timer that refreshes the elapsed-time display once per second
        self.extras_ = []
        self.scheduler = None

    # -------------------------------------------------------------------------

    def __iter__(self):
        """
        Iterate over `self.iterable`, yielding its values while keeping
        the progress bar string (and optionally stdout) up to date.
        """

        # Start the stop watch as soon as we start iterating
        self.start_time = time.time()

        # Initialize index to 0 to ensure it is always defined
        index = 0

        # Start the scheduler that will update the elapsed time every
        # second (`update` reads `index` late-bound from this closure,
        # so it always sees the current position)
        def update():
            self.progressbar = self.get_progressbar(index)
            self.write(extras=self.extras_)
        self.scheduler = RepeatedTimer(1, update)

        # Ensure the background scheduler is stopped again even if the
        # consumer of this generator raises or abandons it early --
        # otherwise its timer thread would keep running (and printing)
        # forever. The finally-clause also runs on GeneratorExit.
        try:

            # Actually loop over the iterable
            for index, value in enumerate(self.iterable):

                # Update the last_timediff, which is used to estimate when
                # we will be done
                self.last_timediff = self.get_timediff()

                # Update the progressbar string
                self.progressbar = self.get_progressbar(index)

                # If we are doing auto-updates (i.e. no extras), we can
                # already write the progress bar to stdout
                if self.auto_update:
                    self.write()

                # Finally, actually yield the current value of the iterable
                yield value

            # Update our progress bar string one last time to indicate we
            # have made it to 100%
            self.progressbar = self.get_progressbar(self.max_value)

        finally:
            # Stop our background scheduler
            self.scheduler.stop()

    # -------------------------------------------------------------------------

    def get_timediff(self):
        """
        Returns: Time elapsed since progress bar was instantiated,
        or None if iteration has not started yet.
        """

        if self.start_time is not None:
            return time.time() - self.start_time
        else:
            return None

    # -------------------------------------------------------------------------

    def get_eta(self,
                percent):
        """
        Get the estimated time of arrival (ETA) by linear interpolation.

        Args:
            percent (float): Current progress in percent.

        Returns:
            Estimated time of arrival in seconds, or None if no
            estimate is possible yet (no progress or no timing data).
        """

        if self.last_timediff is not None and percent != 0:
            return max(0, self.last_timediff / percent - self.get_timediff())
        else:
            return None

    # -------------------------------------------------------------------------

    def get_progressbar(self,
                        index):
        """
        Construct the progressbar itself (bar, ETA, etc.).

        Args:
            index (int): Current index of the iterable; used to compute
                the current progress percentage.

        Returns:
            A string containing the basic progress bar.
        """

        # Compute the current progress; guard against division by zero
        # for empty iterables (treated as immediately complete)
        if self.max_value > 0:
            percent = float(index) / self.max_value
        else:
            percent = 1.0

        # Construct the actual progress bar
        bar = '=' * int(round(percent * self.bar_length))
        spaces = '-' * (self.bar_length - len(bar))

        # Get the elapsed time as a proper string
        elapsed_time = self.get_timediff()
        if elapsed_time is None:
            elapsed_time = 0
        elapsed_time = '{:.2f}'.format(elapsed_time)

        # Get the expected time of arrival (ETA) as a proper string
        eta = self.get_eta(percent)
        if eta is None:
            eta = '?'
        else:
            eta = '{:.2f}'.format(eta)

        # Construct the actual progress bar string
        out = "[{0}] {1:>3}% ({2:>{3}}/{4:>{3}}) | Elapsed: {5} | ETA: {6}"
        progressbar = out.format(bar + spaces, round(percent * 100),
                                 index, len(str(self.max_value)),
                                 self.max_value, elapsed_time, eta)

        return progressbar

    # -------------------------------------------------------------------------

    def write(self,
              clear_line=False,
              extras=()):
        """
        Construct the progress bar and write it to the command line.

        Args:
            clear_line (bool): Whether or not to clear the last line.
            extras (list): List of additional outputs (e.g., the file
                that is currently being downloaded).
        """

        # Remember the extras so the background scheduler can re-print
        # them on its next refresh
        self.extras_ = extras

        # Move the cursor up and clear the lines occupied by the extras
        # from the previous write (ANSI escapes: \r = carriage return,
        # \033[K = clear to end of line, \033[F = cursor up one line)
        if extras:
            for _ in range(len(extras)):
                sys.stdout.write('\r\033[K\033[F')
        if clear_line:
            sys.stdout.write('\r\033[K\033[F')

        # Actually write the finished progress bar to the command line
        sys.stdout.write('\r\033[K\033[F')
        sys.stdout.write('\r\033[K\033[K')
        sys.stdout.write(self.progressbar)
        if extras:
            sys.stdout.write('\n' + '\n'.join(extras))
        if not clear_line:
            sys.stdout.write('\n')
        sys.stdout.flush()
diff --git a/python3_samples/utils/samplefiles.py b/python3_samples/utils/samplefiles.py
new file mode 100755
index 0000000..4a6de14
--- /dev/null
+++ b/python3_samples/utils/samplefiles.py
@@ -0,0 +1,368 @@
+"""
+Provide tools for writing and reading the sample HDF files produced by
+the sample generation.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+import numpy as np
+import pandas as pd
+import h5py
+
+from six import iteritems
+from pprint import pformat
+from warnings import warn
+
+
+# -----------------------------------------------------------------------------
+# CLASS DEFINITIONS
+# -----------------------------------------------------------------------------
+
class SampleFile:
    """
    :class:`SampleFile` objects serve as an abstraction for the result
    files of the sample generation.

    Args:
        data (dict): A dictionary containing the following keys:

            .. code-block:: python

               {'command_line_arguments', 'static_arguments',
                'injection_samples', 'noise_samples',
                'injection_parameters', 'normalization_parameters'}

            The value for every key must again be a dictionary relating
            the names of sample parameters (e.g., 'h1_snr') to a numpy
            array containing the values for that parameter.
    """

    # The keys that every `data` dictionary must provide
    REQUIRED_KEYS = ('command_line_arguments', 'static_arguments',
                     'injection_samples', 'noise_samples',
                     'injection_parameters', 'normalization_parameters')

    def __init__(self,
                 data=None):

        # Perform sanity checks on data
        self.__check_data(data)

        # If we have received data, store it; else initialize an empty dict
        # for every required key
        if data is not None:
            self.data = data
        else:
            self.data = {key: dict() for key in self.REQUIRED_KEYS}

    # -------------------------------------------------------------------------

    @staticmethod
    def __check_data(data):
        """
        Run some sanity checks on `data`. Raises an assertion error if
        the data fail any of these sanity checks.

        Args:
            data (dict): A dictionary as specified in the ``__init__``
                of this class, that is, a dictionary containing the
                keys listed in :attr:`SampleFile.REQUIRED_KEYS`.
        """

        assert isinstance(data, dict) or data is None, \
            'data must be either dict or None!'

        if data is not None:
            for key in SampleFile.REQUIRED_KEYS:
                assert key in data.keys(), \
                    'data must provide key "{}"!'.format(key)

    # -------------------------------------------------------------------------

    @staticmethod
    def __decode(value):
        """
        Decode an HDF attribute value to `str`.

        h5py < 3 returns string attributes as `bytes`, while h5py >= 3
        already returns `str`; handle both transparently.
        """

        return value.decode('ascii') if isinstance(value, bytes) else value

    # -------------------------------------------------------------------------

    def __repr__(self):

        return pformat(self.data, indent=4)

    # -------------------------------------------------------------------------

    def __str__(self):

        return pformat(self.data, indent=4)

    # -------------------------------------------------------------------------

    def __getitem__(self, item):

        return self.data[item]

    # -------------------------------------------------------------------------

    def __setitem__(self, key, value):

        self.data[key] = value

    # -------------------------------------------------------------------------

    def read_hdf(self, file_path):
        """
        Read in an existing HDF sample file (e.g., to use an instance
        of :class:`SampleFile` as a convenience wrapper for accessing
        the contents of an HDF samples file).

        Args:
            file_path (str): The path to the HDF file to be read into
                the :class:`SampleFile` object.
        """

        # Clear the existing data
        self.data = {}

        with h5py.File(file_path, 'r') as hdf_file:

            # Read in the dicts that are stored as group attributes
            # (argument values are stored as strings)
            for name in ('command_line_arguments', 'static_arguments'):
                self.data[name] = \
                    {key: self.__decode(value) for key, value in
                     dict(hdf_file[name].attrs).items()}

            # Read in the groups containing the injection / noise samples.
            # A TypeError is raised by h5py for empty (null) datasets, in
            # which case we store a scalar `np.array(None)` placeholder.
            for name in ('injection_samples', 'noise_samples'):
                self.data[name] = dict()
                for key in ('event_time', 'h1_strain', 'l1_strain'):
                    try:
                        self.data[name][key] = \
                            np.array(hdf_file[name][key])
                    except TypeError:
                        self.data[name][key] = np.array(None)

            # Read in the injection parameters (dataset names are not fixed,
            # so loop over whatever the file provides)
            self.data['injection_parameters'] = dict()
            for key in hdf_file['/injection_parameters'].keys():
                try:
                    self.data['injection_parameters'][key] = \
                        np.array(hdf_file['injection_parameters'][key])
                except TypeError:
                    self.data['injection_parameters'][key] = np.array(None)

            # Read in dict with normalization parameters (stored as floats)
            self.data['normalization_parameters'] = \
                {key: float(value) for key, value in
                 dict(hdf_file['normalization_parameters'].attrs).items()}

    # -------------------------------------------------------------------------

    def to_hdf(self, file_path):
        """
        Write the contents of the :class:`SampleFile` object to an HDF
        file at the given location.

        Args:
            file_path (str): The path at which to save the HDF file.
                An existing file at this location is overwritten.
        """

        with h5py.File(file_path, 'w') as hdf_file:

            # Store command_line_arguments and static_arguments as
            # (stringified) attributes of their respective groups
            for name in ('command_line_arguments', 'static_arguments'):
                group = hdf_file.create_group(name)
                for key, value in self.data[name].items():
                    group.attrs[key] = str(value)

            # Store injection_samples and noise_samples: every item of the
            # dict becomes a new dataset. Event times need float64 precision
            # (GPS times), the whitened strains fit into float32.
            for name in ('injection_samples', 'noise_samples'):
                group = hdf_file.create_group(name)
                for key, value in self.data[name].items():
                    dtype = 'float64' if key == 'event_time' else 'float32'
                    if value is not None:
                        group.create_dataset(name=key,
                                             shape=value.shape,
                                             dtype=dtype,
                                             data=value)
                    else:
                        # Create an empty (null) dataset as a placeholder
                        group.create_dataset(name=key,
                                             shape=None,
                                             dtype=dtype)

            # Create group for injection_parameters and save every item of
            # the dict as a new dataset
            group = hdf_file.create_group('injection_parameters')
            for key, value in self.data['injection_parameters'].items():
                if value is not None:
                    group.create_dataset(name=key,
                                         shape=value.shape,
                                         dtype='float64',
                                         data=value)
                else:
                    group.create_dataset(name=key,
                                         shape=None,
                                         dtype='float64')

            # Create group for normalization_parameters and save every item
            # of the dict as a new attribute
            group = hdf_file.create_group('normalization_parameters')
            for key, value in self.data['normalization_parameters'].items():
                group.attrs[key] = float(value)

    # -------------------------------------------------------------------------

    @staticmethod
    def __as_dataframe(sample_dict, n_samples):
        """
        Build a data frame with one row per sample index from a dict
        mapping parameter names to equal-length numpy arrays.
        """

        records = [{key: value[i] for key, value in sample_dict.items()}
                   for i in range(n_samples)]
        df = pd.DataFrame(records)

        # Keep columns alphabetically sorted, matching the behavior of the
        # old DataFrame.append(..., sort=True) construction
        return df.reindex(sorted(df.columns), axis=1)

    # -------------------------------------------------------------------------

    def as_dataframe(self,
                     injection_parameters=False,
                     static_arguments=False,
                     command_line_arguments=False,
                     split_injections_noise=False):
        """
        Return the contents of the :class:`SampleFile` as a ``pandas``
        data frame.

        Args:
            injection_parameters (bool): Whether or not to return
                the `injection parameters` for every sample.
            static_arguments (bool): Whether or not to return
                the `static_arguments` for every sample.
            command_line_arguments (bool): Whether or not to return
                the `command_line_arguments` for every sample.
            split_injections_noise (bool): If this is set to True, a
                separate data frame will be returned for both the
                samples with and without an injection.

        Returns:
            One (or two, if `split_injections_noise` is set to `True`)
            pandas data frame containing the sample stored in the
            :class:`SampleFile` object.
        """

        # Create a data frame for the samples containing an injection.
        # A scalar shape () means "no samples" (see read_hdf placeholder).
        if self.data['injection_samples']['event_time'].shape != ():
            n_injections = len(self.data['injection_samples']['event_time'])
            df_injection_samples = \
                self.__as_dataframe(self.data['injection_samples'],
                                    n_injections)
        else:
            df_injection_samples = pd.DataFrame()

        # Create a data frame for the samples not containing an injection
        if self.data['noise_samples']['event_time'].shape != ():
            n_noise = len(self.data['noise_samples']['event_time'])
            df_noise_samples = \
                self.__as_dataframe(self.data['noise_samples'], n_noise)
        else:
            df_noise_samples = pd.DataFrame()

        # If requested, create a data frame for the injection parameters and
        # merge it with the data frame containing the injection samples
        if injection_parameters:

            # Check if we even have any injection parameters
            if self.data['injection_parameters']['mass1'].shape != ():
                df_injection_params = \
                    self.__as_dataframe(self.data['injection_parameters'],
                                        len(df_injection_samples))
            else:
                df_injection_params = pd.DataFrame()

            df = pd.concat([df_injection_samples, df_injection_params],
                           axis=1, sort=True)

        else:
            df = df_injection_samples

        # If requested, add the static_arguments to the data frame
        # containing the injections, and a smaller subset of the
        # static_arguments also to the data frame containing the noise
        # samples (only those arguments that make sense there)
        if static_arguments:
            for key, value in self.data['static_arguments'].items():
                df[key] = value
                if key in ('random_seed', 'target_sampling_rate',
                           'bandpass_lower', 'bandpass_upper',
                           'seconds_before_event', 'seconds_after_event',
                           'sample_length'):
                    df_noise_samples[key] = value

        # Merge the data frames for the samples with and without injections.
        # NOTE: DataFrame.append() was deprecated in pandas 1.4 and removed
        # in pandas 2.0, hence pd.concat() is used here.
        df = pd.concat([df, df_noise_samples], ignore_index=True, sort=True)

        # If requested, add the command line arguments that were used in the
        # creation of the sample file to the combined data frame
        if command_line_arguments:
            for key, value in self.data['command_line_arguments'].items():
                df[key] = value

        # Ensure the `event_time` variable is an integer
        try:
            df['event_time'] = df['event_time'].astype(int)
        except KeyError:
            warn('\nNo key "event_time": Data frame is probably empty!')

        # Either split into two data frames for injection and noise samples
        # (noise rows have no `h1_signal`, so it is NaN there)
        if split_injections_noise:
            df_injections = df[df.h1_signal.notnull()]
            df_noise = df[~df.h1_signal.notnull()]
            return df_injections, df_noise

        # Or just return a single data frame containing both types of samples
        else:
            return df
diff --git a/python3_samples/utils/samplegeneration.py b/python3_samples/utils/samplegeneration.py
new file mode 100755
index 0000000..76be58a
--- /dev/null
+++ b/python3_samples/utils/samplegeneration.py
@@ -0,0 +1,288 @@
+"""
+Provide the :func:`generate_sample()` method, which is at the heart of
+the sample generation process.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+from __future__ import print_function
+
+import numpy as np
+
+from lal import LIGOTimeGPS
+from pycbc.psd import interpolate
+from pycbc.psd.analytical import aLIGOZeroDetHighPower
+from pycbc.noise import noise_from_psd
+from pycbc.filter import sigma
+
+from .hdffiles import get_strain_from_hdf_file
+from .waveforms import get_detector_signals, get_waveform
+
+
+# -----------------------------------------------------------------------------
+# FUNCTION DEFINITIONS
+# -----------------------------------------------------------------------------
+
def generate_sample(static_arguments,
                    event_tuple,
                    waveform_params=None):
    """
    Generate a single sample (or example) by taking a piece of LIGO
    background noise (real or synthetic, depending on `event_tuple`),
    optionally injecting a simulated waveform (depending on
    `waveform_params`) and post-processing the result (whitening,
    band-passing).

    Args:
        static_arguments (dict): A dictionary containing global
            technical parameters for the sample generation, for example
            the target_sampling_rate of the output.
        event_tuple (tuple): A tuple `(event_time, file_path)`, which
            specifies the GPS time at which to make an injection and
            the path of the HDF file which contains said GPS time.
            If `file_path` is `None`, synthetic noise will be used
            instead and the `event_time` only serves as a seed for
            the corresponding (random) noise generator.
        waveform_params (dict): A dictionary containing the randomly
            sampled parameters that are passed as inputs to the
            waveform model (e.g., the masses, spins, position, ...).

    Returns:
        A tuple `(sample, injection_parameters)`, which contains the
        generated `sample` itself (a dict with keys `{'event_time',
        'h1_strain', 'l1_strain'}`), and the `injection_parameters`,
        which are either `None` (in case no injection was made), or a
        dict containing the `waveform_params` and some additional
        parameters (e.g., single detector SNRs).
    """

    # -------------------------------------------------------------------------
    # Define shortcuts for some elements of static_arguments
    # -------------------------------------------------------------------------

    # Read out frequency-related arguments
    original_sampling_rate = static_arguments['original_sampling_rate']
    target_sampling_rate = static_arguments['target_sampling_rate']
    f_lower = static_arguments['f_lower']
    delta_f = static_arguments['delta_f']
    fd_length = static_arguments['fd_length']

    # Get the width of the noise sample that we either select from the raw
    # HDF files, or generate synthetically
    noise_interval_width = static_arguments['noise_interval_width']

    # Get how many seconds before and after the event time to use
    seconds_before_event = static_arguments['seconds_before_event']
    seconds_after_event = static_arguments['seconds_after_event']

    # Get the event time and the dict containing the HDF file path
    event_time, hdf_file_paths = event_tuple

    # -------------------------------------------------------------------------
    # Get the background noise (either from data or synthetically)
    # -------------------------------------------------------------------------

    # If the HDF file paths are None, we generate synthetic noise
    if hdf_file_paths is None:

        # Create an artificial PSD for the noise
        # TODO: Is this the best choice for this task?
        psd = aLIGOZeroDetHighPower(length=fd_length,
                                    delta_f=delta_f,
                                    low_freq_cutoff=f_lower)

        # Compute the length of the noise sample in time steps
        # (loop-invariant, so computed once before the detector loop)
        noise_length = noise_interval_width * target_sampling_rate

        # Actually generate the noise using the PSD and LALSimulation
        noise = dict()
        for i, det in enumerate(('H1', 'L1')):

            # Generate the noise for this detector; the seed is offset per
            # detector so H1 and L1 get independent noise realizations
            noise[det] = noise_from_psd(length=noise_length,
                                        delta_t=(1.0 / target_sampling_rate),
                                        psd=psd,
                                        seed=(2 * event_time + i))

            # Manually fix the noise start time to match the fake event time.
            # However, for some reason, the correct setter method seems broken?
            start_time = event_time - noise_interval_width / 2
            # noinspection PyProtectedMember
            noise[det]._epoch = LIGOTimeGPS(start_time)

    # Otherwise we select the noise from the corresponding HDF file
    else:

        kwargs = dict(hdf_file_paths=hdf_file_paths,
                      gps_time=event_time,
                      interval_width=noise_interval_width,
                      original_sampling_rate=original_sampling_rate,
                      target_sampling_rate=target_sampling_rate,
                      as_pycbc_timeseries=True)
        noise = get_strain_from_hdf_file(**kwargs)

    # -------------------------------------------------------------------------
    # If applicable, make an injection
    # -------------------------------------------------------------------------

    # If no waveform parameters are given, we are not making an injection.
    # In this case, there are no detector signals and no injection
    # parameters, and the strain is simply equal to the noise
    if waveform_params is None:
        detector_signals = None
        injection_parameters = None
        strain = noise

    # Otherwise, we need to simulate a waveform for the given waveform_params
    # and add it into the noise to create the strain
    else:

        # ---------------------------------------------------------------------
        # Simulate the waveform with the given injection parameters
        # ---------------------------------------------------------------------

        # Actually simulate the waveform with these parameters
        waveform = get_waveform(static_arguments=static_arguments,
                                waveform_params=waveform_params)

        # Get the detector signals by projecting on the antenna patterns
        detector_signals = \
            get_detector_signals(static_arguments=static_arguments,
                                 waveform_params=waveform_params,
                                 event_time=event_time,
                                 waveform=waveform)

        # ---------------------------------------------------------------------
        # Add the waveform into the noise as is to calculate the NOMF-SNR
        # ---------------------------------------------------------------------

        # Store the dummy strain, the PSDs and the SNRs for the two detectors
        strain_ = {}
        psds = {}
        snrs = {}

        # Calculate these quantities for both detectors
        for det in ('H1', 'L1'):

            # Add the simulated waveform into the noise to get the dummy strain
            strain_[det] = noise[det].add_into(detector_signals[det])

            # Estimate the Power Spectral Density from the dummy strain
            psds[det] = strain_[det].psd(4)
            psds[det] = interpolate(psds[det], delta_f=delta_f)

            # Use the PSD estimate to calculate the optimal matched
            # filtering SNR for this injection and this detector
            snrs[det] = sigma(htilde=detector_signals[det],
                              psd=psds[det],
                              low_frequency_cutoff=f_lower)

        # Calculate the network optimal matched filtering SNR for this
        # injection (which we need for scaling to the chosen injection SNR)
        nomf_snr = np.sqrt(snrs['H1']**2 + snrs['L1']**2)

        # ---------------------------------------------------------------------
        # Add the waveform into the noise with the chosen injection SNR
        # ---------------------------------------------------------------------

        # Compute the rescaling factor
        injection_snr = waveform_params['injection_snr']
        scale_factor = 1.0 * injection_snr / nomf_snr

        strain = {}
        for det in ('H1', 'L1'):

            # Add the simulated waveform into the noise, using a scaling
            # factor to ensure that the resulting NOMF-SNR equals the chosen
            # injection SNR
            strain[det] = noise[det].add_into(scale_factor *
                                              detector_signals[det])

        # ---------------------------------------------------------------------
        # Store some information about the injection we just made
        # ---------------------------------------------------------------------

        # Store the information we have computed ourselves
        injection_parameters = {'scale_factor': scale_factor,
                                'h1_snr': snrs['H1'],
                                'l1_snr': snrs['L1']}

        # Also add the waveform parameters we have sampled
        for key, value in waveform_params.items():
            injection_parameters[key] = value

    # -------------------------------------------------------------------------
    # Whiten and bandpass the strain (also for noise-only samples)
    # -------------------------------------------------------------------------

    # Get the whitening and bandpass parameters once (loop-invariant)
    segment_duration = static_arguments['whitening_segment_duration']
    max_filter_duration = static_arguments['whitening_max_filter_duration']
    bandpass_lower = static_arguments['bandpass_lower']
    bandpass_upper = static_arguments['bandpass_upper']

    for det in ('H1', 'L1'):

        # Whiten the strain (using the built-in whitening of PyCBC)
        # We don't need to remove the corrupted samples here, because we
        # crop the strain later on
        strain[det] = \
            strain[det].whiten(segment_duration=segment_duration,
                               max_filter_duration=max_filter_duration,
                               remove_corrupted=False)

        # Apply a high-pass to remove everything below `bandpass_lower`;
        # If bandpass_lower = 0, do not apply any high-pass filter.
        if bandpass_lower != 0:
            strain[det] = strain[det].highpass_fir(frequency=bandpass_lower,
                                                   remove_corrupted=False,
                                                   order=512)

        # Apply a low-pass filter to remove everything above `bandpass_upper`.
        # If bandpass_upper = sampling rate, do not apply any low-pass filter.
        if bandpass_upper != target_sampling_rate:
            strain[det] = strain[det].lowpass_fir(frequency=bandpass_upper,
                                                  remove_corrupted=False,
                                                  order=512)

    # -------------------------------------------------------------------------
    # Cut strain (and signal) time series to the pre-specified length
    # -------------------------------------------------------------------------

    # Define the slice limits (the same for both detectors)
    a = event_time - seconds_before_event
    b = event_time + seconds_after_event

    for det in ('H1', 'L1'):

        # Cut the strain to the desired length
        strain[det] = strain[det].time_slice(a, b)

        # If we've made an injection, also cut the simulated signal
        if waveform_params is not None:
            detector_signals[det] = detector_signals[det].time_slice(a, b)

    # Add the (now sliced) detector signals to the injection parameters.
    # BUGFIX: This used to happen inside the per-detector loop above, which
    # stored the still-unsliced L1 signal on the first iteration and then
    # redundantly overwrote both entries on the second one.
    if waveform_params is not None:
        injection_parameters['h1_signal'] = np.array(detector_signals['H1'])
        injection_parameters['l1_signal'] = np.array(detector_signals['L1'])

    # -------------------------------------------------------------------------
    # Collect all available information about this sample and return results
    # -------------------------------------------------------------------------

    # The whitened strain is numerically on the order of O(1), so we can save
    # it as a 32-bit float (unlike the original signal, which is down to
    # O(10^-{30}) and thus requires 64-bit floats).
    sample = {'event_time': event_time,
              'h1_strain': np.array(strain['H1']).astype(np.float32),
              'l1_strain': np.array(strain['L1']).astype(np.float32)}

    return sample, injection_parameters
diff --git a/python3_samples/utils/staticargs.py b/python3_samples/utils/staticargs.py
new file mode 100755
index 0000000..b77e55c
--- /dev/null
+++ b/python3_samples/utils/staticargs.py
@@ -0,0 +1,96 @@
+"""
+Provide tools that are needed for amending and typecasting the static
+arguments from an `*.ini` configuration file, which controls the
+waveform simulation process.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+import copy
+
+
+# -----------------------------------------------------------------------------
+# FUNCTION DEFINITIONS
+# -----------------------------------------------------------------------------
+
def amend_static_args(static_args):
    """
    Amend the static_args from the `*.ini` configuration file by adding
    the parameters that can be computed directly from others (more
    intuitive ones). Note that the static_args should have been
    properly typecast first; see :func:`typecast_static_args()`.

    Args:
        static_args (dict): The static_args dict after it has been
            typecast by :func:`typecast_static_args()`.

    Returns:
        The amended `static_args`, where implicitly defined variables
        have been added.
    """

    # Work on a deep copy so the caller's dict is never modified
    amended = copy.deepcopy(static_args)

    # Table of derived keys and how to compute them. The callables are only
    # evaluated when the key is actually missing, and the entries are applied
    # in order, because `fd_length` depends on `td_length` (which may itself
    # only just have been added).
    derived_keys = (
        # sample_length = seconds_before_event + seconds_after_event
        ('sample_length',
         lambda a: a['seconds_before_event'] + a['seconds_after_event']),
        # delta_t = 1 / target_sampling_rate
        ('delta_t',
         lambda a: 1.0 / a['target_sampling_rate']),
        # delta_f = 1 / waveform_length
        ('delta_f',
         lambda a: 1.0 / a['waveform_length']),
        # td_length = waveform_length * target_sampling_rate
        ('td_length',
         lambda a: int(a['waveform_length'] * a['target_sampling_rate'])),
        # fd_length = td_length / 2 + 1
        ('fd_length',
         lambda a: int(a['td_length'] / 2.0 + 1)),
    )

    for key, compute in derived_keys:
        if key not in amended:
            amended[key] = compute(amended)

    return amended
+
+
def typecast_static_args(static_args):
    """
    Take the `static_args` dictionary as it is read in from the PyCBC
    configuration file (i.e., all values are strings) and cast the
    values to the correct types (`float` or `int`).

    Args:
        static_args (dict): The raw `static_args` dictionary as it is
            read from the `*.ini` configuration file.

    Returns:
        The `static_args` dictionary with proper types for all values.
    """

    # Work on a deep copy so the raw dict stays untouched
    args = copy.deepcopy(static_args)

    # Keys whose values need to be integers
    int_keys = ('bandpass_lower', 'bandpass_upper', 'waveform_length',
                'noise_interval_width', 'original_sampling_rate',
                'target_sampling_rate', 'whitening_max_filter_duration')

    # Keys whose values need to be floats
    float_keys = ('whitening_segment_duration', 'distance', 'f_lower',
                  'seconds_before_event', 'seconds_after_event')

    for key in int_keys:
        args[key] = int(args[key])
    for key in float_keys:
        args[key] = float(args[key])

    return args
diff --git a/python3_samples/utils/waveforms.py b/python3_samples/utils/waveforms.py
new file mode 100755
index 0000000..9c82fd4
--- /dev/null
+++ b/python3_samples/utils/waveforms.py
@@ -0,0 +1,312 @@
+"""
+Provide methods for generating and processing simulated
+gravitational-wave waveforms.
+"""
+
+# -----------------------------------------------------------------------------
+# IMPORTS
+# -----------------------------------------------------------------------------
+
+from __future__ import print_function
+
+import numpy as np
+from scipy.signal.windows import tukey
+
+from pycbc.distributions import JointDistribution, read_params_from_config, \
+    read_constraints_from_config, read_distributions_from_config
+from pycbc.transforms import apply_transforms, read_transforms_from_config
+from pycbc.workflow import WorkflowConfigParser
+from pycbc.waveform import get_td_waveform, get_fd_waveform
+from pycbc.detector import Detector
+from pycbc.types.timeseries import TimeSeries
+
+
+# -----------------------------------------------------------------------------
+# CLASS DEFINITIONS
+# -----------------------------------------------------------------------------
+
class WaveformParameterGenerator(object):
    """
    Convenience wrapper around the joint probability distribution of
    the waveform parameters defined in an `*.ini` configuration file.

    The parameters to be sampled come from the `[variable_args]`
    section of the file, and their distributions from the
    corresponding `[prior-*]` sections. Instances expose a single
    :meth:`draw` method to sample one parameter set at a time.

    Args:
        config_file (str): Path to the `*.ini` configuration file,
            which contains the information about the parameters to be
            generated and their distributions.
        random_seed (int): Seed for the random number generator.
            Caveat: We can only set the seed of the global numpy RNG.
    """

    def __init__(self,
                 config_file,
                 random_seed):

        # Seed the global numpy RNG (this is the only RNG we control)
        np.random.seed(random_seed)

        # WorkflowConfigParser expects a *list* of file paths, so the
        # single `config_file` argument gets wrapped accordingly
        parsed_config = WorkflowConfigParser(configFiles=[config_file])

        # Variable arguments and their constraints; the static_args are
        # not needed here, so they are discarded
        self.var_args, _ = read_params_from_config(parsed_config)
        self.constraints = read_constraints_from_config(parsed_config)

        # Transformations to apply to the raw samples
        self.trans = read_transforms_from_config(parsed_config)

        # Build the joint distribution that draw() will sample from
        distributions = read_distributions_from_config(parsed_config)
        self.pval = JointDistribution(self.var_args,
                                      *distributions,
                                      constraints=self.constraints)

    # -------------------------------------------------------------------------

    def draw(self):
        """
        Sample one set of waveform parameters from the joint
        distribution and apply the configured transformations.

        Returns:
            A `dict` mapping each parameter name to its randomly drawn
            value (e.g., masses, spins, position in the sky, ...).
        """
        sample = apply_transforms(self.pval.rvs(), self.trans)[0]

        return dict(zip(self.var_args, sample))
+
+
+# -----------------------------------------------------------------------------
+# FUNCTION DEFINITIONS
+# -----------------------------------------------------------------------------
+
def fade_on(timeseries,
            alpha=0.25):
    """
    Smoothly "fade on" a PyCBC time series by multiplying its first
    half with the rising edge of a Tukey window, which reduces
    amplitude discontinuities at the start of the waveform.

    Args:
        timeseries (pycbc.types.timeseries.TimeSeries): The PyCBC
            TimeSeries object to be faded on.
        alpha (float): The alpha parameter for the Tukey window.

    Returns:
        A new `TimeSeries` holding the faded-on data, carrying over
        the `delta_t` and start time (epoch) of the input.
    """

    # Timing metadata of the input, needed to rebuild the series later
    delta_t = timeseries.delta_t
    start = timeseries.start_time
    n_samples = int(timeseries.duration * timeseries.sample_rate)

    # Build a full Tukey window, then flatten its second half to 1 so
    # that only the beginning of the series is tapered (one-sided)
    window = tukey(M=n_samples, alpha=alpha)
    window[int(0.5 * len(window)):] = 1

    # Apply the window sample-wise to the raw data
    faded = window * np.array(timeseries)

    # Wrap the result in a new TimeSeries with the original timing
    # parameters (delta_t and epoch) of the input series
    return TimeSeries(initial_array=faded,
                      delta_t=delta_t,
                      epoch=start)
+
+
def get_waveform(static_arguments,
                 waveform_params):
    """
    Simulate a waveform (using methods provided by PyCBC / LALSuite)
    based on the `static_arguments` (which define, e.g., the waveform
    model to be used) and the `waveform_params`, which specify the
    physical parameters of the waveform (e.g., the masses and spins).

    .. note::
       Depending on the chosen `domain`, the simulation itself is
       performed by the PyCBC methods :func:`get_td_waveform()` or
       :func:`get_fd_waveform()`, which accept a combination of the
       `static_arguments` and the `waveform_params`. See the `PyCBC
       documentation <https://pycbc.org/pycbc/latest/html/
       pycbc.waveform.html#pycbc.waveform.waveform.get_td_waveform>`_
       for a (more) comprehensive explanation of the supported
       parameters. Currently only the following keys are actually
       passed on to the simulation routines:

       .. code-block:: python

          {'approximant', 'coa_phase', 'delta_f', 'delta_t',
           'distance', 'f_lower', 'inclination', 'mass1', 'mass2',
           'spin1z', 'spin2z'}

    .. warning::
       If you want to use a different waveform model or a different
       parameter space, you may need to edit this function according
       to your exact needs!

    Args:
        static_arguments (dict): The static arguments (e.g., the
            waveform approximant and the sampling rate) defined in the
            `*.ini` configuration file, which specify technical aspects
            of the simulation process.
        waveform_params (dict): The physical parameters of the
            waveform to be simulated, such as the masses or the
            position in the sky. Usually, these values are sampled
            using a :class:`WaveformParameterGenerator` instance,
            which is based on the variable arguments section in the
            `*.ini` configuration file.

    Returns:
        A tuple `(h_plus, h_cross)` with the two polarization modes of
        the simulated waveform, resized to the desired length.
    """

    # Pick the simulation routine and the target length according to
    # whether a time domain (TD) or frequency domain (FD) approximant
    # has been requested
    domain = static_arguments['domain']
    if domain == 'time':
        simulate_waveform = get_td_waveform
        length = int(static_arguments['td_length'])
    elif domain == 'frequency':
        simulate_waveform = get_fd_waveform
        length = int(static_arguments['fd_length'])
    else:
        raise ValueError('Invalid domain! Must be "time" or "frequency"!')

    # Assemble the simulation inputs from the static and the variable
    # (physical) parameters
    simulation_parameters = {
        'approximant': static_arguments['approximant'],
        'coa_phase': waveform_params['coa_phase'],
        'delta_f': static_arguments['delta_f'],
        'delta_t': static_arguments['delta_t'],
        'distance': static_arguments['distance'],
        'f_lower': static_arguments['f_lower'],
        'inclination': waveform_params['inclination'],
        'mass1': waveform_params['mass1'],
        'mass2': waveform_params['mass2'],
        'spin1z': waveform_params['spin1z'],
        'spin2z': waveform_params['spin2z'],
    }

    # Run the actual simulation
    h_plus, h_cross = simulate_waveform(**simulation_parameters)

    # Fade on both polarizations to suppress initial discontinuities
    tukey_alpha = static_arguments['tukey_alpha']
    h_plus = fade_on(h_plus, alpha=tukey_alpha)
    h_cross = fade_on(h_cross, alpha=tukey_alpha)

    # Bring both polarizations to the requested length
    h_plus.resize(length)
    h_cross.resize(length)

    return h_plus, h_cross
+
+
+# -----------------------------------------------------------------------------
+
+
def get_detector_signals(static_arguments,
                         waveform_params,
                         event_time,
                         waveform):
    """
    Project the raw `waveform` (i.e., the tuple `(h_plus, h_cross)`
    returned by :func:`get_waveform()`) onto the antenna patterns of
    the detectors in Hanford and Livingston. This requires the position
    of the source in the sky, which is contained in `waveform_params`.

    Args:
        static_arguments (dict): The static arguments (e.g., the
            waveform approximant and the sampling rate) defined in the
            `*.ini` configuration file.
        waveform_params (dict): The parameters that were used as inputs
            for the waveform simulation; only the following keys are
            actually required here:

                - ``ra`` = Right ascension of the source
                - ``dec`` = Declination of the source
                - ``polarization`` = Polarization angle of the source

        event_time (int): The GPS time for the event, which, by
            convention, is the time at which the simulated signal
            reaches its maximum amplitude in the `H1` channel.
        waveform (tuple): The pure simulated waveform, represented by
            a tuple `(h_plus, h_cross)`, which is usually generated
            by :func:`get_waveform()`.

    Returns:
        A dictionary with keys `{'H1', 'L1'}` that contains the pure
        signal as it would be observed at Hanford and Livingston.
    """

    # Unpack the two polarization modes of the simulated waveform
    h_plus, h_cross = waveform

    # Sky position and polarization angle needed for the projection
    right_ascension = waveform_params['ra']
    declination = waveform_params['dec']
    polarization = waveform_params['polarization']

    domain = static_arguments['domain']

    # Instantiate both detectors once; H1 also serves as the reference
    # for the inter-detector time delays below
    detectors = {'H1': Detector('H1'), 'L1': Detector('L1')}

    # Collect the projected signals per detector here
    detector_signals = {}

    # Compute the signal each detector would observe
    # NOTE(review): the projection uses a fixed reference GPS time of
    # 100 throughout — confirm this convention against the callers
    for detector_name in ('H1', 'L1'):

        detector = detectors[detector_name]

        # Antenna pattern coefficients for this detector
        f_plus, f_cross = \
            detector.antenna_pattern(right_ascension=right_ascension,
                                     declination=declination,
                                     polarization=polarization,
                                     t_gps=100)

        # Arrival-time offset of this detector relative to H1
        delta_t_h1 = \
            detector.time_delay_from_detector(other_detector=detectors['H1'],
                                              right_ascension=right_ascension,
                                              declination=declination,
                                              t_gps=100)

        # Linear combination of the polarizations with the antenna
        # pattern coefficients gives the detector response
        signal = f_plus * h_plus + f_cross * h_cross

        # Shift the signal from geocentric coordinates into the
        # detector's own reference frame; the procedure depends on the
        # domain in which the waveform was simulated
        if domain == 'time':
            signal = signal.cyclic_time_shift(100 + delta_t_h1 +
                                              signal.start_time)
            signal.start_time = event_time - 100
        elif domain == 'frequency':
            signal = signal.cyclic_time_shift(100 + delta_t_h1)
            signal.start_time = event_time - 100
            signal = signal.to_timeseries()
        else:
            raise ValueError('Invalid domain! Must be "time" or "frequency"!')

        detector_signals[detector_name] = signal

    return detector_signals
-- 
GitLab