diff --git a/data/groups/hbsm/.gitignore b/data/groups/hbsm/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..72e8ffc0db8aad71a934dd11e5968bd5109e54b4
--- /dev/null
+++ b/data/groups/hbsm/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/data/hbsm/batch/templates/cern/one_lep_nom.sh b/data/hbsm/batch/templates/cern/one_lep_nom.sh
index 2faa81567bb37ff7b602509330b0829d6386a4df..bff9eec050e764648c3b92334487f9fbbfadb9e1 100644
--- a/data/hbsm/batch/templates/cern/one_lep_nom.sh
+++ b/data/hbsm/batch/templates/cern/one_lep_nom.sh
@@ -1,10 +1,19 @@
 #!/bin/bash
 
-#BSUB -o test.%J
-
 # template to submit VLQ jobs on the lxbatch cluster
 #
-# note to self:
+# Note to self:
+# if you want to use curly braces to highlight bash variables, you
+# need to escape them with double curly (otherwise they're picked up
+# by the python string.format)
+#
+# Variable replacement strategy:
+# - replace variables as early as possible, to make commands in
+#   resulting scripts explicit (--> more easily 'joinable')
+# - when there is a value repeated several times, declare a local
+#   variable within the function
+# - assume minimal env vars (i.e. only LSB_JOBID)
+# Log file strategy:
+# - keep separate log files for compilation and processing, since we
+#   might want to join samples
+# - keep explicit logfile names within each function (easier joining)
-# if you want to use curly braces to highlight bash variables, you
-# need to escape them with double curly (otherwise they're picked up
-# by the python string.format
@@ -12,65 +21,33 @@
 export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
 source ${{ATLAS_LOCAL_ROOT_BASE}}/user/atlasLocalSetup.sh
 
-function abortJob {{
-  touch "{absolute_output_base_dir:s}/{relative_output_dir:s}/fail_{job_label:s}.${{LSB_JOBID}}"
-  exit 1
-}}
-
-function main() {{
-
-    local tar_file="{tar_file:s}"
-    local relative_output_dir="{relative_output_dir:s}"
-    local absolute_output_base_dir="{absolute_output_base_dir:s}"
-
-    local exe="VLQAnalysis"
-
-    local subtask_log_file=""
-
-    local tmp_dir=${{TMPDIR}}/LSF_${{LSB_JOBID}}
 
-    mkdir -vp ${{tmp_dir}}
-    cd ${{tmp_dir}}
-
-    echo "Start `date`"
-    echo "working from `pwd`"
-    echo "current contents:"
+function prepare() {{
+    local absolute_base_dir="{absolute_base_dir:s}"
+    echo "Current directory: `pwd`"
+    echo "Current contents:"
     ls -ltrh
-    echo "using tar file ${{tar_file}}"
-    ls ${{tar_file}}
-    cp -p ${{tar_file}} ./tarball.tgz
+    cp -p {tar_file:s} ./tarball.tgz
     echo "untar tarball.tgz"
-    ls tarball.tgz
-
+    ls -lh tarball.tgz
     tar xzf tarball.tgz
-    # echo "contents of tarball (./all_links/):"
-    # ls -ltrh ./all_links/
-
-    mkdir -p ${{relative_output_dir}}
-
-    echo "Processing ${{sample}} `date`"
-
-    subtask_log_file=${{relative_output_dir}}/compile_{job_label:s}.log
-    echo "Starting 'compilation' step `date`"   >> ${{subtask_log_file}} 2>&1
-    # rcSetup Base,2.3.50                         >> ${{subtask_log_file}} 2>&1
-    lsetup 'rcSetup -u'                         >> ${{subtask_log_file}} 2>&1
-    lsetup 'rcsetup Base,2.3.50'                >> ${{subtask_log_file}} 2>&1
-    rc find_packages                            >> ${{subtask_log_file}} 2>&1
-    rc clean                                    >> ${{subtask_log_file}} 2>&1
-    rc compile                                  >> ${{subtask_log_file}} 2>&1
-    echo "Finishing 'compile' step `date`"      >> ${{subtask_log_file}} 2>&1
+    echo "Starting 'compilation' step `date`"
+    rc find_packages
+    rc clean
+    rc compile
+    echo "Completed 'compile' step `date`"
+}}
 
-# can lead to large logfiles:
- # --msgLevel=DEBUG \
-# for test
- # --nEvents=10 \
-    subtask_log_file=${{relative_output_dir}}/run_{job_label:s}.log
-    echo "Starting 'run' step `date`"  >> ${{subtask_log_file}} 2>&1
-    ${{exe}} \
- --outputFile={relative_output_dir:}/{output_file:s} \
+function run() {{
+    echo "Processing {sample_name:s} `date`"
+    mkdir -p {relative_log_dir:s}
+    mkdir -p {relative_output_dir:s}
+    local subtask_log_file={relative_log_dir:s}/run_{job_label:s}.log
+    VLQAnalysis \
+ --outputFile={relative_output_dir:s}/{output_file:s} \
  --inputFile={filelist_name:s} \
  --textFileList=true \
- --sampleName=hbsm \
+ --sampleName={sample_name:s} \
  --weightConfigs=${{ROOTCOREBIN}}/data/VLQAnalysis/list_weights.list \
  --doOneLeptonAna=true \
  --useLeptonsSF=true \
@@ -92,24 +69,46 @@ function main() {{
  --isData=false \
  --computeWeightSys={compute_weight_sys:s} \
  --onlyDumpSystHistograms=true \
- >> ${{subtask_log_file}} 2>&1
+ {other_options:s} \
+ >> ${{subtask_log_file}} 2>&1 || true
+    echo "Completed 'run' step `date`"  >> ${{subtask_log_file}} 2>&1
+
+# These options are only for tests
+# --msgLevel=DEBUG \  # can lead to large logfiles
+# --nEvents=10 \      # for test
 
-    echo "Finishing 'run' step `date`" >> ${{subtask_log_file}} 2>&1
-    if test -e {relative_output_dir:}/{output_file:s}.root
+    if test -e {relative_output_dir:s}/{output_file:s}
         then
-        touch ${{relative_output_dir}}/done_{job_label:s}.${{LSB_JOBID}}
+        echo "files being copied: [begin]"
+        ls -ltrh {relative_output_dir:s}/*
+        echo "files being copied: [end]"
+        rsync -az {relative_log_dir:s}/* {absolute_base_dir:s}/{relative_log_dir:s}
+        rsync -az {relative_output_dir:s}/* {absolute_base_dir:s}/{relative_output_dir:s}
+        touch "{absolute_base_dir:s}/{relative_status_dir:s}/{job_label:s}.done.${{LSB_JOBID}}"
+        else
+        touch "{absolute_base_dir:s}/{relative_status_dir:s}/{job_label:s}.fail.${{LSB_JOBID}}"
     fi
-    echo "files being copied: [begin]"
-    ls -ltrh ${{relative_output_dir}}/*
-    echo "files being copied: [begin]"
-    rsync -az ${{relative_output_dir}}/* ${{absolute_output_base_dir}}/${{relative_output_dir}}
 
-    echo "Processed ${{sample}} `date`"
+    echo "Processed {sample_name:s} `date`"
+
+}}
 
-    # echo "Cleaning up"
+function main() {{
+
+    local relative_output_dir="{relative_output_dir:s}"
+    local absolute_base_dir="{absolute_base_dir:s}"
+    echo "Start `date`"
+    local tmp_dir=${{TMPDIR}}/LSF_${{LSB_JOBID}}
+    mkdir -vp ${{tmp_dir}}
+    cd ${{tmp_dir}}
+    echo "working from `pwd`"
+    lsetup 'rcSetup -u'
+    lsetup 'rcsetup {rc_release_version:s}'
+    prepare
+    run
+    # echo "Cleaning up" # done automatically on lxbatch
     # cd ..
     # rm -rf $TMPDIR
-    test -f "${{absolute_output_base_dir}}/${{relative_output_dir}}/done_{job_label:s}.${{LSB_JOBID}}" || abortJob
 }}
 
 main
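
The double-curly escaping described in the template header exists because the submission driver renders this whole file with Python's str.format: each {name:s} placeholder is substituted at submit time, while escaped braces such as ${{LSB_JOBID}} come out as ${LSB_JOBID} and are left for bash to expand on the worker node. A minimal sketch of that rendering step, using made-up parameter values and covering only the placeholders visible in this diff (the actual driver is not part of this change):

# Hypothetical rendering of the cern template; all values are illustrative.
params = {
    'tar_file': '/afs/cern.ch/work/u/user/hbsm/area.tgz',
    'absolute_base_dir': '/afs/cern.ch/work/u/user/hbsm',
    'relative_output_dir': 'out/ttbar_nom',
    'relative_log_dir': 'log/ttbar_nom',
    'relative_status_dir': 'status/ttbar_nom',
    'job_label': 'ttbar_nom_001',
    'output_file': 'ttbar_nom_001.root',
    'filelist_name': 'filelist_ttbar_nom_001.txt',
    'sample_name': 'ttbar',
    'compute_weight_sys': 'false',
    'other_options': '',
    'rc_release_version': 'Base,2.3.50',
}

with open('data/hbsm/batch/templates/cern/one_lep_nom.sh') as template_file:
    template = template_file.read()
# '{tar_file:s}' becomes the path above, while the escaped '${{LSB_JOBID}}'
# is emitted as '${LSB_JOBID}' and expanded by bash at run time.
with open('batch_ttbar_nom_001.sh', 'w') as script_file:
    script_file.write(template.format(**params))
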
diff --git a/data/hbsm/batch/templates/ifae/one_lep_nom.sh b/data/hbsm/batch/templates/ifae/one_lep_nom.sh
index ecd7ce3db78a2fafb9cd02e25c217126c8cc61d9..8ffe8d594a0492aabc0c0da73fb4fff901e661e3 100644
--- a/data/hbsm/batch/templates/ifae/one_lep_nom.sh
+++ b/data/hbsm/batch/templates/ifae/one_lep_nom.sh
@@ -2,68 +2,52 @@
 
 # template to submit VLQ jobs on the at3 cluster
 #
-# note to self:
+# Note to self:
 # if you want to use curly braces to highlight bash variables, you
 # need to escape them with double curly (otherwise they're picked up
 # by the python string.format
+#
+# Variable replacement strategy:
+# - replace variables as early as possible, to make commands in
+#   resulting scripts explicit (--> more easily 'joinable')
+# - when there is a value repeated several times, declare a local
+#   variable within the function
+# - assume minimal env vars (i.e. only LSB_JOBID)
+# Log file strategy:
+# - keep separate log files for compilation and processing, since we
+#   might want to join samples
+# - keep explicit logfile names within each function (easier joining)
 
 echo "setting up root "
-    # source ${{trex_dir}}/setup.sh # this doesn't work on batch
 export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
 source ${{ATLAS_LOCAL_ROOT_BASE}}/user/atlasLocalSetup.sh
 
-function abortJob {{
-  touch "{absolute_output_base_dir:s}/{relative_output_dir:s}/fail_{job_label:s}.${{LSB_JOBID}}"
-  exit 1
-}}
-
-function main() {{
-
-    local tar_file="{tar_file:s}"
-    local relative_output_dir="{relative_output_dir:s}"
-    local absolute_output_base_dir="{absolute_output_base_dir:s}"
-
-    local exe="VLQAnalysis"
-
-    local subtask_log_file=""
-
-    cd ${{TMPDIR}}
-    echo "Start `date`"
-    echo "working from `pwd`"
-    echo "current contents:"
+function prepare() {{
+    local absolute_base_dir="{absolute_base_dir:s}"
+    echo "Current directory: `pwd`"
+    echo "Current contents:"
     ls -ltrh
-    cp -p ${{tar_file}} ./tarball.tgz
+    cp -p {tar_file:s} ./tarball.tgz
     echo "untar tarball.tgz"
-    ls tarball.tgz
-
+    ls -lh tarball.tgz
     tar xzf tarball.tgz
-    # echo "contents of tarball (./all_links/):"
-    # ls -ltrh ./all_links/
-
-    mkdir -p ${{relative_output_dir}}
-
-    echo "Processing ${{sample}} `date`"
+    echo "Starting 'compilation' step `date`"
+    rc find_packages
+    rc clean
+    rc compile
+    echo "Completed 'compile' step `date`"
+}}
 
-    subtask_log_file=${{relative_output_dir}}/compile_{job_label:s}.log
-    echo "Starting 'compilation' step `date`"   >> ${{subtask_log_file}} 2>&1
-    # rcSetup Base,2.3.50                         >> ${{subtask_log_file}} 2>&1
-    lsetup 'rcSetup -u'                         >> ${{subtask_log_file}} 2>&1
-    lsetup 'rcsetup Base,2.3.50'                >> ${{subtask_log_file}} 2>&1
-    rc find_packages                            >> ${{subtask_log_file}} 2>&1
-    rc clean                                    >> ${{subtask_log_file}} 2>&1
-    rc compile                                  >> ${{subtask_log_file}} 2>&1
-    echo "Finishing 'compile' step `date`"      >> ${{subtask_log_file}} 2>&1
-# can lead to large logfiles:
- # --msgLevel=DEBUG \
-# for test
- # --nEvents=10 \
-    subtask_log_file=${{relative_output_dir}}/run_{job_label:s}.log
-    echo "Starting 'run' step `date`"  >> ${{subtask_log_file}} 2>&1
-    ${{exe}} \
- --outputFile={relative_output_dir:}/{output_file:s} \
+function run() {{
+    echo "Processing {sample_name:s} `date`"
+    mkdir -p {relative_log_dir:s}
+    mkdir -p {relative_output_dir:s}
+    local subtask_log_file={relative_log_dir:s}/run_{job_label:s}.log
+    VLQAnalysis \
+ --outputFile={relative_output_dir:s}/{output_file:s} \
  --inputFile={filelist_name:s} \
  --textFileList=true \
- --sampleName=hbsm \
+ --sampleName={sample_name:s} \
  --weightConfigs=${{ROOTCOREBIN}}/data/VLQAnalysis/list_weights.list \
  --doOneLeptonAna=true \
  --useLeptonsSF=true \
@@ -85,24 +69,39 @@ function main() {{
  --isData=false \
  --computeWeightSys={compute_weight_sys:s} \
  --onlyDumpSystHistograms=true \
- >> ${{subtask_log_file}} 2>&1
+ {other_options:s} \
+ >> ${{subtask_log_file}} 2>&1 || true
+
+# These options are only for tests
+# --msgLevel=DEBUG \  # can lead to large logfiles
+# --nEvents=10 \      # for test
 
-    echo "Finishing 'run' step `date`" >> ${{subtask_log_file}} 2>&1
-    if test -e {relative_output_dir:}/{output_file:s}
+    if test -e {relative_output_dir:s}/{output_file:s}
         then
-        touch ${{relative_output_dir}}/done_{job_label:s}.${{LSB_JOBID}}
+        echo "files being copied: [begin]"
+        ls -ltrh {relative_output_dir:s}/*
+        echo "files being copied: [end]"
+        rsync -az {relative_log_dir:s}/* {absolute_base_dir:s}/{relative_log_dir:s}
+        rsync -az {relative_output_dir:s}/* {absolute_base_dir:s}/{relative_output_dir:s}
+        touch "{absolute_base_dir:s}/{relative_status_dir:s}/{job_label:s}.done.${{LSB_JOBID}}"
+        else
+        touch "{absolute_base_dir:s}/{relative_status_dir:s}/{job_label:s}.fail.${{LSB_JOBID}}"
     fi
-    echo "files being copied: [begin]"
-    ls -ltrh ${{relative_output_dir}}/*
-    echo "files being copied: [begin]"
-    rsync -az ${{relative_output_dir}}/* ${{absolute_output_base_dir}}/${{relative_output_dir}}
+    echo "Processed {sample_name:s} `date`"
 
-    echo "Processed ${{sample}} `date`"
+}}
 
+function main() {{
+    echo "Start `date`"
+    cd ${{TMPDIR}}
+    echo "working from `pwd`"
+    echo "Setting up release:"
+    lsetup 'rcSetup -u'
+    lsetup 'rcsetup {rc_release_version:s}'
+    prepare
+    run
     echo "Cleaning up"
     rm -rf $TMPDIR/*
-    test -f "${{absolute_output_base_dir}}/${{relative_output_dir}}/done_{job_label:s}.${{LSB_JOBID}}" || abortJob
-
 }}
 
 main
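
Both templates now record the job outcome as marker files instead of the old abortJob exit path: on success they touch {job_label:s}.done.${LSB_JOBID} under {absolute_base_dir:s}/{relative_status_dir:s}, otherwise {job_label:s}.fail.${LSB_JOBID}. The '|| true' after VLQAnalysis keeps the script running past a crash so the logs are still rsynced back and a marker is written either way. A submission driver can then poll these markers instead of parsing batch exit codes; a possible helper under this naming convention (hypothetical, the actual bookkeeping code is not in this diff):

import glob
import os

def job_status(absolute_base_dir, relative_status_dir, job_label):
    """Return 'done', 'fail', or 'pending' for one batch job,
    based on the marker files touched by the templates above."""
    status_dir = os.path.join(absolute_base_dir, relative_status_dir)
    if glob.glob(os.path.join(status_dir, job_label + '.done.*')):
        return 'done'
    if glob.glob(os.path.join(status_dir, job_label + '.fail.*')):
        return 'fail'
    return 'pending'  # no marker yet: job still queued or running

Jobs reporting 'fail' can be resubmitted without re-running those whose '.done.*' marker already exists.
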
diff --git a/data/hbsm/groups/.gitignore b/data/hbsm/groups/.gitignore
deleted file mode 100644
index 2211df63dd2831aa0cfc38ba1ebc95e3c4620894..0000000000000000000000000000000000000000
--- a/data/hbsm/groups/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-*.txt
diff --git a/data/samples_HtX4TopsNtuple-00-00-11.txt b/data/samples_HtX4TopsNtuple-00-00-11.txt
index 7c72588b7c54aed202a76205eda166d2ec5d66a0..48afabd68590b0afcca28b2754300b50173446d6 100644
--- a/data/samples_HtX4TopsNtuple-00-00-11.txt
+++ b/data/samples_HtX4TopsNtuple-00-00-11.txt
@@ -1,5 +1,4 @@
 user.prose.410000.PowhegPythiaEvtGen.DAOD_TOPQ1.e3698_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
-user.prose.410001.PowhegPythiaEvtGen.DAOD_TOPQ1.e3783_s2608_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.410002.PowhegPythiaEvtGen.DAOD_TOPQ1.e3783_s2608_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.410003.aMcAtNloHerwigppEvtGen.DAOD_TOPQ1.e4441_s2726_r7772_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.410120.PowhegPythiaEvtGen.DAOD_TOPQ1.e4373_s2608_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
@@ -51,6 +50,7 @@ user.prose.363457.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00
 user.prose.363460.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.363463.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.363466.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.363469.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.363472.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.363475.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.363478.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-11_out.root/
@@ -617,3 +617,17 @@ user.prose.341551.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.H
 user.prose.341552.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.341554.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
 user.prose.341555.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.341556.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.341557.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.341558.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344066.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_s2726_r7772_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344074.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_s2726_r7772_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344066.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344068.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344072.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344073.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344074.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344076.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344079.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344080.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
+user.prose.344081.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-11_out.root/
diff --git a/data/samples_HtX4TopsNtuple-00-00-12.txt b/data/samples_HtX4TopsNtuple-00-00-12.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8a255b0fcaf8a7f574dc183a196fa734fb7d237b
--- /dev/null
+++ b/data/samples_HtX4TopsNtuple-00-00-12.txt
@@ -0,0 +1,543 @@
+user.mcasolin.00276262.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276329.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276336.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276416.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276511.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276689.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276778.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276790.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276952.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00276954.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00278880.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00278912.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00278968.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279169.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279259.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279279.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279284.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279345.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279515.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279598.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279685.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279764.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279813.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279867.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279928.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279932.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00279984.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280231.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280319.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280368.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280423.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280464.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280500.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280520.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280614.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280673.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280753.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280853.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280862.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280950.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00280977.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00281070.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00281074.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00281075.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00281317.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00281411.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00282625.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00282631.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00282712.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00282784.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00282992.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00283074.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00283155.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00283270.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00283429.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00283608.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00283780.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00284006.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00284154.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00284213.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00284285.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00284420.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00284427.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00284484.physics_Main.DAOD_TOPQ4.r7562_p2521_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00297730.physics_Main.DAOD_TOPQ4.f694_m1583_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298595.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298609.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298633.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298687.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298690.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298771.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298773.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298862.physics_Main.DAOD_TOPQ4.f696_m1588_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00298967.physics_Main.DAOD_TOPQ4.f696_m1588_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00299055.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00299144.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00299147.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00299184.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00299243.physics_Main.DAOD_TOPQ4.f698_m1594_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00299584.physics_Main.DAOD_TOPQ4.f703_m1600_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300279.physics_Main.DAOD_TOPQ4.f705_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300345.physics_Main.DAOD_TOPQ4.f705_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300415.physics_Main.DAOD_TOPQ4.f705_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300418.physics_Main.DAOD_TOPQ4.f705_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300487.physics_Main.DAOD_TOPQ4.f705_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300540.physics_Main.DAOD_TOPQ4.f705_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300571.physics_Main.DAOD_TOPQ4.f705_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300600.physics_Main.DAOD_TOPQ4.f708_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300655.physics_Main.DAOD_TOPQ4.f708_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300687.physics_Main.DAOD_TOPQ4.f708_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300784.physics_Main.DAOD_TOPQ4.f708_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300800.physics_Main.DAOD_TOPQ4.f708_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300863.physics_Main.DAOD_TOPQ4.f708_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00300908.physics_Main.DAOD_TOPQ4.f708_m1606_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00301912.physics_Main.DAOD_TOPQ4.f709_m1611_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00301918.physics_Main.DAOD_TOPQ4.f709_m1611_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00301932.physics_Main.DAOD_TOPQ4.f709_m1611_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00301973.physics_Main.DAOD_TOPQ4.f709_m1611_p2667.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302053.physics_Main.DAOD_TOPQ4.f709_m1611_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302137.physics_Main.DAOD_TOPQ4.f709_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302265.physics_Main.DAOD_TOPQ4.f709_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302269.physics_Main.DAOD_TOPQ4.f709_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302300.physics_Main.DAOD_TOPQ4.f711_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302347.physics_Main.DAOD_TOPQ4.f711_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302380.physics_Main.DAOD_TOPQ4.f711_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302391.physics_Main.DAOD_TOPQ4.f711_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302393.physics_Main.DAOD_TOPQ4.f711_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302737.physics_Main.DAOD_TOPQ4.f711_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302831.physics_Main.DAOD_TOPQ4.f711_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302872.physics_Main.DAOD_TOPQ4.f716_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302919.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302925.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00302956.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303007.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303079.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303201.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303208.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303264.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303266.physics_Main.DAOD_TOPQ4.f715_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303291.physics_Main.DAOD_TOPQ4.f716_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303304.physics_Main.DAOD_TOPQ4.f716_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303338.physics_Main.DAOD_TOPQ4.f716_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303421.physics_Main.DAOD_TOPQ4.f716_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303499.physics_Main.DAOD_TOPQ4.f716_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.00303560.physics_Main.DAOD_TOPQ4.f716_m1620_p2689.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302055.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4017_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302056.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4017_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302057.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4017_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302058.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4017_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302059.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4017_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302468.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302469.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302470.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302471.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302472.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302473.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302474.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302475.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302476.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302477.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302478.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302479.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302480.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302481.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302482.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302483.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302484.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302485.ProtosLHEFPythia8EvtGen.DAOD_TOPQ1.e4112_s2608_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.302777.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4476_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341541.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341543.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341545.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341546.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341547.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341548.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341549.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341550.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341551.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341552.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341553.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341554.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341555.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341556.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.341557.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.343365.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4706_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.343366.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4706_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.343367.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4706_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344066.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344067.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344068.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344069.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344070.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344071.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344072.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344073.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5075_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344074.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344075.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344076.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344077.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344078.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344079.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344080.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.344081.MadGraphPythia8EvtGen.DAOD_TOPQ1.e5052_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361091.Sherpa.DAOD_TOPQ1.e4607_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361092.Sherpa.DAOD_TOPQ1.e4607_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361093.Sherpa.DAOD_TOPQ1.e4607_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361094.Sherpa.DAOD_TOPQ1.e4607_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361095.Sherpa.DAOD_TOPQ1.e4607_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361096.Sherpa.DAOD_TOPQ1.e4607_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361097.Sherpa.DAOD_TOPQ1.e4607_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361300.Sherpa.DAOD_TOPQ1.e3651_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361301.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361302.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361303.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361304.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361305.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361306.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361307.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361308.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361309.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361310.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361311.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361312.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361313.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361314.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361315.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361316.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361317.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361318.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361319.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361320.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361321.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361322.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361323.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361324.Sherpa.DAOD_TOPQ1.e3651_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361325.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361326.Sherpa.DAOD_TOPQ1.e3651_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361327.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361328.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361329.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361330.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361331.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361332.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361333.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361334.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361335.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361336.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361337.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361338.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361339.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361340.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361341.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361342.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361343.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361344.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361345.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361346.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361347.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361348.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361349.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361350.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361351.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361352.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361353.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361354.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361355.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361356.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361357.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361358.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361359.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361360.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361361.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361362.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361363.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361364.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361365.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361366.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361367.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361368.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361369.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361370.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361371.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361372.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361373.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361374.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361375.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361376.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361377.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361378.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361379.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361380.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361381.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361382.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361383.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361384.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361385.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361386.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361387.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361388.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361389.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361390.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361391.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361392.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361393.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361394.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361395.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361396.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361397.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361398.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361399.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361400.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361401.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361402.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361403.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361404.Sherpa.DAOD_TOPQ1.e3651_s2586_s2174_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361405.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361406.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361407.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361408.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361409.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361410.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361411.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361412.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361413.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361414.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361415.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361416.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361417.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361418.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361419.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361420.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361421.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361422.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361423.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361424.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361425.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361426.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361427.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361428.Sherpa.DAOD_TOPQ1.e3733_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361429.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361430.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361431.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361432.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361433.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361434.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361435.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361436.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361437.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361438.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361439.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361440.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361441.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361442.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.361443.Sherpa.DAOD_TOPQ1.e4133_s2608_s2183_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363102.Sherpa.DAOD_TOPQ1.e4742_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363103.Sherpa.DAOD_TOPQ1.e4742_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363104.Sherpa.DAOD_TOPQ1.e4792_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363105.Sherpa.DAOD_TOPQ1.e4666_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363106.Sherpa.DAOD_TOPQ1.e4666_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363107.Sherpa.DAOD_TOPQ1.e4742_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363108.Sherpa.DAOD_TOPQ1.e4666_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363109.Sherpa.DAOD_TOPQ1.e4792_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363110.Sherpa.DAOD_TOPQ1.e4792_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363111.Sherpa.DAOD_TOPQ1.e4666_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363112.Sherpa.DAOD_TOPQ1.e4742_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363113.Sherpa.DAOD_TOPQ1.e4742_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363114.Sherpa.DAOD_TOPQ1.e4742_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363115.Sherpa.DAOD_TOPQ1.e4792_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363116.Sherpa.DAOD_TOPQ1.e4742_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363117.Sherpa.DAOD_TOPQ1.e4666_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363118.Sherpa.DAOD_TOPQ1.e4666_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363119.Sherpa.DAOD_TOPQ1.e4666_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363120.Sherpa.DAOD_TOPQ1.e4690_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363121.Sherpa.DAOD_TOPQ1.e4690_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363331.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363332.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363333.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363334.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363335.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363336.Sherpa.DAOD_TOPQ1.e4779_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363337.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363338.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363339.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363340.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363341.Sherpa.DAOD_TOPQ1.e4779_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363342.Sherpa.DAOD_TOPQ1.e4779_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363343.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363344.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363345.Sherpa.DAOD_TOPQ1.e4779_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363346.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363347.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363348.Sherpa.DAOD_TOPQ1.e4779_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363349.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363350.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363351.Sherpa.DAOD_TOPQ1.e4779_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363352.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363353.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363354.Sherpa.DAOD_TOPQ1.e4709_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363361.Sherpa.DAOD_TOPQ1.e4689_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363362.Sherpa.DAOD_TOPQ1.e4689_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363363.Sherpa.DAOD_TOPQ1.e4743_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363364.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363365.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363366.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363367.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363368.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363369.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363370.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363371.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363372.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363373.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363374.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363375.Sherpa.DAOD_TOPQ1.e4772_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363376.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363377.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363378.Sherpa.DAOD_TOPQ1.e4772_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363379.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363380.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363381.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363382.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363383.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363384.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363385.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363386.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363387.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363388.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363389.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363390.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363391.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363392.Sherpa.DAOD_TOPQ1.e4772_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363393.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363394.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363395.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363396.Sherpa.DAOD_TOPQ1.e4772_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363397.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363398.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363399.Sherpa.DAOD_TOPQ1.e4772_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363400.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363401.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363402.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363403.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363404.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363405.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363406.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363407.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363408.Sherpa.DAOD_TOPQ1.e4772_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363409.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363410.Sherpa.DAOD_TOPQ1.e4716_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363411.Sherpa.DAOD_TOPQ1.e4772_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363412.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363413.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363414.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363415.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363416.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363417.Sherpa.DAOD_TOPQ1.e4772_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363418.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363419.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363420.Sherpa.DAOD_TOPQ1.e4772_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363421.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363422.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363423.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363424.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363425.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363426.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363427.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363428.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363429.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363430.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363431.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363432.Sherpa.DAOD_TOPQ1.e4772_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363433.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363434.Sherpa.DAOD_TOPQ1.e4716_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363435.Sherpa.DAOD_TOPQ1.e4772_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363436.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363437.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363438.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363439.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363440.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363441.Sherpa.DAOD_TOPQ1.e4771_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363442.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363443.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363444.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363445.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363446.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363447.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363448.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363449.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363450.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363451.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363452.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363453.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363454.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363455.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363456.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363457.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363458.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363459.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363460.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363461.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363462.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363463.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363464.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363465.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363466.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363467.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363468.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363469.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363470.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363471.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363472.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363473.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363474.Sherpa.DAOD_TOPQ1.e4771_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363475.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363476.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363477.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363478.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363479.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363480.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363481.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363482.Sherpa.DAOD_TOPQ1.e4715_s2726_r7772_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.363483.Sherpa.DAOD_TOPQ1.e4715_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410000.PowhegPythiaEvtGen.DAOD_TOPQ1.e3698_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410011.PowhegPythiaEvtGen.DAOD_TOPQ1.e3824_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410012.PowhegPythiaEvtGen.DAOD_TOPQ1.e3824_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410013.PowhegPythiaEvtGen.DAOD_TOPQ1.e3753_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410014.PowhegPythiaEvtGen.DAOD_TOPQ1.e3753_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410015.PowhegPythiaEvtGen.DAOD_TOPQ1.e3753_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410016.PowhegPythiaEvtGen.DAOD_TOPQ1.e3753_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410017.PowhegPythiaEvtGen.DAOD_TOPQ1.e3978_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410018.PowhegPythiaEvtGen.DAOD_TOPQ1.e3978_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410019.PowhegPythiaEvtGen.DAOD_TOPQ1.e3978_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410020.PowhegPythiaEvtGen.DAOD_TOPQ1.e3978_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410025.PowhegPythiaEvtGen.DAOD_TOPQ1.e3998_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410026.PowhegPythiaEvtGen.DAOD_TOPQ1.e3998_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410047.PowhegHerwigppEvtGen.DAOD_TOPQ1.e4775_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410048.PowhegHerwigppEvtGen.DAOD_TOPQ1.e4775_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410062.PowhegPythiaEvtGen.DAOD_TOPQ1.e4132_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410063.PowhegPythiaEvtGen.DAOD_TOPQ1.e4132_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410066.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4111_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410067.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4111_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410068.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4111_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410073.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4631_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410074.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4631_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410075.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4631_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410080.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4111_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410081.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4111_s2608_s2183_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410099.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410100.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410101.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410102.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410107.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410108.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410109.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410110.PowhegPythiaEvtGen.DAOD_TOPQ1.e4403_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410111.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4632_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410112.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4632_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410113.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4632_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410114.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4632_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410115.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4632_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410116.MadGraphPythia8EvtGen.DAOD_TOPQ1.e4632_s2726_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410120.PowhegPythiaEvtGen.DAOD_TOPQ1.e4373_s2608_r7725_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410147.PowhegHerwigppEvtGen.DAOD_TOPQ1.e4595_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
+user.mcasolin.410148.PowhegHerwigppEvtGen.DAOD_TOPQ1.e4595_a766_a818_r7676_p2669.HtX4Tops_00-00-12_out.root/
diff --git a/python/batch_utils.py b/python/batch_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..f57e950df2e2d931d9534b6deaad774d1bd51c3f
--- /dev/null
+++ b/python/batch_utils.py
@@ -0,0 +1,786 @@
+"""
+This module provides utilities to interact with batch systems (lxplus or at3)
+
+Overall design:
+
+JobManager creates Jobs and JobSets
+Jobs    can be generated/submitted/checked/resubmitted
+JobSets can be generated/submitted/checked/resubmitted
+
+Each group of jobs has a label; all relevant files go in the directories below:
+
+batch/
+`--- <label>/
+     |--- input/
+     |--- log/
+     |--- output/
+     `--- status/
+
+Each sample will produce a status file sample.[done,fail] and a root file.
+
+TODO: to avoid having too many files in one place, think about having
+      subdirectories under output (for example by group + merged)
+
+davide.gerbaudo@gmail.com
+Jul 2016
+"""
+
+# import copy
+import glob
+import os
+# import re
+import collections
+
+from VLQAnalysis import utils
+from VLQAnalysis.sample_catalogues import HbsmSampleCatalogue, SampleCatalogue, LocalDiskInterface, RucioEosCernInterface
+
+#___________________________________________________________
+
+
+def base_directory():
+    """The base directory is the one above VLQAnalysis and RootCoreBin
+
+    All relative paths are relative to base_directory() or to the
+    working directory on the batch node.
+    """
+    python_dir = os.path.dirname(os.path.abspath(__file__))
+    up_two = (os.pardir, os.pardir)
+    return os.path.normpath(os.path.abspath(os.path.join(python_dir, *up_two)))
+
+def vlq_directory():
+    return base_directory()+'/VLQAnalysis'
+
+#___________________________________________________________
+
+class Job(object):
+    """A job with a script file and an expected ouput.
+    """
+    keyword_output_file = 'outputFile='
+    keyword_input_line = 'inputFile='
+    keyword_run_function = 'function run() {'
+    keyword_main_function = 'function main() {'
+    keyword_exe_line = 'VLQAnalysis'
+    def __init__(self, script_path, nickname=None):
+        self.script_path = script_path
+        self._expected_output_file = None
+        self._number_input_files = None
+        self._nickname = nickname # to get reasonable names for split jobs; otherwise = basename(script_path)
+
+    @property
+    def nickname(self):
+        "use to name the status and log files"
+        return (self._nickname if self._nickname else
+                utils.filename_without_extension(self.script_path)) # w/out ext, =sample name
+    @classmethod
+    def parse_expected_output_file(cls, file_path):
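+        # Illustrative example: a generated script contains a line such as
+        #     outputFile=batch/<label>/output/<sample>.root \
+        # (<label> and <sample> are placeholders); grep for the keyword,
+        # treat '=' as a separator, and keep the first '.root' token.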
+        cmd = "grep %s %s" % (Job.keyword_output_file, file_path)
+        out = utils.get_command_output(cmd)
+        tokens = out['stdout'].strip().replace('=', ' ').split()
+        output_file = next((t for t in tokens if '.root' in t), '')
+        output_file = output_file.strip()
+        return output_file
+
+    @classmethod
+    def parse_input_file(cls, file_path):
+        cmd = "grep %s %s" % (Job.keyword_input_line, file_path)
+        out = utils.get_command_output(cmd)
+        input_file = out['stdout'].split('=')[1].strip().split()[0]
+        return input_file
+
+    @classmethod
+    def parse_run_cmd_line(cls, file_path):
+        run_function = JobSet.extract_function(file_path, Job.keyword_run_function)
+        run_line     = ' '.join(l for l in utils.drop_continuation_lines(run_function.split('\n'))
+                                # if Job.keyword_run_function in l  # TODO: this filter seems like it should be needed, but it works without it; investigate
+                                )
+        return run_line
+
+    @classmethod
+    def has_multiple_configuration_lines(cls, input_filelist):
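+        # A filelist with more than one configuration block cannot be split
+        # naively. Illustrative example returning True (assuming the keyword
+        # is the '# config:' marker mentioned in split_filelist below):
+        #     # config: <options for block 1>
+        #     fileA.root
+        #     # config: <options for block 2>
+        #     fileB.root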
+        number_of_config_blocks = 0
+        in_config_block = False
+        keyword = SampleCatalogue.keyword_job_option
+        for line in SampleCatalogue.read_lines_from_txt(input_filelist,
+                                                        keywords_useful_comment_line=[keyword]):
+            if in_config_block and keyword in line:
+                continue
+            elif keyword in line:
+                in_config_block = True
+                number_of_config_blocks += 1
+            else:
+                in_config_block = False
+
+        return number_of_config_blocks>1
+
+    @property
+    def expected_output_file(self):
+        if not self._expected_output_file:
+            self._expected_output_file = self.parse_expected_output_file(file_path=self.script_path)
+            if not self._expected_output_file:
+                raise RuntimeError("cannot extract output file from %s" % self.script_path)
+        return self._expected_output_file
+    @property
+    def expected_output_files(self):
+        "Just to provide the same interface as JobSet"
+        return [self.expected_output_file]
+    @property
+    def number_input_files(self):
+        if not self._number_input_files:
+            cmd = 'grep "inputFile=" '+self.script_path
+            out = utils.get_command_output(cmd)
+            filelist_path = next((t for t in out['stdout'].split() if 'inputFile' in t), None) # find inputFile keyword
+            filelist_path = filelist_path.split('=')[1].strip() # take what follows =
+            filelist_path = filelist_path.split()[0].strip() # and drop any subsequent words
+            self._number_input_files = sum(1 for l in open(filelist_path).readlines() if not l.strip().startswith('#'))
+        return self._number_input_files
+    @classmethod
+    def is_completed(cls, job):
+        "make it a classmethod so that we can use it also for JobSet"
+        out_filenames = job.expected_output_files
+        expect = len(out_filenames)
+        done = sum(1 for f in out_filenames if os.path.exists(f))
+        return expect==done
+
+#___________________________________________________________
+
+class JobSet(object):
+    """A job with a script file and multiple expected ouputs.
+
+    A jobset can be built either merging several jobs or from a merged script.
+    """
+    def __init__(self, script_path=None, jobs=[], verbose=False):
+        self.script_path = None
+        self._expected_output_files = []
+        if not script_path:
+            raise NotImplementedError("JobSet requires a script path")
+        from_merged_script = os.path.exists(script_path) and not jobs
+        from_jobs_to_merge = jobs and len(jobs)>0
+        if from_merged_script==from_jobs_to_merge:
+            raise NotImplementedError("Invalid arguments: script_path %s, %d jobs"%(script_path,
+                                                                                    len(jobs)))
+        if from_merged_script:
+            self.script_path = script_path
+            self._expected_output_files = JobSet.parse_expected_output_files(script_path)
+        else:
+            pre_merge_script_paths = [j.script_path for j in jobs]
+            self.script_path = JobSet.merge_scripts(orig_job_script_paths=pre_merge_script_paths,
+                                                    dest_script_path=script_path)
+            os.chmod(self.script_path, 0755)
+            JobSet.delete_pre_merge_scripts(file_paths=pre_merge_script_paths)
+            self._expected_output_files = JobSet.parse_expected_output_files(script_path)
+            if verbose:
+                print "merged into\n > %s\nfrom%s" % (script_path, '\n< '.join(['']+pre_merge_script_paths))
+
+    @property
+    def expected_output_files(self):
+        "No need to cache here: the constructor should extract from the script file (which is always there)"
+        if not self._expected_output_files:
+            raise RuntimeError("something went wrong when parsing %s?" % self.script_path)
+        return self._expected_output_files
+
+    @classmethod
+    def parse_expected_output_files(cls, file_path, expected_keyword='outputFile='):
+        "same as Job.parse_expected_output_file, but with multiple output files"
+        cmd = "grep %s %s" % (expected_keyword, file_path)
+        out = utils.get_command_output(cmd)
+        lines = out['stdout'].split('\n')
+        filenames = []
+        for line in lines:
+            tokens = line.strip().replace('=', ' ').split()
+            output_file = next((t for t in tokens if '.root' in t), '')
+            if output_file and output_file.strip():
+                filenames.append(output_file.strip())
+        return filenames
+
+    @classmethod
+    def merge_scripts(cls, orig_job_script_paths=[], dest_script_path=None):
+        """merge the batch job scripts.
+
+        Modify the first script and plug in the 'run' functions from
+        all the subsequent ones. The subsequent scripts are deleted
+        after the merge.
+        """
+        template_path = orig_job_script_paths[0]
+        template_contents = open(template_path).read()
+        run_function  = JobSet.extract_function(template_path, Job.keyword_run_function)
+        main_function = JobSet.extract_function(template_path, Job.keyword_main_function)
+
+        run_functions = [JobSet.extract_function(j, Job.keyword_run_function) for j in orig_job_script_paths]
+        run_functions = [f.replace(Job.keyword_run_function,
+                                   Job.keyword_run_function.replace('run', "run%02d"%i))
+                         for i, f in enumerate(run_functions)]
+        main_function_new = '\n'.join(l if not l.strip()=='run' else
+                                      '\n'.join(l.replace('run', "run%02d"%i)
+                                                for i in range(len(run_functions)))
+                                      for l in main_function.split('\n'))
+        script_contents = (template_contents
+                           .replace(main_function, main_function_new)
+                           .replace(run_function,
+                                    '\n'.join(run_functions)))
+        with open(dest_script_path, 'w') as output_script:
+            output_script.write(script_contents)
+        return dest_script_path
+
+    @classmethod
+    def delete_pre_merge_scripts(cls, file_paths=[]):
+        for f in file_paths:
+            os.remove(f)
+
+    @classmethod
+    def extract_function(cls, script_path, starting_token):
+        """extract the 'run' bash function from the script
+
+        Note to self: cannot use regex because they cannot parse nested structures.
+        """
+        if not ('function' in starting_token and '{' in starting_token):
+            raise NotImplementedError("invalid starting_token, must contain 'function and ''{': \n''%s'"%starting_token)
+        function_lines = []
+        with open(script_path) as input_file:
+            contents = input_file.read()
+            if starting_token in contents:
+                begin_pos = contents.find(starting_token)
+                contents = contents[begin_pos:]
+                scope_counter = 0
+                for line in contents.split('\n'):
+                    if line.strip().startswith('#'):
+                        function_lines.append(line)
+                        continue
+                    open_curly_counter = line.count('{')
+                    close_curly_counter = line.count('}')
+                    scope_counter += (open_curly_counter - close_curly_counter)
+                    function_lines.append(line)
+                    if scope_counter==0:
+                        break
+        return '\n'.join(function_lines)
+
+    @property
+    def nickname(self):
+        "use to name the status and log files"
+        return utils.filename_without_extension(self.script_path) # w/out ext
+
+#___________________________________________________________
+
+class JobSplit(object):
+    """A job split in several subjobs.
+
+    A jobset can be built either splitting an existing job or from several splitted script.
+
+    The input/log/output/status files will be stored in the
+    'split/nnn/mmm' subdirectories of each directory used by
+    JobManager, where nnn is a counter of the JobSplit for this
+    session (i.e. JobManager lable) and mmm is the index of the
+    children job.
+    """
+    existing_jobsplit_counter = 0 # used to keep track of NN
+    def __init__(self, base_job=None, children_jobs=[], job_manager=None, max_n_input_files=10, verbose=False):
+        need_to_write_children_scripts = len(children_jobs)==0
+        self._expected_output_file = None
+        self.requires_merging = True
+        if not need_to_write_children_scripts:
+            self.parent_job = base_job
+            self.children_jobs = children_jobs
+        else:
+            self.parent_job = base_job
+            self.children_jobs = []
+            nnn = "%03d" % JobSplit.existing_jobsplit_counter
+            input_filelist = Job.parse_input_file(file_path=base_job.script_path)
+            filelist_dir = os.path.dirname(input_filelist)
+            dirs_to_modify = [job_manager.relative_input_directory,
+                              job_manager.relative_log_directory,
+                              job_manager.relative_output_directory,
+                              job_manager.relative_status_directory]
+            script_dir = os.path.dirname(base_job.script_path)
+            script_content = open(base_job.script_path).read()
+            run_function = JobSet.extract_function(base_job.script_path, Job.keyword_run_function)
+            sub_filelists = JobSplit.split_filelist(orig_filelist=input_filelist,
+                                                    dest_subdir=os.path.join(filelist_dir, nnn),
+                                                    max_n_input_files=max_n_input_files)
+            for iJob, sub_filelist in enumerate(sub_filelists):
+                mmm = "%03d" % iJob
+                job_manager.create_directories(subdir=nnn+'/'+mmm, verbose=False)
+                script_dest = utils.mkdir_if_needed(script_dir+'/'+nnn)+'/'+mmm+'.sh'
+                new_run_function = run_function.replace(input_filelist, sub_filelist)
+                for dtm in dirs_to_modify:
+                    new_run_function = new_run_function.replace(dtm, dtm+'/'+nnn+'/'+mmm)
+                with open(script_dest, 'w') as of:
+                    of.write(script_content.replace(run_function, new_run_function))
+                os.chmod(script_dest, 0755)
+                self.children_jobs.append(Job(script_dest, nickname=nnn+'_'+mmm))
+            if verbose:
+                print "split job in %d subjobs (subdir '%s') %s" % (len(self.children_jobs), nnn, self.parent_job.script_path)
+            JobSplit.existing_jobsplit_counter += 1
+
+    @classmethod
+    def split_filelist(cls, orig_filelist=None, dest_subdir=None, max_n_input_files=10):
+        "take a filelist, split it and return a list of smaller ones that have at most N input files"
+        if not (os.path.exists(orig_filelist) and orig_filelist.endswith('.txt')):
+            raise NotImplementedError("This does not look like a txt filelist: %s" % orig_filelist)
+        input_lines = []
+        with open(orig_filelist) as input_file:
+            input_lines = [l.strip() for l in input_file.readlines()]
+            if Job.has_multiple_configuration_lines(orig_filelist):
+                raise NotImplementedError("Cannot split a filelists containing multiple '# config:' blocks:"
+                                          "\n %s; split it." % orig_filelist)
+        utils.mkdir_if_needed(dest_subdir)
+        sub_job_counter = 0
+        sub_job_lines = []
+        comment_lines = [l for l in input_lines if l.startswith('#')] # these are propagated to each subfile
+        other_lines = [l for l in input_lines if not l.startswith('#')]
+        sub_filelists = []
+        for line in other_lines:
+            sub_job_lines.append(line)
+            if len(sub_job_lines) >= max_n_input_files:
+                sub_job_filelist = "%s/%03d.txt" % (dest_subdir, sub_job_counter)
+                with open(sub_job_filelist, 'w') as of:
+                    of.write('\n'.join(comment_lines + sub_job_lines + ['']))
+                sub_job_counter += 1
+                sub_job_lines = []
+                sub_filelists.append(sub_job_filelist)
+        if len(sub_job_lines):
+            sub_job_filelist = "%s/%03d.txt" % (dest_subdir, sub_job_counter)
+            with open(sub_job_filelist, 'w') as of:
+                of.write('\n'.join(comment_lines + sub_job_lines + ['']))
+            sub_job_counter += 1
+            sub_filelists.append(sub_job_filelist)
+        return sub_filelists
+
+    @property
+    def expected_output_file(self):
+        "TODO remove duplication with Job "
+        if not self._expected_output_file:
+            self._expected_output_file = self.parse_expected_output_file(file_path=self.script_path)
+            if not self._expected_output_file:
+                raise RuntimeError("cannot extract output file from %s" % self.script_path)
+        return self._expected_output_file
+
+    @property
+    def expected_output_files(self):
+        if self.requires_merging:
+            return [j.expected_output_file for j in self.children_jobs]
+        else:
+            return [self.expected_output_file]
+
+
+    # TODO
+    # @property
+    # def expected_output_files(self):
+    #     "No need to cache here: the constructor should extract from the script file (which is always there)"
+    #     if not self._expected_output_files:
+    #         raise RuntimeError("something went wrong when parsing %s?" % self.script_path)
+    #     return self._expected_output_files
+#___________________________________________________________
+
+class SplitMap(object):
+    """Used to keep track of how jobs are split
+
+    Just a list of lines, where in each line the first word is the
+    master script_path, and the following ones are the children script
+    paths. Mainly useful for __str__.
+    """
+    def __init__(self, splitjobs=[]):
+        self.splitjobs = splitjobs
+    def __str__(self):
+        return '\n'.join(' '.join([jm.parent_job.script_path]+
+                                  [jc.script_path for jc in jm.children_jobs])
+                         for jm in self.splitjobs)
+
+#___________________________________________________________
+
+class JobManager(object):
+    """Manage a set of jobs; all inputs/outputs will be under batch/<jobset_label>
+
+    A split_map is used to keep track of what job goes into which subjobs.
+    """
+    def __init__(self, jobset_label, verbose=False, debug=False, overwrite_batch_scripts=False):
+        self.jobset = jobset_label
+        self.queues = []
+        self._queue = None
+        self.absolute_base_dir = base_directory()
+        self.dry_run = True
+        self.jobs = []
+        self._tar_file = None
+        self.template_path = None
+        self._template_contents = None # cached
+        self.verbose = verbose
+        self.debug = debug
+        self.overwrite_tar = False
+        self.overwrite_batch_scripts = overwrite_batch_scripts
+        self.rc_release_version = guess_rc_release_version()
+        self.create_directories()
+        existing_scripts = glob.glob(self.relative_input_directory+'/*.sh')
+        if existing_scripts and not overwrite_batch_scripts:
+            using_sample_scripts = all('jobset' not in f for f in existing_scripts)
+            using_merged_scripts = all('jobset' in f for f in existing_scripts)
+            using_split_scripts = os.path.exists(self.split_map_file_path)
+            if using_merged_scripts==using_sample_scripts:
+                raise NotImplementedError("Cannot handle a mix of merged/unmerged scripts from %s" % self.relative_input_directory)
+            self.jobs = [JobSet(script_path=f, verbose=self.verbose) if using_merged_scripts else
+                         Job(script_path=f)
+                         for f in sorted(existing_scripts)]
+            if using_split_scripts:
+                self.read_split_map()
+        elif verbose:
+            print "JobManager: you now need to loop over samples/variations and create the jobs"
+
+    def __del__(self):
+        self.save_split_map()
+
+    @property
+    def split_map_file_path(self):
+        return self.relative_status_directory+'/split_map.txt'
+
+    def save_split_map(self):
+        splitjobs = self.get_split_jobs()
+        if splitjobs:
+            split_map = SplitMap(splitjobs=splitjobs)
+            with open(self.split_map_file_path, 'w') as of:
+                of.write(str(split_map))
+            if self.debug:
+                print "saved split map in %s" % self.relative_status_directory
+
+    def read_split_map(self):
+        "Read the txt file, and replace the master Job with its corresponding JobSplit"
+        split_lines = []
+        map_path = self.split_map_file_path
+        with open(map_path, 'r') as input_file:
+            split_lines = [l.strip() for l in input_file.readlines() if l.strip()]
+        if not split_lines:
+            return
+        master_counts = collections.Counter([l.split()[0] for l in split_lines])
+        most_common = master_counts.most_common()
+        max_label, max_occurrences = most_common[0]
+        if max_occurrences!=1:
+            raise NotImplementedError("The split map %s is invalid: '%s' appears %d times" % (map_path, max_label, max_occurrences))
+        nnn = 0
+        for line in split_lines:
+            tokens = line.split()
+            if len(tokens)<len(['master', 'child0', 'child1']):
+                raise NotImplementedError("Cannot parse split map line: too few elements.\n > %s"%line)
+            master_script = tokens[0]
+            children_scripts = tokens[1:]
+            iJob = next(i for i, job in enumerate(self.jobs) if type(job)==Job and job.script_path==master_script)
+            self.jobs[iJob] = JobSplit(base_job=self.jobs[iJob],
+                                       children_jobs=[Job(script_path=cs, nickname="%03d_%03d"%(nnn, mmm))
+                                                      for mmm, cs in enumerate(children_scripts)],
+                                       verbose=False)
+            nnn += 1
+        if self.debug:
+            print "read %d split jobs from %s" % self.relative_status_directory
+
+    def get_split_jobs(self): # just because 'split_jobs' is already a member function
+        return [j for j in self.jobs if type(j) is JobSplit]
+
+    @property
+    def needs_to_generate_scripts(self):
+        return self.overwrite_batch_scripts or not self.jobs
+
+    def create_job(self, sample, systematic, variation, template_path=None):
+        "This will need access to several specialised attributes (template, dirs, etc.)"
+        raise NotImplementedError("create_job should be implemented in each specialised JobManager class")
+
+    def generic_create_job(self, sample, systematic, variation, template_path=None):
+        """create the script and append Job to self.jobs template_path
+        should be used only for special samples using non-default
+        template; for all other cases go through the
+        implementation-specific 'create_job'.
+        """
+        template_path = template_path if template_path else self.template_path
+        template_contents = (self.template_contents if template_path==self.template_path # use cache if default
+                             else open(template_path).read()) # otherwise read it
+        sample_name = sample.full_name.strip('/')
+        # sample_name = sample.short_name # maybe this should be configurable? does the plotting/fitting code depend on it?
+        is_nominal = systematic.is_nominal
+        job_label = self.job_label(sample_name=sample_name, variation_name=variation.name)
+        parameters = {'sample_name' : sample_name,
+                      'tar_file' : self.tar_file,
+                      'absolute_base_dir' : self.absolute_base_dir,
+                      'relative_input_dir' : self.relative_input_directory,
+                      'relative_log_dir' : self.relative_log_directory,
+                      'relative_output_dir' : self.relative_output_directory,
+                      'relative_status_dir' : self.relative_status_directory,
+                      'filelist_name' : variation.filelist,
+                      'input_tree' : variation.input_tree,
+                      'output_file' : (sample_name+'.root' if is_nominal else
+                                       "%s_%s.root" % (sample_name, variation.name)),
+                      'dsid' : sample.dsid,
+                      'compute_weight_sys' : ('true' if is_nominal else 'false'),
+                      'job_label' : job_label,
+                      'other_options' : '',
+                      'rc_release_version' : self.rc_release_version
+                      }
+        batch_filename = self.relative_input_directory+'/'+sample_name+'_'+variation.name+'.sh'
+        if os.path.exists(batch_filename) and not self.overwrite_batch_scripts:
+            if self.debug:
+                print "using existing %s" % batch_filename
+        else:
+            if self.debug:
+                print "generating %s" % (batch_filename)
+            batch_file = open(batch_filename, 'w')
+            batch_file.write(template_contents.format(**parameters))
+            batch_file.close()
+            os.chmod(batch_filename, 0755)
+            if self.verbose:
+                print "created batch file %s" % batch_filename
+        self.jobs.append(Job(batch_filename))
+
+    def split_jobs(self, split_every_n=10):
+        "replace large jobs with jobsplits"
+        for iJob, job in enumerate(self.jobs):
+            if job.number_input_files>split_every_n:
+                self.jobs[iJob] = JobSplit(base_job=job, job_manager=self, max_n_input_files=split_every_n, verbose=self.verbose)
+
+    def merge_jobs(self, min_n_input_files=10):
+        "replace jobs with merged jobsets"
+        jobs = []
+        jobsets = []
+        number_of_input_files = 0
+        for job in self.jobs:
+            number_of_input_files += job.number_input_files
+            jobs.append(job)
+            if number_of_input_files > min_n_input_files:
+                jobsets.append(JobSet(script_path="%s/jobset%03d.sh"% (self.relative_input_directory, len(jobsets)),
+                                      jobs=jobs,
+                                      verbose=self.verbose))
+                jobs = []
+                number_of_input_files = 0
+        if number_of_input_files:
+            jobsets.append(JobSet("%s/jobset%03d.sh"% (self.relative_input_directory, len(jobsets)),
+                                  jobs=jobs,
+                                  verbose=self.verbose))
+        self.jobs = jobsets
+
+    def submit_jobs(self):
+        submission_commands = []
+        for job in self.jobs:
+            if type(job) is JobSplit:
+                for subjob in job.children_jobs:
+                    cmd = self.job_submission_command(queue=self.queue, verbose=self.verbose,
+                                                      base_dir=self.absolute_base_dir, job=subjob)
+                    submission_commands.append(cmd)
+            else:
+                cmd = self.job_submission_command(queue=self.queue, verbose=self.verbose,
+                                                  base_dir=self.absolute_base_dir, job=job)
+                submission_commands.append(cmd)
+        for cmd in submission_commands:
+            if self.verbose:
+                print cmd
+            if not self.dry_run:
+                out = utils.get_command_output(cmd)
+                # status_path = os.path.join(self.relative_status_directory, job.nickname+'.submitted')
+                # with open(status_path, 'w') as status_file:
+                #     status_file.write('stdout:\n'+out['stdout']+
+                #                       'stderr:\n'+ out['stderr'])
+        if self.dry_run:
+            print "This was a dry run. To actually submit the jobs run with '--submit'"
+    def check_outputs(self):
+        counter_job_any = 0
+        counter_job_done = 0
+        counter_job_miss = 0
+        files_done = []
+        files_miss = []
+        for job in self.jobs:
+            out_filenames = job.expected_output_files
+            done = [f for f in out_filenames if os.path.exists(f)]
+            miss = [f for f in out_filenames if f not in done]
+            files_miss.extend(miss)
+            files_done.extend(done)
+            all_done = len(miss)==0
+            counter_job_any += 1
+            counter_job_done += (1 if all_done else 0)
+            counter_job_miss += (0 if all_done else 1)
+        print "Checked %d jobs: %d done, %d missing" % (counter_job_any, counter_job_done, counter_job_miss)
+        print "\t  (%d/%d output files)" % (len(files_done), len(files_done)+len(files_miss))
+        if self.verbose:
+            print 'Missing files:\n'+'\n'.join(files_miss)
+
+    def resubmit_failed_jobs(self):
+        """For merged jobs, now resubmitting the whole script (even when partially done)
+
+        TODO implement a smarter JobSet resubmit where only the
+        required 'runN' functions are called, and the others are
+        commented out.
+        """
+        self.check_outputs()
+        unfinished_jobs = []
+        for job in self.jobs:
+            if type(job) is JobSplit:
+                unfinished_subjobs = [cj for cj in job.children_jobs if not Job.is_completed(cj)]
+                unfinished_jobs.extend(unfinished_subjobs)
+            elif not Job.is_completed(job):
+                unfinished_jobs.append(job)
+        self.jobs = unfinished_jobs
+        # note to self: this will not leave any partial JobSplit in the SplitMap, avoiding that it gets corrupted
+        if self.verbose:
+            print "about to resubmit %d failed jobs" % len(self.jobs)
+        self.submit_jobs()
+
+    @property
+    def relative_input_directory(self):
+        "where the job script files will be generated"
+        return 'batch/'+self.jobset+'/input'
+    @property
+    def relative_log_directory(self):
+        return 'batch/'+self.jobset+'/log'
+    @property
+    def relative_output_directory(self):
+        return 'batch/'+self.jobset+'/output'
+    @property
+    def relative_status_directory(self):
+        return 'batch/'+self.jobset+'/status'
+    def create_directories(self, subdir='', verbose=False):
+        for d in [self.relative_input_directory, self.relative_log_directory,
+                  self.relative_output_directory, self.relative_status_directory]:
+            dir_path = utils.mkdir_if_needed(d+(('/'+subdir) if subdir else ''), verbose=verbose)
+    def job_label(self, sample_name=None, variation_name=None):
+        "The label used to distinguish one job from another."
+        job_label = sample_name+'_'+variation_name
+        return job_label
+    def default_tar_file(self):
+        return base_directory()+'/'+self.relative_input_directory+'/packages.tgz'
+    def check_tar_file(self, path):
+        if not os.path.exists(path) or self.overwrite_tar:
+            self.prepare_tar_file(tar_file_name=path)
+        elif self.verbose:
+            print "Using existing tar file: %s"%path
+
+    def prepare_tar_file(self, tar_file_name=None):
+        "create tar; behaves as GNU 'tar', i.e. by default it overwrites existing files"
+        cmd  = "tar czf  %s " % tar_file_name
+        cmd += " BtaggingTRFandRW IFAEReweightingTools IFAETopFramework VLQAnalysis"
+        cmd += " --exclude='.svn' --exclude='.git' --exclude='*.o' --exclude='*.so'"
+        out = utils.get_command_output(cmd)
+        if out['returncode']!=0:
+            print "Failed to create tar file %s" % tar_file_name
+            print out['stderr']
+            print out['stdout']
+        elif self.verbose:
+            print out['stderr']
+            print out['stdout']
+            print "Created tar file %s" % tar_file_name
+
+    @property
+    def tar_file(self):
+        if not self._tar_file: # set value on first call (and check existence if necessary)
+            self._tar_file = self.default_tar_file()
+            self.check_tar_file(self._tar_file)
+        return self._tar_file
+    @tar_file.setter
+    def tar_file(self, value):
+        value = os.path.abspath(value)
+        self.check_tar_file(value)
+        self._tar_file = value
+    def default_queue(self):
+        return self.queues[0]
+    @property
+    def queue(self):
+        if not self._queue: # set value on first call (and check existence if necessary)
+            self._queue = self.default_queue()
+        return self._queue
+    @queue.setter
+    def queue(self, value):
+        if value not in self.queues:
+            raise ValueError("invalid queue '%s', must be from %s" % (value, str(self.queues)))
+        self._queue = value
+    @property
+    def template_contents(self):
+        if not self._template_contents:
+            self._template_contents = open(self.template_path).read()
+        return self._template_contents
+
+
+
+#___________________________________________________________
+
+class LxbJobManager(JobManager):
+    "Job manager for lxbatch queues at cern"
+    def __init__(self, jobset_label, **kwargs):
+        super(LxbJobManager, self).__init__(jobset_label, **kwargs)
+        self.queues = ['8nm', '1nh', '8nh', '1nd', '2nd', '1nw', '2nw', 'test'] # bqueues -u ${USER}
+        self.template_path = 'VLQAnalysis/data/hbsm/batch/templates/cern/one_lep_nom.sh'
+    def job_submission_command(self, queue=None, verbose=None, base_dir=None, job=None):
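+        # An illustrative resulting command (label/sample/paths assumed):
+        #   bsub -L /bin/bash -q 8nh -J sample_nominal \
+        #        -o batch/<label>/log/sample_nominal.oe <base_dir>/batch/<label>/input/sample_nominal.sh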
+        cmd = (" bsub "
+               +" -L /bin/bash " # reset shell
+               +" -q %s " % queue
+               # not sure this is working
+               # +" -o %s/tthf-trex-utils/batch/log/%s_%s.oe" % (base_dir, opts.label, nickname)
+               +" -J %s " % job.nickname
+               +" -o %s.oe " % (self.relative_log_directory+'/'+job.nickname)
+               +" %s" % os.path.join(base_dir, job.script_path)
+               )
+        return cmd
+    def create_job(self, sample, systematic, variation, template_path=None):
+        self.generic_create_job(sample, systematic, variation, template_path)
+
+
+
+#___________________________________________________________
+
+class At3JobManager(JobManager):
+    "Job manager for at3 queues at pic"
+    def __init__(self, jobset_label, **kwargs):
+        super(At3JobManager, self).__init__(jobset_label, **kwargs)
+        self.queues = ['at3_short', 'at3', 'at3_8h', 'at3_xxl']
+        self.template_path = 'VLQAnalysis/data/hbsm/batch/templates/ifae/one_lep_nom.sh'
+    def create_job(self, sample, systematic, variation, template_path=None):
+        self.generic_create_job(sample, systematic, variation, template_path)
+    def job_submission_command(self, queue=None, verbose=None, base_dir=None, job=None):
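+        # An illustrative resulting command (label/sample/paths assumed):
+        #   qsub -j oe -o batch/<label>/log/sample_nominal.oe -q at3 <base_dir>/batch/<label>/input/sample_nominal.sh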
+        cmd = (" qsub "
+               +" -j oe " # join stdout and stderr
+               +" -o %s.oe " % (self.relative_log_directory+'/'+job.nickname)
+               +" -q %s " % queue
+               +" %s" % os.path.join(base_dir, job.script_path)
+               )
+        return cmd
+
+def guess_batch_platform():
+    out = utils.get_command_output('hostname --domain')
+    domain = out['stdout'].strip()
+    if domain=='cern.ch':
+        return 'lxbatch'
+    elif domain=='pic.es':
+        return 'at3'
+    else:
+        raise NotImplementedError("unknown domain '%s'; only pic.es and cern.ch are currently supported" % domain)
+
+def guess_rc_release_version():
+    # out = utils.get_command_output("lsetup 'rcSetup --printMyRelease'")
+    # out = utils.get_command_output("rcSetup --printMyRelease", with_current_environment=True)
+    # print 'release: out >>>>>>>>>>> ',out['stdout'].strip()
+    # print 'release: err >>>>>>>>>>> ',out['stderr'].strip()
+    # rc_release_version = out['stdout'].strip()
+    # TODO the solution above does not work; for now hardcode it here (at least it is in one single place)
+    rc_release_version = 'Base 2.4.14'
+    return rc_release_version
+
+#___________________________________________________________
+
+if __name__=='__main__':
+    print "Testing job manager"
+
+    # sc_hbsm = HbsmSampleCatalogue()
+    # # sc_hbsm.add_samples_from_group_files(glob.glob('VLQAnalysis/data/groups/hbsm/*.txt'))
+    # sc_hbsm.add_samples_from_group_files(glob.glob('VLQAnalysis/data/groups/hbsm/hbsm.txt'))
+    # sc_hbsm.samples = sc_hbsm.add_systematic_variations(sc_hbsm.samples)
+    # input_from_dir = LocalDiskInterface(filelist_dir='VLQAnalysis/data/filelist',
+    #                                     base_input_dir='/tmp/gerbaudo/rucio')
+    # input_from_eos = RucioEosCernInterface()
+    # sc_hbsm.add_filelists(samples=sc_hbsm.samples, input_interface=input_from_eos)
+    # # sc_hbsm.add_filelists(samples=sc_hbsm.samples, input_interface=input_from_dir)
+
+    # job_manager = LxbJobManager('test_2016-10-19')
+    # job_manager.queue = '8nh'
+    # job_manager.verbose = True
+    # job_manager.dry_run = True # False
+    # for sample in sc_hbsm.samples:
+    #     for systematic in sample.systematic_uncertainties:
+    #         for variation in [v for v in systematic.variations if v.name=='nominal']:
+    #             job_manager.create_job(sample, systematic, variation)
+    # job_manager.submit_jobs()
+
+    print 'testing JobSet.extract_function'
+    print JobSet.extract_function(script_path='batch/test_2016-10-24/input/user.mcasolin.341543.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root_nominal.sh',
+                            starting_token=Job.keyword_run_function)
+
+    print 'testing Job.parse_run_cmd_line'
+    print Job.parse_run_cmd_line('batch/test_2016-10-26b/input/user.mcasolin.341541.aMcAtNloPythia8EvtGen.DAOD_TOPQ1.e4336_a766_a821_r7676_p2669.HtX4Tops_00-00-12_out.root_nominal.sh')
diff --git a/python/hbsm_submit.py b/python/hbsm_submit.py
index dda8876a1f2dada13c7f165a4cb34d7921b09647..8490e545346e50031f74b3505d939a35428899bb 100755
--- a/python/hbsm_submit.py
+++ b/python/hbsm_submit.py
@@ -1,18 +1,15 @@
 #!/bin/env python
 
-# TODO add Job class with status
-
 # davide.gerbaudo@gmail.com
 # Jun 2016
 
+import glob
 import optparse
 import os
 
-
-from samples import hbsm as hbsm_samples
-from samples import systematics as hbsm_systematics
-from samples.sample import base_dir_lxplus as base_dir_lxplus
-from samples.sample import base_dir_at3pnfs as base_dir_at3pnfs
+import batch_utils
+import sample_catalogues
+import systematics
 import utils
 
 description = """
@@ -20,8 +17,19 @@ Submit to batch the jobs to run VLQAnalysis/util/VLQAnalysis.cxx
 """
 
 usage = """
-%prog -l bkgonly_2016-05-25
-%prog -l test_2016-07-12 --lxbatch -v -syst nominal --sample-include bbH_m1000
+First time from a new area:
+%prog -l bkgonly_2016-05-25 --generate-groupfiles
+
+Then:
+
+# prepare scripts
+%prog -l test_2016-10-19
+# just nominal, only hbsm samples, specify queue
+%prog -l test_2016-10-19 --groupfile VLQAnalysis/data/groups/hbsm/hbsm.txt  --syst nominal --queue 8nh --verbose
+# command to test the code locally on one sample
+%prog -l test_2016-10-27  --groupfile VLQAnalysis/data/groups/hbsm/hbsm.txt --sample-include 341541 --print-local
+# check the output root files
+%prog -l test_2016-10-19 --groupfile VLQAnalysis/data/groups/hbsm/hbsm.txt  --syst nominal --queue 8nh --check
 
 (run with -h to get all options)
 
@@ -42,31 +50,38 @@ The rules to go from the templates to the job-specific files are:
   Example: 'E_{T}{something}' -> 'E_{{T}}{{something}}'
   This can be done either in the template or in this script.
 
+TODO split large jobs
+TODO merge split outputs
 """
 
 def main():
 
-    default_at3_queue='at3'
-    default_lx_queue='8nh'
-    default_at3_batch_template='VLQAnalysis/data/hbsm/batch/templates/ifae/one_lep_nom.sh'
-    default_lx_batch_template='VLQAnalysis/data/hbsm/batch/templates/cern/one_lep_nom.sh'
-    lxbatch_queues = ['8nm', '1nh', '8nh', '1nd', '2nd', '1nw', '2nw', 'test'] # bqueues -u ${USER}
-    at3_queues = ['at3_short', 'at3', 'at3_8h', 'at3_xxl']
-
     parser = optparse.OptionParser(description=description, usage=usage)
     parser.add_option('-l', '--label', default=None, help='job label; used to make input/output subdirectories')
-    parser.add_option('-q', '--queue', help='queue, defaults %s on at3, %s on lxbatch'%(default_at3_queue, default_lx_queue))
-    parser.add_option('-s', '--syst', default='all', help="variations to process (default %default). Give a comma-sep list or say 'weight', 'object'")
-    parser.add_option('--syst-include', default='.*', help='include only the systematics matching the regexp')
-    parser.add_option('--syst-exclude', default=None, help='exclude the systematics matching the regexp')
+    parser.add_option('-q', '--queue', default=None)
+    # TODO fix syst option
+    parser.add_option('-s', '--syst', default='nominal', help="variations to process ('weight', 'object', default %default).")
     parser.add_option('--list-systematics', action='store_true', help='list the systematics available in the catalogue')
-    parser.add_option('--sample-include', default='.*', help='include only the samples matching the regexp (short name)')
-    parser.add_option('--sample-exclude', default=None, help='exclude the samples matching the regexp (short name)')
+    # parser.add_option('--syst-include', default='.*', help='include only the systematics matching the regexp')
+    # parser.add_option('--syst-exclude', default=None, help='exclude the systematics matching the regexp')
+    parser.add_option('--check', action='store_true', help='check the root output files')
+    parser.add_option('--resubmit', action='store_true', help='resubmit failed jobs')
+    # TODO check whether the sample filtering works only with the --overwrite-scripts option
+    parser.add_option('--sample-include', default='.*', help='include only the samples matching the regexp (short name if available, else full_name)')
+    parser.add_option('--sample-exclude', default=None, help='exclude the samples matching the regexp (short name if available, else full_name)')
     parser.add_option('-S', '--submit', action='store_true', help='actually submit the jobs')
-    parser.add_option('--lxbatch', action='store_true', help='lxbatch rather than at3')
-    parser.add_option('--batch-template', help='batch template, default %s or %s'%(default_at3_batch_template, default_lx_batch_template))
+    parser.add_option('--batch-template', help='batch template; otherwise use default one from JobManager')
     parser.add_option('--tarfile', default=None, help='the tar file will contain the code')
-    parser.add_option('--output-dir', default='batch/output/', help='output base directory (will contain job subdirectories), default %default')
+    parser.add_option('--overwrite-tar', action='store_true', help='re-create tar even when it exists')
+    parser.add_option('--overwrite-scripts', action='store_true', help='re-create the batch scripts even when they exist')
+    parser.add_option('--generate-groupfiles', action='store_true', help='generate group files')
+    parser.add_option('--generate-filelists', action='store_true', help='generate input file lists')
+    parser.add_option('--groupfile', default=None, help='if you just want to run on one group file, eg. data/groups/hbsm/hbsm.txt')
+    parser.add_option('--input-from', default='rucioeos',
+                      help='Where the ntuples are stored; see sample_catalogues.InputDataInterface')
+    parser.add_option('--merge-fewer', default=1, type=int, help='merge jobs if fewer than N input files (default %default)')
+    parser.add_option('--split-larger', default=500, type=int, help='split jobs if more than N input files (default %default)')
+    parser.add_option('--print-local', action='store_true', help='print the command to run locally')
     parser.add_option('-v', '--verbose', action='store_true', help='print what it is doing')
     parser.add_option('-d', '--debug', action='store_true', help='print even more debugging information')
 
@@ -75,162 +90,91 @@ def main():
         parser.error('You must provide a label option')
     if opts.label and opts.label[0].isdigit():
         parser.error('Label cannot start with a digit')
+    if not is_valid_input(opts):
+        parser.error('Invalid --input-from')
+    if opts.resubmit and opts.overwrite_scripts:
+        parser.error('These two options are not compatible: --resubmit --overwrite-scripts')
     if opts.list_systematics:
-        hbsm_systematics.SystematicCatalogue().print_all()
+        systematics.SystematicCatalogue().print_all()
         return
 
-    opts.queue = (opts.queue if opts.queue else
-                  default_lx_queue if opts.lxbatch else
-                  default_at3_queue)
-    template_batch = (opts.batch_template if opts.batch_template else
-                      default_at3_batch_template if opts.queue in at3_queues else
-                      default_lx_batch_template if opts.queue in lxbatch_queues else
-                      None)
-    if not template_batch:
-        parser.error('Invalid batch configuration, check queue or template')
-
+    batch_platform = batch_utils.guess_batch_platform()
+    job_manager = (batch_utils.At3JobManager if batch_platform=='at3' else
+                   batch_utils.LxbJobManager)
+    job_manager = job_manager(jobset_label=opts.label, verbose=opts.verbose, debug=opts.debug,
+                              overwrite_batch_scripts=opts.overwrite_scripts)
+    job_manager.dry_run = not opts.submit
+    if opts.queue: job_manager.queue = opts.queue
+    if opts.batch_template: job_manager.template_path = opts.batch_template
+    if opts.overwrite_tar: job_manager.overwrite_tar = True
     if opts.verbose:
         utils.print_running_conditions(parser, opts)
-        print "Preparing jobs using the following template:"
-        print "batch: %s" % template_batch
-
-    lxbatch = opts.queue in lxbatch_queues
-    at3 = opts.queue in at3_queues
-    samples_to_process = hbsm_samples.build_samples_list(base_dir_lxplus if lxbatch else base_dir_at3pnfs)
-    samples_to_process = (utils.filter_with_regexp(samples_to_process, opts.sample_include, func=lambda x: x.short_name)
-                          if opts.sample_include else samples_to_process)
-    samples_to_process = (utils.exclude_with_regexp(samples_to_process, opts.sample_exclude, func=lambda x: x.short_name)
-                          if opts.sample_exclude else samples_to_process)
-    batch_files = prepare_batch_files(opts=opts, template_filename=template_batch, samples=samples_to_process)
-    rel_out_dir = utils.mkdir_if_needed(relative_output_directory(opts))
-    for batch_file in batch_files:
-        submit_job(batch_file, opts, lxbatch=lxbatch, at3=at3)
-    if not opts.submit:
-        msg = ("This was a dry run, no jobs submitted" if not opts.verbose and not opts.debug else
-               "This was a dry run. Check your files in \n%s\n and then re-run with --submit"
-               % (relative_batch_directory(opts)))
-        print msg
-
-
-def relative_batch_directory(opts=None):
-    "where the job script files will be generated"
-    return 'batch/input/'+opts.label
-
-def relative_output_directory(opts=None):
-    "where the job script files will be generated"
-    return 'batch/output/'+opts.label
-
-def base_directory():
-    "The base directory is the one above TtHFitter and tthf-trex-utils"
-    python_dir = os.path.dirname(os.path.abspath(__file__))
-    up_two = (os.pardir, os.pardir)
-    return os.path.normpath(os.path.abspath(os.path.join(python_dir, *up_two)))
-
-def vlq_directory():
-    return base_directory()+'/VLQAnalysis'
-
-def vlq_job_label(sample_name=None, variation_name=None):
-    "The label used to distinguish one job from another."
-    job_label = sample_name+'_'+variation_name
-    return job_label
-
-def prepare_batch_files(opts=None, template_filename=None, samples=[]):
-    batch_dir = utils.mkdir_if_needed(relative_batch_directory(opts))
-    verbose = opts.verbose
-    absolute_output_base_dir = base_directory()
-    tar_file = opts.tarfile if opts.tarfile else "%s/%s/packages.tgz" % (base_directory(), batch_dir)
-    tar_file = os.path.abspath(tar_file)
-    if not os.path.exists(tar_file):
-        prepare_tar_file(tar_file_name=tar_file, verbose=opts.verbose)
-    # TODO : overwrite option
-    # TODO : for lxbatch might need additional job option parameters
-
-    batch_filenames = []
     if opts.debug:
-        print "filling template %s" % (template_filename)
-    template_contents = open(template_filename).read()
-    for sample in samples:
-        sample_name = sample.short_name
-        # sample_name = 'hbsm'
-        print('fixme hack around sample_name')
-        if opts.syst=='all':
-            sample.use_all_uncertainties()
-        elif opts.syst=='object':
-            sample.use_object_uncertainties()
-        elif opts.syst=='weight':
-            sample.use_weight_uncertainties()
-        else:
-            sample.use_nominal_uncertainty()
-        systematics = sample.systematic_uncertainties
-        systematics = (utils.filter_with_regexp(systematics, opts.syst_include, func=lambda x: x.name) if opts.syst_include else
-                       systematics)
-        systematics = (utils.exclude_with_regexp(systematics, opts.syst_exclude, func=lambda x: x.name) if opts.syst_exclude else
-                       systematics)
-        # should we also filter with the same regex on variation.input_tree? it might be a useful feature
-        for systematic in systematics:
-            is_nominal = systematic.is_nominal
-            for variation in systematic.variations:
-                job_label = vlq_job_label(sample_name, variation.name)
-                parameters = {'sample_name' : sample_name,
-                              'tar_file' : tar_file,
-                              'relative_output_dir' : relative_output_directory(opts),
-                              'absolute_output_base_dir' : absolute_output_base_dir,
-                              'filelist_name' : sample.filelist_file,
-                              'input_tree' : variation.input_tree,
-                              'output_file' : (sample_name+'.root' if is_nominal else
-                                               "%s_%s.root" % (sample_name, variation.name)),
-                              'dsid' : sample.dsid,
-                              'compute_weight_sys' : ('true' if is_nominal else 'false'),
-                              'job_label' : job_label,
-                              }
-                batch_filename = batch_dir+'/'+sample_name+'_'+variation.name+'.sh'
-                if opts.debug:
-                    print "generating %s" % (batch_filename)
-                batch_file = open(batch_filename, 'w')
-                batch_file.write(template_contents.format(**parameters))
-                batch_file.close()
-                os.chmod(batch_filename, 0755)
-                if verbose:
-                    print "created batch file %s" % batch_filename
-                batch_filenames.append(batch_filename)
-    return batch_filenames
-
-def submit_job(batch_file=None, opts=None, lxbatch=False, at3=False):
-    queue = opts.queue
-    verbose = opts.verbose
-    base_dir = base_directory()
-    short_batch_file = os.path.splitext(os.path.basename(batch_file))[0] # w/out ext, =sample name
-
-    cmd = ''
-    if opts.lxbatch:
-        cmd = (" bsub "
-               +" -L /bin/bash " # reset shell
-               +" -q %s " % queue
-               # not sure this is working
-               # +" -o %s/tthf-trex-utils/batch/log/%s_%s.oe" % (base_dir, opts.label, short_batch_file)
-               +" -J %s " % short_batch_file
-               +" -o %s.oe " % (relative_output_directory(opts=opts)+'/'+short_batch_file)
-               +" %s" % os.path.join(base_dir, batch_file)
-               )
+        utils.print_parsed_options(parser, opts)
+
+    if opts.generate_groupfiles:
+        sample_catalogue = sample_catalogues.HbsmSampleCatalogue() # TODO or VlqSampleCatalogue
+        # TODO prompt: ask about sample list from new production
+        sample_catalogue.add_samples_from_file(path='VLQAnalysis/data/samples_HtX4TopsNtuple-00-00-12.txt')
+        sample_catalogue.categorise_samples(sample_catalogue.samples)
+        sample_catalogue.write_group_files()
+        return
+    if job_manager.needs_to_generate_scripts:
+        if opts.verbose:
+            print "Need to generate scripts: gathering samples and filelists"
+        sample_catalogue = sample_catalogues.HbsmSampleCatalogue() # TODO or VlqSampleCatalogue
+        sample_catalogue.add_samples_from_group_files(glob.glob(opts.groupfile) if opts.groupfile else
+                                                      glob.glob(sample_catalogue.groupfiles_directory+'/*.txt'))
+        sample_catalogue.prune_samples(regex_include=opts.sample_include,
+                                       regex_exclude=opts.sample_exclude)
+        samples_to_process = (sample_catalogue.samples if opts.syst=='nominal' else
+                              sample_catalogue.add_systematic_variations(samples=sample_catalogue.samples,
+                                                                         verbose=opts.verbose,
+                                                                         syst_option=opts.syst))
+
+        if opts.generate_filelists:
+            pass # TODO not implemented; attach_filelists below already checks/creates them
+        input_interface = (sample_catalogues.RucioEosCernInterface() if opts.input_from=='rucioeos' else
+                           sample_catalogues.EosUserInterface() if opts.input_from=='eosuser' else # FIXME EosUserInterface requires filelist_dir and base_input_dir
+                           sample_catalogues.RucioPnfsIfaeInterface() if opts.input_from=='ruciopnfs' else
+                           sample_catalogues.At3ScratchDiskInterface() if opts.input_from=='at3disk' else
+                           sample_catalogues.LocalDiskInterface('VLQAnalysis/data/filelist/', base_input_dir=opts.input_from))
+        input_interface.attach_filelists(samples=samples_to_process, verbose=opts.verbose)
+
+        # samples_to_process = sample_catalogue.add_filelists(samples=samples_to_process, input_interface=input_interface)
+
+        for sample in samples_to_process:
+            for systematic in sample.systematic_uncertainties:
+                for variation in systematic.variations:
+                    job_manager.create_job(sample, systematic, variation)
+        if opts.split_larger>2:
+            job_manager.split_jobs(split_every_n=opts.split_larger)
+        if opts.merge_fewer>1:
+            job_manager.merge_jobs(min_n_input_files=opts.merge_fewer)
     else:
-        cmd = (" qsub "
-               +" -j oe " # join stdout and stderr
-               +" -o %s/%s/%s.oe" % (base_dir, relative_output_directory(opts), short_batch_file)
-               +" -q %s " % queue
-               +" %s" % batch_file
-               )
-    if verbose:
-        print cmd
-    if opts.submit:
-        out = utils.get_command_output(cmd)
-        if verbose:
-            print out['stdout']
-            print out['stderr']
-
-def prepare_tar_file(tar_file_name=None, verbose=None):
-    cmd = "tar czf  %s BtaggingTRFandRW IFAEReweightingTools IFAETopFramework VLQAnalysis" % tar_file_name
-    cmd = cmd+" --exclude='.svn' --exclude='*.o' --exclude='*.so'"
-    if not os.path.exists(tar_file_name):
-        raise RuntimeError("Missing tar file; please create it with:\n\n"+cmd)
+        if opts.verbose:
+            print "JobManager: using existing scripts from %s" % job_manager.relative_input_directory
+        # TODO (perhaps: need to think about expected behaviour) filter samples?
+        # Q: how would you filter on merged samples? on the jobset name or on the sample name
+    if opts.print_local:
+        if not all(type(j) is batch_utils.Job for j in job_manager.jobs):
+            raise NotImplementedError("This feature is available only for non-merged jobs; drop the --merge-fewer option")
+        print 'Commands to test the code locally:'
+        print '\n'.join(batch_utils.Job.parse_run_cmd_line(j.script_path) for j in job_manager.jobs)
+        return
+    if opts.check:
+        job_manager.check_outputs()
+    elif opts.resubmit:
+        job_manager.resubmit_failed_jobs()
+    else:
+        job_manager.submit_jobs()
+
+
+def is_valid_input(opts):
+    "either you specified a predifined input interface, or a directory"
+    return (opts.input_from in ['at3disk', 'eosuser', 'rucioeos', 'ruciopnfs'] or
+            os.path.isdir(opts.input_from))
+
+
 if __name__=='__main__':
     main()
diff --git a/python/sample_catalogues.py b/python/sample_catalogues.py
index 9b4045cc92dfed4da500973beb45cac725ad8af5..97a25bfb13e74fb156e9caa149ca8696d963d1c5 100644
--- a/python/sample_catalogues.py
+++ b/python/sample_catalogues.py
@@ -5,52 +5,114 @@ All samples are initially provided in a txt file corresponding to one
 production (the file `datasets_to_download.txt` provided by Peyton).
 
 SampleCatalogue is the main class here. It takes care of:
-- organizing the samples in categories: data, ttbar, signal, etc.
-- TODO attaching systematic uncertainties to the samples
-- TODO attaching ChainProvider(s) to the samples
+
+- organizing the samples in categories: data, ttbar, signal,
+  etc. Categories are flexible (just lists of files) and they should be
+  implemented following the `categorise_samples` example.
+- attaching systematic uncertainties to the samples
+- attaching ChainProvider(s) to the Variation
 - TODO building the VLQAnalysis/data/samples_info.dat file
 
+Overall design:
+A Sample has a list of SystematicUncertainty; the 'nominal' case is
+just a SystematicUncertainty whose is_nominal is true.
+Each uncertainty has one or two Variations.
+Each Variation has access to the input chain through a filelist
+generated by an InputDataInterface.
+
 Different analyses might need to access different samples, or to
 organize them in different ways (e.g. special treatment of signal
 samples, systematic samples, etc.). This is implemented by
 specializing SampleCatalogue.
 
-wTODO : systematic sample might need to attach chain_provider to each systematic uncertainty
+Example usage:
+
+# Step 0
+# If you start from a 'production sample list' --> build group files
+#  > sc = VlqSampleCatalogue() # or HbsmSampleCatalogue
+#  > sc.verbose = True
+#  > sc.add_samples_from_file(path='VLQAnalysis/data/samples_HtX4TopsNtuple-00-00-12.txt')
+#  > sc.categorise_samples(sc.samples)
+#  > sc.write_group_files()
+
+# Step 1
+# Otherwise start from existing group files
+#  > sc.add_samples_from_group_files(glob.glob(sc.groupfiles_directory+'/*.txt'))
+
+# Step 2
+# Write filelists; the input can be either a local dir or a storage interface (eos, pnfs, ...)
+#  > input_from_dir = LocalDiskInterface(filelist_dir='VLQAnalysis/data/filelist',
+#  >                                     base_input_dir='/tmp/gerbaudo/rucio')
+#  > input_from_eos = RucioEosCernInterface()
+#  > sc.add_filelists(samples=sc.samples, input_interface=input_from_eos)
+
+# Note1: the filelists are attached to each 'Variation' (not to each 'Sample').
+# Note2: sample-specific job options can be inserted in the txt files
+#        (as in MultibjetsAnalysis, with the '# config:' comment)
+# Now you can start processing the samples.
+# See instructions in hbsm_submit.py
+
+TODO : implement special treatment of systematic samples.
+TODO : speed up the generation of filelists? (rucio can take hours w/out
+       multiprocessing, local disk is fine)
 
 davide.gerbaudo@gmail.com
 Jul 2016
 """
 
+import copy
 import glob
 import os
+import re
 import collections
 
 from VLQAnalysis import utils
 
-# import systematics # TODO
-# catalogue = systematics.SystematicCatalogue()
+import systematics # perhaps the catalogue should be instantiated elsewhere (e.g. if it will become a specialised catalogue?)
+catalogue = systematics.SystematicCatalogue()
 
 
+#___________________________________________________________
 
 class Sample(object):
-    "Holds info about a sample and its attributes."
-    def __init__(self, short_name=None, full_name=None, group=None, filelist_dir=None, filelist_file=None, ds_input=None):
+    """Holds info about a sample and its attributes.
+    'full_name' is always there; the other attributes are filled as needed.
+    """
+    def __init__(self, short_name=None, full_name=None, group=None,
+                 job_options=None,
+                 filelist_dir=None, filelist_file=None,
+                 ds_input=None):
         self.short_name = short_name
         self.full_name = full_name
-        self.group = group
+        self._group = group
+        self.job_options = job_options
         self.ds_input = ds_input
-        self.chain_provider = None
-        # self.systematic_uncertainties = [catalogue.nominal]
+        self.systematic_uncertainties = [catalogue.nominal()]
         if not full_name:
             raise ValueError("Sample must have full_name")
 
     def __str__(self):
         plain_attrs = ['short_name', 'full_name', 'group', 'ds_input']
+        # TODO how do you want to print the uncertainties (and the chainprovider)
         return "Sample :"+ ', '.join("%s : '%s'" % (a, va) for a, va in [(pa, getattr(self, pa)) for pa in plain_attrs] if va)
 
     @property
-    def dsid(self):
-        return utils.guess_id_from_name(samplename=self.full_name)
+    def dsid(self, verbose=False): # note: extra args are unreachable through a property
+        "No need to input this by hand, just extract it from full_name"
+        return utils.guess_id_from_name(samplename=self.full_name, verbose=verbose)
+
+    @property
+    def group(self):
+        return self._group
+
+    @group.setter
+    def group(self, value):
+        "this attribute is set by SampleCatalogue so we need to check it and cache it"
+        if not self._group or self._group==value:
+            self._group = value
+        else:
+            print "overwriting sample group from %s to %s" % (self._group, value)
+            self._group = value
 
     def use_all_uncertainties(self):
         self.systematic_uncertainties = catalogue.all_uncertainties()
@@ -64,19 +126,7 @@ class Sample(object):
     def use_nominal_uncertainty(self):
         "nothing to do, this is the default"
         pass
-
-class ChainProvider(object):
-    "Provides access to a chain built out of files on disk/eos/pnfs"
-    def __init__(self, filelist_dir=None, filelist_file=None):
-        if filelist_file and not os.path.exists(self.filelist_file):
-            print "missing filelist %s" %self.filelist_file
-            self.commands_to_build_list()
-    def commands_to_build_list(self, rse='CERN-PROD_SCRATCHDISK'):
-        cmd = "rucio list-file-replicas --rse %s %s | grep %s | awk '{print $12}'" % (rse, self.full_name, rse)
-        print "# open a shell with rucio, then"
-        print cmd
-        print "# now replace the stuff before 'eos' with 'root://eosatlas/'"
-
+#___________________________________________________________
 
 class SampleCatalogue(object):
     """Keep track of a collection of samples.
@@ -84,76 +134,103 @@ class SampleCatalogue(object):
     Samples can be added to the catalogue in two ways:
 
     - either from a single file, in which case they will not have a
-      'group' until `categorise_all()` is called. This is typically
+      'group' until `categorise_samples()` is called. This is typically
       used when we move to a new production and all the files are just
       listed in a file.
 
     - or from several files group files, in which case the filename is
       used as the group
 
+    In general one builds the catalogue from the group files.
+
     TODO attach syst uncertainties to samples
-    TODO attach
 
     """
+    keyword_job_option = 'config:' # same convention as in MultibjetsAnalysis
+
     def __init__(self):
         self.samples = []
         self.verbose = False
-
-    def add_samples_from_file(self, path, line_parser = lambda l: Sample(full_name=l.strip())):
-        self.samples += [line_parser(l) for l in SampleCatalogue.read_lines_from_txt(path)]
+        self.groupfiles_directory = 'VLQAnalysis/data/groups' # should be overwritten by specific catalogue
 
     def add_samples_from_group_files(self, paths=[]):
-        """A wrapper so that we can read samples that have already been categorised.
+        """This is the usual method to populate the catalogue, from
+        samples that have already been organised in groups.
+        The name of each group is determined from the filename.
         Example:
-        > catalogue.add_samples_categorised(glob.glob('path/to/files/*.txt')
+        > catalogue.add_samples_from_group_files(glob.glob('path/to/files/*.txt'))
-        The name of each group is determined from the filename
         """
         for path in paths:
             group = utils.filename_without_extension(path)
-            self.add_samples_from_file(path, line_parser=lambda l: Sample(full_name=l.strip(), group=group))
+            self.add_samples_from_file(path, group=group)
 
-    def write_group_files(self, output_directory=None):
-        if self.has_uncategorised_samples:
-            print "There are samples that do not belong to any group. The group files will not be complete."
-        if not os.path.isdir(output_directory):
-            raise IOError("'%s' is not a valid directory" % output_directory)
+    def add_samples_from_file(self, path, group=None):
+        """This should be used to populate the catalogue when you have a
+        new production. path should be the file 'datasets_to_download.txt'
+        generated from HtX4TopsNtuple.
+        """
+        job_options = None
+        keyword_job_option = SampleCatalogue.keyword_job_option
+        for line in SampleCatalogue.read_lines_from_txt(path, keywords_useful_comment_line=[keyword_job_option]):
+            if keyword_job_option in line:
+                job_options = line[line.find(keyword_job_option)+len(keyword_job_option):].strip()
+                continue # will affect all upcoming samples
+            self.samples.append(Sample(full_name=line, group=group, job_options=job_options))
+
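+    # Illustration for add_samples_from_file (hypothetical file content): a
+    # '# config:' comment line sets job_options for all samples listed after it:
+    #   # config: --doTruth false
+    #   user.prose.410000.someContainer_output.root   <- gets job_options='--doTruth false'
+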
+    def write_group_files(self, allow_uncategorised_samples=True):
+        """After having called 'categorise_samples', you can write the samples organised in group files.
+        Alternatively, you can also just write your group files by hand.
+        """
+        num_uncategorised_samples = len(self.uncategorised_samples)
+        if num_uncategorised_samples:
+            print ("Warning: there are %d samples that do not belong to any group." % num_uncategorised_samples
+                   +" Please check 'uncategorised.txt'")
+            if not allow_uncategorised_samples:
+                uncategorised = [s for s in self.samples if not s.group]
+                raise NotImplementedError("Do not know how to handle uncategorised samples:\n"+
+                                          '\n'.join(s.full_name for s in uncategorised))
+        utils.mkdir_if_needed(self.groupfiles_directory)
         samples_per_group = collections.defaultdict(list)
+        samples_per_group['uncategorised'] = []
         for s in self.samples:
             if not s.group:
-                continue
-            samples_per_group[s.group].append(s)
+                samples_per_group['uncategorised'].append(s)
+            else:
+                samples_per_group[s.group].append(s)
+        if not samples_per_group['uncategorised']:
+            samples_per_group.pop('uncategorised', None)
         for group, samples in samples_per_group.items():
-            filename = os.path.join(output_directory, group+'.txt')
+            filename = os.path.join(self.groupfiles_directory, group+'.txt')
             with open(filename, 'w') as output_file:
                 output_file.write('\n'.join(s.full_name for s in samples))
             if self.verbose:
                 print "written %s" % filename
+    @classmethod
+    def categorise_samples(cls, samples):
+        raise NotImplementedError("This operation depends on the analysis,"
+                                  " and it is implemented only in the specialised catalogues")
+
+    @classmethod
+    def add_systematic_variations(cls, samples):
+        raise NotImplementedError("This operation depends on the analysis,"
+                                  " and it is implemented only in the specialised catalogues")
+
+    @property
+    def uncategorised_samples(self):
+        return [s for s in self.samples if not s.group]
 
     @property
-    def has_uncategorised_samples(self):
-        return any(not s.group for s in self.samples)
+    def groups(self):
+        return sorted(list(set(s.group for s in self.samples if s.group)))
 
-    @classmethod
-    def categorise_all_samples(cls, samples, overwrite_group=False, verbose=False):
-        "try to determine the group using 'determine_group_from_name'"
-        for sample in samples:
-            group = cls.determine_group_from_name(sample)
-            if not sample.group or sample.group==group:
-                sample.group = group
-            elif overwrite_group:
-                if verbose:
-                    print "overwriting group '%s' with '%s' for %s" % (sample.group, group, sample.full_name)
-                sample.group = group
-            else:
-                pass
+    def samples_from_group(self, group=''):
+        return [s for s in self.samples if s.group==group]
 
     @classmethod
     def determine_group_from_name(cls, sample=None):
-        """Determine the group of this sample from its.
+        """Determine the group of this sample from its name.
 
         This is where the analysis-specific catalogues can implement their categorisation.
         """
-        return ('data' if 'physics_Main' in sample.full_name else
+        group = ('data' if cls.is_data(sample) else
                 'ttbar' if cls.is_ttbar(sample) else
                 'wjets' if cls.is_wjets(sample) else
                 'zjets' if cls.is_zjets(sample) else
@@ -162,11 +239,12 @@ class SampleCatalogue(object):
                 'topewk' if cls.is_topewk(sample) else
                 'tth' if cls.is_tth(sample) else
                 'fourtop' if cls.is_fourtop(sample) else
-                'vlq' if cls.is_vlq(sample) else
-                'uerdpp' if cls.is_uerdpp(sample) else
-                'fourtopci' if cls.is_fourtopci(sample) else
-                'hbsm' if cls. is_hbsm(sample) else
                 None)
+        return group
+
+    @staticmethod
+    def is_data(sample):
+        return 'physics_Main' in sample.full_name
 
     @staticmethod
     def is_ttbar(sample):
@@ -206,45 +284,27 @@ class SampleCatalogue(object):
     def is_fourtop(sample):
         return any(str(dsid) in sample.full_name for dsid in [410080])
 
-    # perhaps the is_* below should go to VlqSampleCatalogue? DG-2016-07-14
-    @staticmethod
-    def is_vlq(sample):
-        return any(str(dsid) in sample.full_name for dsid in range(302468, 302519+1))
-
-    @staticmethod
-    def is_uerdpp(sample):
-        return any(str(dsid) in sample.full_name for dsid in range(302055, 302059+1))
-
-    @staticmethod
-    def is_fourtopci(sample):
-        return any(str(dsid) in sample.full_name for dsid in [302777])
-
-    @staticmethod
-    def is_hbsm(sample):
-        return any(str(dsid) in sample.full_name
-                   for dsid in range(304777, 304780+1) + range(341541, 341555+1) + [343434, 343432])
-
-
-
-
     @staticmethod
-    def read_lines_from_txt(txt_filename):
+    def read_lines_from_txt(txt_filename, keywords_useful_comment_line=[]):
         "parse a file dropping comment and empty lines"
         def is_comment_or_empty(l):
             l = l.strip()
             return not l or l.startswith('#')
         lines = []
         with open(txt_filename) as f:
-            lines = [l.strip() for l in f.readlines() if not is_comment_or_empty(l)]
+            lines = [l.strip() for l in f.readlines()
+                     if not is_comment_or_empty(l)
+                     or any(k in l for k in keywords_useful_comment_line)]
         return lines
 
     def write_script_to_generate_rucio_eos_lists(self, script_filename='generate_eos_filelists.sh',
                                                  output_directory='./', rse='CERN-PROD_SCRATCHDISK'):
-        """TODO this should probably go in EosInputChain.
+        """Obsolete, please use SampleCatalogue.add_filelists
 
         I cannot execute the rucio commands in the same shell where I
         run python, so I have to write them out to a file.
         """
+        raise NotImplementedError("Obsolete, please use SampleCatalogue.add_filelists")
         tmp_samplelist = '/tmp/samples.txt'
         with open(script_filename, 'w') as of:
             of.write('#!/bin/bash\n')
@@ -265,26 +325,447 @@ class SampleCatalogue(object):
             of.write('done \n')
             of.write('cd - \n')
         print "To generate the file lists, open a new shell with rucio, then execute 'source %s'" % script_filename
+    def prune_samples(self, regex_include=None, regex_exclude=None):
+        """filter samples with two input regex that are applied to the
+        short name (if available) or to the full_name"""
+        self.samples = (utils.filter_with_regexp(self.samples, regex_include,
+                                                 func=lambda x: x.short_name if x.short_name else x.full_name)
+                        if regex_include else self.samples)
+        self.samples = (utils.exclude_with_regexp(self.samples, regex_exclude,
+                                                  func=lambda x: x.short_name if x.short_name else x.full_name)
+                        if regex_exclude else self.samples)
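+        # e.g. keep only a couple of hbsm signal points (sketch):
+        #   sc.prune_samples(regex_include='341541|341542')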
 
-# class VlqSampleCatalogue(SampleCatalogue):
 
-#     @classmethod
-#     def categorise(cls, sample):
+#___________________________________________________________
 
-#         data_dir = config['data_dir']
-#         for name in os.listdir(data_dir):
-#         yield cls(os.path.join(data_dir, name)) # up to concrete subclass to interpret inputs
+class VlqSampleCatalogue(SampleCatalogue):
+    "Catalogue with features that are specific to the VLQ analysis"
+
+    def __init__(self):
+        super(VlqSampleCatalogue, self).__init__()
+        self.groupfiles_directory = 'VLQAnalysis/data/groups/vlq'
 
+    @classmethod
+    def determine_group_from_name(cls, sample=None):
+        """Determine the group of this sample from its.
+
+        This is where the analysis-specific catalogues can implement their categorisation.
+        """
+        group = ('vlq' if cls.is_vlq(sample) else
+                 'uerdpp' if cls.is_uerdpp(sample) else
+                 'fourtopci' if cls.is_fourtopci(sample) else
+                 SampleCatalogue.determine_group_from_name(sample))
+        return group
+
+    @classmethod
+    def categorise_samples(cls, samples, overwrite_group=False, verbose=False):
+        """This is where the samples are organised in groups.
+
+        For some groups we might need to perform extra steps; for
+        example we need to assign the 'short_name' (this was called
+        'name' in VLQ_Samples.py)
+        """
+        for sample in samples:
+            sample.group = cls.determine_group_from_name(sample)
+            if sample.group == 'vlq':
+                sample.short_name = cls.vlq_short_name(sample)
+            elif sample.group == 'uerdpp':
+                sample.short_name = cls.uerdpp_short_name(sample)
+            elif sample.group == 'fourtopci':
+                sample.short_name = '4tops_CI'
+
+    @staticmethod
+    def is_vlq(sample):
+        return any(str(dsid) in sample.full_name for dsid in range(302468, 302519+1))
+
+    @staticmethod
+    def is_uerdpp(sample):
+        return any(str(dsid) in sample.full_name for dsid in range(302055, 302059+1))
+
+    @staticmethod
+    def is_fourtopci(sample):
+        return any(str(dsid) in sample.full_name for dsid in [302777])
+
+    @classmethod
+    def vlq_short_name(cls, sample=None):
+        dsid = int(sample.dsid)
+        return ('VLQ_TT_600'  if dsid==302469 else
+                'VLQ_TT_700'  if dsid==302470 else
+                'VLQ_TT_750'  if dsid==302471 else
+                'VLQ_TT_800'  if dsid==302472 else
+                'VLQ_TT_850'  if dsid==302473 else
+                'VLQ_TT_900'  if dsid==302474 else
+                'VLQ_TT_950'  if dsid==302475 else
+                'VLQ_TT_1000' if dsid==302476 else
+                'VLQ_TT_1050' if dsid==302477 else
+                'VLQ_TT_1100' if dsid==302478 else
+                'VLQ_TT_1150' if dsid==302479 else
+                'VLQ_TT_1200' if dsid==302480 else
+                'VLQ_TT_1300' if dsid==302481 else
+                'VLQ_TT_1400' if dsid==302482 else
+                None)
+
+    @classmethod
+    def uerdpp_short_name(cls, sample=None):
+        dsid = int(sample.dsid)
+        return ('UEDRPP_1000' if dsid==302055 else
+                'UEDRPP_1200' if dsid==302056 else
+                'UEDRPP_1400' if dsid==302057 else
+                'UEDRPP_1600' if dsid==302058 else
+                'UEDRPP_1800' if dsid==302059 else
+                None)
+
+#___________________________________________________________
+
+class HbsmSampleCatalogue(SampleCatalogue):
+    "Catalogue with features that are specific to the HBSM analysis"
+
+    def __init__(self):
+        super(HbsmSampleCatalogue, self).__init__()
+        self.groupfiles_directory = 'VLQAnalysis/data/groups/hbsm'
+
+    @classmethod
+    def determine_group_from_name(cls, sample=None):
+        """Determine the group of this sample from its.
+
+        This is where the analysis-specific catalogues can implement their categorisation.
+        """
+        group = ('hbsm' if cls.is_hbsm(sample) else
+                 SampleCatalogue.determine_group_from_name(sample))
+        return group
+
+    @classmethod
+    def categorise_samples(cls, samples, overwrite_group=False, verbose=False):
+        """This is where the samples are organised in groups.
+
+        For some groups we might need to perform extra steps; for
+        example we need to assign the 'short_name' (this was called
+        'name' in VLQ_Samples.py)
+        """
+        for sample in samples:
+            sample.group = cls.determine_group_from_name(sample)
+            if sample.group == 'hbsm':
+                sample.short_name = cls.hbsm_short_name(sample)
+
+    @staticmethod
+    def is_hbsm(sample):
+        return any(str(dsid) in sample.full_name
+                   for dsid in range(304777, 304780+1) + range(341541, 341555+1) + [343434, 343432])
+    @classmethod
+    def hbsm_short_name(cls, sample=None):
+        dsid = int(sample.dsid)
+        # FIXME these dsids/names were copied from VlqSampleCatalogue.vlq_short_name;
+        # they never match the hbsm dsids from is_hbsm above, so this always
+        # returns None. Replace with the actual hbsm dsid -> name mapping.
+        return ('VLQ_TT_600'  if dsid==302469 else
+                'VLQ_TT_700'  if dsid==302470 else
+                'VLQ_TT_750'  if dsid==302471 else
+                'VLQ_TT_800'  if dsid==302472 else
+                'VLQ_TT_850'  if dsid==302473 else
+                'VLQ_TT_900'  if dsid==302474 else
+                'VLQ_TT_950'  if dsid==302475 else
+                'VLQ_TT_1000' if dsid==302476 else
+                'VLQ_TT_1050' if dsid==302477 else
+                'VLQ_TT_1100' if dsid==302478 else
+                'VLQ_TT_1150' if dsid==302479 else
+                'VLQ_TT_1200' if dsid==302480 else
+                'VLQ_TT_1300' if dsid==302481 else
+                'VLQ_TT_1400' if dsid==302482 else
+                None)
+
+    @classmethod
+    def add_systematic_variations(cls, samples=None, verbose=False, syst_option=False):
+        """Here we might need to add/drop samples, so we will just
+        re-build the list dealing with the groups one at the time
+        """
+        weight_only = syst_option and syst_option=='weight'
+        object_only = syst_option and syst_option=='object'
+        if syst_option and syst_option not in ['weight', 'object', 'all']:
+            raise NotImplementedError("for now can only accept either weight|object|all, not '%s'" % syst_option)
+        updated_samples = []
+        samples_per_group = collections.defaultdict(list)
+        for sample in samples:
+            samples_per_group[sample.group].append(sample)
+        for group, samples in samples_per_group.iteritems():
+            if group=='data':
+                updated_samples += samples
+            elif group=='ttbar':
+                if verbose:
+                    print 'adding ttbar systematics'
+                updated_samples += cls.add_ttbar_systematics(samples)
+            else:
+                if verbose:
+                    print 'adding other systematics'
+                updated_samples += cls.add_generic_systematics(samples,
+                                                               weight_only=weight_only,
+                                                               object_only=object_only)
+            # do we need to do anything special for the signals?
+            # TODO filter systematics with comma-sep list from syst_option
+            # TODO filter systematics with regex
+        return updated_samples
+
+    @staticmethod
+    def add_ttbar_systematics(ttbar_samples, weight_only=False, object_only=False):
+        """Take a list of samples and provide a new list containing
+        samples with syst uncertainties (and additional samples if
+        needed when splitting in hf).
+        """
+        print "add_ttbar_systematics: weight_only, object_only not implemented yet"
+        updated_samples = []
+        hf_splitted = True # should it be configurable? in this case we need to process the samples n times
+        use_ht_slices = True # should it be configurable? in this case we need to process more samples
+        ht_sliced_dsids = [410000, 407009, 407010, 407011, 407012] # low, 1, 2, 3, met
+        ht_sliced_samples = [s for s in ttbar_samples if int(s.dsid) in ht_sliced_dsids]
+        ht_inclusive_samples = [s for s in ht_sliced_samples if int(s.dsid)==410000]
+
+        if hf_splitted: # this only implements the 'simple' splitting of VLQ_Samples.py
+            samples_to_split_in_hf = ht_sliced_samples if use_ht_slices else ht_inclusive_samples
+            hf_slices = ['light', 'bb', 'cc']
+            for hf_slice in hf_slices: # need to make a copy of the samples b/c they will be processed 3 times
+                samples_this_slice = [copy.deepcopy(s) for s in samples_to_split_in_hf]
+                for s in samples_this_slice:
+                    s.short_name = 'ttbar'+hf_slice
+                updated_samples += samples_this_slice
+        elif use_ht_slices:
+            updated_samples += ht_sliced_samples
+        else:
+            raise NotImplementedError("add_ttbar_systematics not implemented w/out slices, see VLQ_Samples.py")
+        for s in updated_samples: # TODO check with Loic+Mirko that this is always needed
+            s.use_all_uncertainties()
+        return updated_samples
+
+    @staticmethod
+    def add_generic_systematics(samples, weight_only=False, object_only=False):
+        "Toggle on the weight and object systematic variations"
+        if weight_only:
+            for s in samples:
+                s.use_weight_uncertainties()
+        elif object_only:
+            for s in samples:
+                s.use_object_uncertainties()
+        else:
+            for s in samples:
+                s.use_all_uncertainties()
+        return samples
+
+#___________________________________________________________
+
+class InputDataInterface(object):
+    """Base class defining how we access input data (through filelists).
+
+    In general one should just call 'filelist()'; some interfaces
+    (e.g. disk) will automatically generate it if it's not there;
+    others (e.g. eos, rse) will tell you how to generate it. The
+    second behaviour is motivated by the fact that the generation
+    might take time (e.g. eos) or special setup (e.g. rucio), so it
+    might be better to do this asynchronously, once for all samples in
+    a separate shell.
+
+    For an example of the second case, see
+    'SampleCatalogue.add_filelists'.
+
+    For tests, one can also call
+    'InputDataInterface.generate_filelist' for a single sample (rather
+    than through the catalogue).
+    """
+    def __init__(self, filelist_dir):
+        self.filelist_dir = utils.mkdir_if_needed(filelist_dir)
+    def generate_filelist(self, container):
+        raise NotImplementedError("Should be implemented in specialised classes")
+    def filelist(self, container):
+        raise NotImplementedError("Should be implemented in specialised classes")
+
+    def attach_filelists(self, samples=[], verbose=False):
+        "Attach a filelist to each one of the input samples x variations"
+        if verbose:
+            print "About to check/create filelists for %d samples; this might take some time." % len(samples)
+        for sa in samples:
+            for su in sa.systematic_uncertainties:
+                for v in su.variations:
+                    v.filelist = str(self.generate_filelist(sa.full_name))
+                    if verbose:
+                        n_files = sum(1 for l in open(v.filelist).readlines() if not l.strip().startswith('#'))
+                        print "%s added filelist (%d files) %s %s" % (self.filelist_dir,
+                                                                      n_files,
+                                                                      sa.full_name,
+                                                                      '', #sys not needed for now
+                                                                      )
+                    # note to self: here sample knows about the
+                    # container name, and variation knows about the
+                    # treename. It assumes that the object variation
+                    # trees are in the same file as the nominal one.
+                    # I might need to revise this when we start using
+                    # systematic samples?
+        # return samples
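+
+    # Usage sketch (interfaces are defined below; catalogue populated as in
+    # the module docstring):
+    #   sc = HbsmSampleCatalogue()
+    #   iface = RucioEosCernInterface()
+    #   iface.attach_filelists(samples=sc.samples, verbose=True)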
+
+#___________________________________________________________
+
+class LocalDiskInterface(InputDataInterface):
+    """Data on disk that can be accessed through simple os.path.
+    If there is no filelist just generate it.
+    It assumes that there is one sub-directory for each container.
+    """
+    def __init__(self, filelist_dir=None, base_input_dir=None):
+        super(LocalDiskInterface, self).__init__(filelist_dir)
+        self.base_input_dir = base_input_dir
+
+    def generate_filelist(self, container):
+        container = container.strip('/')
+        filelist_path = os.path.join(self.filelist_dir, container+'.txt')
+        if not os.path.exists(filelist_path):
+            with open(filelist_path, 'w') as filelist_file:
+                filenames = [f for f in os.listdir(os.path.join(self.base_input_dir, container))
+                             if '.root' in f]
+                filenames = sorted(filenames)
+                filenames = [os.path.abspath(os.path.join(self.base_input_dir, container, f))
+                             for f in filenames]
+                filelist_file.write('\n'.join(filenames)+'\n')
+        return filelist_path
+
+    def filelist(self, container):
+        filelist_path = os.path.join(self.filelist_dir, container.strip('/')+'.txt')
+        if not os.path.exists(filelist_path):
+            self.generate_filelist(container)
+        return filelist_path
+
+#___________________________________________________________
+
+class At3ScratchDiskInterface(LocalDiskInterface):
+    """Data downloaded to the scratch2 disk on at3.
+    Currently 00-10 production.
+    """
+    def __init__(self,
+                 filelist_dir='VLQAnalysis/data/hbsm/filelist/at3pnfs',
+                 base_input_dir='/nfs/at3/scratch2/lvalery/VLQFiles/AT-00-00-10/'):
+        super(At3ScratchDiskInterface, self).__init__(filelist_dir, base_input_dir)
+
+#___________________________________________________________
+
+class EosUserInterface(InputDataInterface):
+    """Data on eos accessed through 'eos ls' (not via rucio).
+    When the filelist is missing, raise IOError.
+    Prefer not to generate them under the hood (can take time, better
+    if user does it explicitly).
+    """
+    def __init__(self, filelist_dir, base_input_dir):
+        super(EosUserInterface, self).__init__(filelist_dir)
+        self.base_input_dir = base_input_dir
+
+    def filelist(self, container):
+        container = container.strip('/')
+        filelist_path = os.path.join(self.filelist_dir, container+'.txt')
+        if not os.path.exists(filelist_path):
+            raise IOError("Missing filelist from EosUserInterface(%s) for %s" % (self.filelist_dir, container)
+                          +"Probably need to call SampleCatalogue.add_filelists()")
+        return filelist_path
+
+    def generate_filelist(self, container):
+        raise NotImplementedError("Need some cleanup of the output? and perhaps pre-pend 'root://eosatlas//eo'")
+        # todo include lines below
+        container = container.strip('/')
+        cmd = "eos ls %s/%s" % (self.base_input_dir, container)
+        utils.get_command_output(cmd)
+        filelist_path = os.path.join(self.filelist_dir, container+'.txt')
+        with open(filelist_path, 'w') as filelist_file:
+            filenames = [f for f in os.listdir(os.path.join(self.base_input_dir, container)) if '.root' in f]
+            filelist_file.write('\n'.join(filenames))
+        return filelist_path
+
+#___________________________________________________________
+
+class RseInterface(InputDataInterface):
+    """Interface to a remote storage element accessed through rucio.
+    When the filelist if missing, raise IOError.
+    Prefer not to generate them under the hood (can take time, better
+    if user does it explicitly).
+
+    Users should usually instantiate objects of site-specific classes
+    (see RucioEosCernInterface and RucioPnfsIfaeInterface)
+    """
+    def __init__(self, filelist_dir, rse, root_prefix, root_prefix_placeholder):
+        """
+        Example arguments:
+        - rse : CERN-PROD_SCRATCHDISK
+        - root_prefix : root://eosatlas//eos
+        - root_prefix_placeholder : eos
+        See generate_filelist.clean_line for details
+        """
+        super(RseInterface, self).__init__(filelist_dir)
+        self.rse = rse
+        self.root_prefix = root_prefix
+        self.root_prefix_placeholder = root_prefix_placeholder
+
+    def filelist(self, container):
+        filelist_path = os.path.join(self.filelist_dir, container.strip('/')+'.txt')
+        if not os.path.exists(filelist_path):
+            raise IOError("Missing filelist from RseUserInterface(%s) for %s" % (self.filelist_dir, container)
+                          +"Probably need to call SampleCatalogue.add_filelists()")
+        return filelist_path
+
+    def generate_filelist(self, container, overwrite_filelist=False):
+        container = container.strip('/')
+        filelist_path = os.path.join(self.filelist_dir, container+'.txt')
+        if os.path.exists(filelist_path):
+            if not overwrite_filelist:
+                return filelist_path
+        has_rucio = any('RUCIO' in k for k in os.environ.keys())
+        has_voms = any('VOMS' in k for k in os.environ.keys()) # does not take into account expired token
+        if not has_rucio or not has_voms:
+            raise EnvironmentError("Invalid environment: please 'lsetup rucio' and 'voms-proxy-init -voms atlas'")
+
+        cmd = "rucio list-file-replicas --rse {rse:s} {container:s} | grep {rse:s}".format(**{'rse':self.rse,
+                                                                                              'container':container })
+        def clean_line(line, rse, prefix_from, prefix_to):
+            """
+            Convert output line that looks like:
+            | user.prose | user.prose.8949257._000001.out.root | 3.4 GB     | 22acf9ae  | CERN-PROD_SCRATCHDISK: gsiftp://eosatlassftp.cern.ch:2811/eos/atlas/atlasscratchdisk/rucio/user/prose/6d/a7/user.prose.8949257._000001.out.root |
+            into a line that looks like:
+            root://eosatlas//eos/atlas/atlasscratchdisk/rucio/user/prose/6d/a7/user.prose.8949257._000001.out.root
+            """
+            fields = [f.strip() for f in line.split('|')]
+            file_column = next((f for f in fields if rse in f), None)
+            file_path = next((f.strip() for f in file_column.split() if prefix_from in f), None)
+            return re.sub(r'.*'+prefix_from, prefix_to, file_path, 1)
+        out = utils.get_command_output(cmd)
+        if out['returncode']:
+            raise IOError("Command failed: '%s'\nstdout:\n%s\nstderr:\n%s" %
+                          (cmd, out['stdout'], out['stderr']))
+        else:
+            lines = [l for l in out['stdout'].split('\n') if self.rse in l]
+            filenames = [clean_line(l, self.rse, self.root_prefix_placeholder, self.root_prefix)
+                         for l in lines]
+            with open(filelist_path, 'w') as filelist_file:
+                filelist_file.write('\n'.join(filenames))
+        return filelist_path
+#___________________________________________________________
+
+class RucioEosCernInterface(RseInterface):
+    "Access files on CERN-PROD_SCRATCHDISK through eos"
+    def __init__(self, filelist_dir='VLQAnalysis/data/filelist/eos',
+                 rse='CERN-PROD_SCRATCHDISK',
+                 root_prefix='root://eosatlas//eos/atlas',
+                 root_prefix_placeholder='/eos/atlas'):
+        super(RucioEosCernInterface, self).__init__(filelist_dir, rse, root_prefix, root_prefix_placeholder)
+
+#___________________________________________________________
+
+class RucioPnfsIfaeInterface(RseInterface):
+    "Access files on IFAE_SCRATCHDISK through pnfs"
+    def __init__(self, filelist_dir='VLQAnalysis/data/filelist/pnfs',
+                 rse='IFAE_SCRATCHDISK',
+                 root_prefix='root://xrootd.pic.es//pnfs/pic.es',
+                 root_prefix_placeholder='/pnfs/pic.es'):
+        super(RucioPnfsIfaeInterface, self).__init__(filelist_dir, rse, root_prefix, root_prefix_placeholder)
+
+#___________________________________________________________
 
 if __name__=='__main__':
     print "Testing sample catalogues"
 
-    print "build catalogue from Peyton's file:"
-    sc = SampleCatalogue()
-    sc.verbose = True
-    sc.add_samples_from_file(path='VLQAnalysis/data/samples_HtX4TopsNtuple-00-00-11.txt')
-    print "collected %d samples" % len(sc.samples)
-    sc.categorise_all_samples(sc.samples)
+    # print "build catalogue from Peyton's file:"
+    # sc = SampleCatalogue()
+    # sc.verbose = True
+    # sc.add_samples_from_file(path='VLQAnalysis/data/samples_HtX4TopsNtuple-00-00-11.txt')
+    # print "collected %d samples" % len(sc.samples)
+    # sc.categorise_samples(sc.samples) # only for specialised catalogues
 
     # -- tested: ok
     # print 'samples:'
@@ -302,8 +783,43 @@ if __name__=='__main__':
     #     print '\n'.join(s.full_name for s in uncategorised_samples)
 
     # -- tested: ok (go from one list to group files and back)
-    groupfiles_directory = 'VLQAnalysis/data/hbsm/groups'
-    sc.write_group_files(output_directory=groupfiles_directory)
-    sc2 = SampleCatalogue()    
-    sc2.add_samples_from_group_files(glob.glob(groupfiles_directory+'/*.txt'))
+    sc = HbsmSampleCatalogue()
+    # sc = VlqSampleCatalogue()
+    sc.verbose = True
+    sc.add_samples_from_file(path='VLQAnalysis/data/samples_HtX4TopsNtuple-00-00-12.txt')
+    sc.categorise_samples(sc.samples) # only for specialised catalogues
+    sc.write_group_files()
+    sc2 = SampleCatalogue()
+    sc2.add_samples_from_group_files(glob.glob(sc.groupfiles_directory+'/*.txt'))
     print "%d samples from production file, and %d samples from group files" % (len(sc.samples), len(sc2.samples))
+
+    # -- tested: ok (also the ttbar ht + hf splitting)
+    # groupfiles_directory = 'VLQAnalysis/data/groups/hbsm'
+    # sc_hbsm = HbsmSampleCatalogue()
+    # sc_hbsm.add_samples_from_group_files(glob.glob('VLQAnalysis/data/groups/hbsm/*.txt'))
+    # print "%d samples from group files" % (len(sc_hbsm.samples))
+    # sc_hbsm.samples = sc_hbsm.add_systematic_variations(sc_hbsm.samples)
+    # print "%d samples after syst variations" % (len(sc_hbsm.samples))
+    # ttbar_samples = sc_hbsm.samples_from_group('ttbar')
+    # for s in ttbar_samples:
+    #     print s.short_name,' ',s.full_name
+
+    # -- tested: ok for both eos and disk
+    sc_hbsm = HbsmSampleCatalogue()
+    # sc_hbsm.add_samples_from_group_files(glob.glob('VLQAnalysis/data/groups/hbsm/*.txt'))
+    sc_hbsm.add_samples_from_group_files(glob.glob(sc_hbsm.groupfiles_directory+'/hbsm.txt'))
+    sc_hbsm.samples = sc_hbsm.add_systematic_variations(sc_hbsm.samples)
+    input_from_dir = LocalDiskInterface(filelist_dir='VLQAnalysis/data/filelist',
+                                        base_input_dir='/tmp/gerbaudo/rucio')
+    input_from_eos = RucioEosCernInterface()
+    def print_filelists(samples):
+        for sample in samples:
+            for systematic in sample.systematic_uncertainties:
+                for variation in [v for v in systematic.variations if v.name=='nominal']:
+                    print "%s %s : %s" % (variation.name, sample.full_name, variation.filelist)
+    try:
+        print_filelists(sc_hbsm.samples)
+    except IOError:
+        print "Missing filelists, generating them"
+        sc_hbsm.add_filelists(samples=sc_hbsm.samples, input_interface=input_from_eos)
+    print_filelists(sc_hbsm.samples)
diff --git a/python/samples/systematics.py b/python/systematics.py
similarity index 88%
rename from python/samples/systematics.py
rename to python/systematics.py
index fd18a96e974c0480fcf34a04190384df795b28eb..3d6f894932dd4c95cf0259c1294999dae4598547 100644
--- a/python/samples/systematics.py
+++ b/python/systematics.py
@@ -9,18 +9,30 @@ davide.gerbaudo@gmail.com
 Jul 2016
 """
 
+import copy
+
 class Variation(object):
     treename_suffix = '_Loose' # this is the suffix that should be removed from the output filenames
     def __init__(self, input_tree):
         self.input_tree = input_tree
         self.is_weight_variation = False
         self.is_object_variation = False
+        self._filelist = None
     @property
     def name(self):
         if self.input_tree.endswith(Variation.treename_suffix):
             return self.input_tree[:-len(Variation.treename_suffix)]
         else:
             raise ValueError("Variation: cannot interpret treename '%s' as a variation name" % self.input_tree)
+    @property
+    def filelist(self):
+        if not self._filelist:
+            raise IOError("missing input data for '%s'\nPlease call SampleCatalogue.add_filelists" % self.name)
+        else:
+            return self._filelist
+    @filelist.setter
+    def filelist(self, value):
+        self._filelist = value
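+    # intended access pattern (sketch; add_filelists is provided by
+    # the sample catalogue):
+    #   try:
+    #       files = variation.filelist
+    #   except IOError:
+    #       catalogue.add_filelists(samples=..., input_interface=...)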
 
 class WeightVariation(Variation):
     "A variation that only changes the event weight, but not the event selection"
@@ -52,8 +64,18 @@ class SystematicUncertainty(object):
         return self.name=='nominal'
 
 class SystematicCatalogue(object):
+    """A catalogue of uncertainties
+
+    Note: when a used asks for the uncertainties (and their
+    variations), we always provide a copy. The reason for this is that
+    they will be attached to samples, so we want to have different
+    copies of the same Variation objects for each sample.
+
+    Note to self: perhaps I should also hide '._' all the remaining
+    attributes (such as electron_object_uncertainties etc.).
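+
+    Sketch of the copy semantics (names from this module):
+
+        catalogue = SystematicCatalogue()
+        n1, n2 = catalogue.nominal(), catalogue.nominal()
+        assert n1 is not n2 # each caller gets an independent copy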
+    """
     def __init__(self):
-        self.nominal = SystematicUncertainty(name='nominal', variations=[Variation(input_tree='nominal_Loose')])
+        self._nominal = SystematicUncertainty(name='nominal', variations=[Variation(input_tree='nominal_Loose')])
         self.electron_object_uncertainties = [
             SystematicUncertainty(name='EG_RESOLUTION_ALL',
                                   variations=[ObjectVariation(input_tree='EG_RESOLUTION_ALL__1up_Loose'),
@@ -142,16 +164,19 @@ class SystematicCatalogue(object):
             ]
 
     def object_uncertainties(self):
-        return (self.electron_object_uncertainties +
-                self.jet_object_uncertainties +
-                self.met_object_uncertainties +
-                self.muon_object_uncertainties)
+        return copy.deepcopy(self.electron_object_uncertainties +
+                             self.jet_object_uncertainties +
+                             self.met_object_uncertainties +
+                             self.muon_object_uncertainties)
+
+    def nominal(self):
+        return copy.deepcopy(self._nominal)
 
     def weight_uncertainties(self):
         raise NotImplementedError("SystematicCatalogue: weight_uncertainties not there yet")
 
     def all_uncertainties(self):
-        return [self.nominal] + self.object_uncertainties() # + self.weight_uncertainties()
+        return [self.nominal()] + self.object_uncertainties() # + self.weight_uncertainties()
 
     def print_all(self):
         print "SystematicCatalogue:\n"+'\n'.join([s.name for s in self.all_uncertainties()])
diff --git a/python/utils.py b/python/utils.py
index 62fdfe95bc9140ec59abcc8c2e99cdc4998728c9..e4a33b03ae8e812580a15c874c0651e1e35c614c 100644
--- a/python/utils.py
+++ b/python/utils.py
@@ -16,9 +16,10 @@ import sys
 import subprocess
 import unittest
 
-def get_command_output(command):
+def get_command_output(command, with_current_environment=False):
     "lifted from supy (https://github.com/elaird/supy/blob/master/utils/io.py)"
-    p = subprocess.Popen(command, shell = True, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
+    env = None if not with_current_environment else os.environ.copy()
+    p = subprocess.Popen(command, shell = True, stdout = subprocess.PIPE, stderr = subprocess.PIPE, env=env)
     stdout,stderr = p.communicate()
     return {"stdout":stdout, "stderr":stderr, "returncode":p.returncode}
 
@@ -79,12 +80,13 @@ def json_read(fname) :
 def rm_if_exists(filename) :
     if os.path.exists(filename) : os.remove(filename)
 
-def mkdir_if_needed(dirname) :
+def mkdir_if_needed(dirname, verbose=False) :
     dest_dir = None
     if os.path.exists(dirname) and os.path.isdir(dirname) :
         dest_dir = dirname
     elif not os.path.exists(dirname) :
         os.makedirs(dirname)
+        if verbose: print "created %s" % dirname
         dest_dir = dirname
     return dest_dir
 
@@ -120,6 +122,8 @@ def remove_duplicates(seq=[]) :
 def print_running_conditions(parser, opts):
     print "working from {0}".format(os.getcwd())
     print "being called as : {0}".format(' '.join(os.sys.argv))
+
+def print_parsed_options(parser, opts):
     all_options = [x.dest for x in parser._get_all_options()[1:]]
     print "options parsed:\n"+'\n'.join("%s : %s"%(o, str(getattr(opts, o))) for o in all_options)
 
@@ -138,6 +142,22 @@ def filename_without_extension(filename):
         raise IOError("'%s' is not a file" % filename)
     return os.path.splitext(os.path.basename(filename))[0]
 
+def drop_continuation_lines(split_lines=[]):
+    r"""remove continuation characters '\' joining split lines
+
+    Adapted from
+    http://stackoverflow.com/questions/16480495/read-a-file-with-line-continuation-characters-in-python
+    """
+    out_lines = []
+    current_line = ''
+    for line in split_lines:
+        line = line.rstrip('\n')
+        if line.endswith('\\'):
+            current_line += line[:-1]
+        else:
+            out_lines.append(current_line + line)
+            current_line = ''
+    return out_lines
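+
+# minimal sketch on hypothetical input:
+#   drop_continuation_lines(['four \\', 'score', 'and seven'])
+#   --> ['four score', 'and seven']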
 #
 # testing
 #